From 57bb24aef16a3aa3fef000de99d95194d675809d Mon Sep 17 00:00:00 2001
From: matt
Date: Thu, 14 Aug 2025 18:17:29 -0500
Subject: [PATCH] Fix: Auto-generate docs snippets and examples (#2139)

* fix: gitignore all the generated stuff
* debug: try fixing build
* debug: build error part ii
* debug: move more deps out of dev
* fix: lock
* debug: lockfile
* fix: make dir
* fix: ci
* fix: dir
* debug: sed
* fix: sed
* debug: allow skipping
* Revert "debug: allow skipping"

  This reverts commit 88e0ff870d4c13b05372773492ea149b1cc4244f.

* debug: ci
* fix: corepack
* debug: dir
* debug: sed
* debug: path
* fix: rm sync docs for now
* fix: remove more stuff
* fix: rm unused stuff
* fix: rm copy:app
* chore: lint
* fix: rm prettier from boot
* fix: couple missing scripts
* feat: auto-gen examples on push to main
* debug: test on this branch
* fix: install pnpm
* fix: cd
* fix: cmd
* Auto-generate files after merge [skip ci]
* fix: only copy examples
* debug: dummy commit for examples check
* chore: regenerate examples
* fix: naming
* fix: unwind dummy
* fix: only run on main
* fix: pre commit
* fix: naming
* chore: gen, fix task pre
* feat: create pr
* feat: only push examples changes
* fix: don't run from this branch
* fix: regen lockfile
* fix: regen docs lockfile

---------

Co-authored-by: GitHub Action
---
(Hedged sketches of the new concurrency-cancellation examples follow the file list at the end of this patch.)

 .github/workflows/docs.yml | 6 +
 .github/workflows/gen-examples.yml | 80 +
 .github/workflows/test.yml | 4 -
 .gitignore | 4 +
 Taskfile.yaml | 6 +-
 .../test_concurrency_cancel_in_progress.py | 47 +
 .../concurrency_cancel_in_progress/worker.py | 39 +
 .../test_concurrency_cancel_newest.py | 61 +
 .../concurrency_cancel_newest/worker.py | 39 +
 examples/python/dag/worker.py | 2 +-
 examples/python/worker.py | 6 +
 frontend/app/package.json | 5 +-
 frontend/app/pnpm-lock.yaml | 3114 ++++++++---------
 .../app/src/lib/api/generated/http-client.ts | 1 +
 .../app/src/next/lib/docs/generated/_meta.ts | 58 -
 .../src/next/lib/docs/generated/blog/_meta.ts | 49 -
 .../lib/docs/generated/contributing/_meta.ts | 16 -
 .../src/next/lib/docs/generated/home/_meta.ts | 265 --
 .../lib/docs/generated/home/compute/_meta.ts | 32 -
 .../app/src/next/lib/docs/generated/index.ts | 21 -
 .../src/next/lib/docs/generated/sdks/_meta.ts | 11 -
 .../lib/docs/generated/sdks/python/_meta.ts | 32 -
 .../sdks/python/feature-clients/_meta.ts | 67 -
 .../lib/docs/generated/self-hosting/_meta.ts | 87 -
 .../snips/go/bulk-operations/index.ts | 3 -
 .../snips/go/bulk-operations/main.ts | 29 -
 .../next/lib/docs/generated/snips/go/index.ts | 17 -
 .../go/migration-guides/hatchet-client.ts | 12 -
 .../snips/go/migration-guides/index.ts | 5 -
 .../snips/go/migration-guides/mergent.ts | 41 -
 .../snips/go/quickstart/cmd/index.ts | 5 -
 .../snips/go/quickstart/cmd/run/index.ts | 3 -
 .../snips/go/quickstart/cmd/run/main.ts | 12 -
 .../snips/go/quickstart/cmd/worker/index.ts | 3 -
 .../snips/go/quickstart/cmd/worker/main.ts | 12 -
 .../hatchet_client/hatchet_client.ts | 12 -
 .../go/quickstart/hatchet_client/index.ts | 3 -
 .../generated/snips/go/quickstart/index.ts | 7 -
 .../go/quickstart/workflows/first_task.ts | 12 -
 .../snips/go/quickstart/workflows/index.ts | 3 -
 .../lib/docs/generated/snips/go/run/all.ts | 12 -
 .../lib/docs/generated/snips/go/run/bulk.ts | 17 -
 .../lib/docs/generated/snips/go/run/cron.ts | 25 -
 .../lib/docs/generated/snips/go/run/event.ts | 29 -
 .../lib/docs/generated/snips/go/run/index.ts | 13 -
 .../docs/generated/snips/go/run/priority.ts | 21 -
 .../lib/docs/generated/snips/go/run/simple.ts | 29 -
 .../snips/go/streaming/consumer/index.ts | 3 -
 .../snips/go/streaming/consumer/main.ts | 17 -
 .../generated/snips/go/streaming/index.ts | 9 -
 .../snips/go/streaming/server/index.ts | 3 -
 .../snips/go/streaming/server/main.ts | 17 -
 .../snips/go/streaming/shared/index.ts | 3 -
 .../snips/go/streaming/shared/task.ts | 17 -
 .../snips/go/streaming/worker/index.ts | 3 -
 .../snips/go/streaming/worker/main.ts | 12 -
 .../docs/generated/snips/go/worker/index.ts | 3 -
 .../docs/generated/snips/go/worker/start.ts | 12 -
 .../snips/go/workflows/cancellations.ts | 17 -
 .../snips/go/workflows/child-workflows.ts | 12 -
 .../snips/go/workflows/complex-conditions.ts | 41 -
 .../snips/go/workflows/concurrency-rr.ts | 21 -
 .../snips/go/workflows/dag-with-conditions.ts | 12 -
 .../docs/generated/snips/go/workflows/dag.ts | 25 -
 .../snips/go/workflows/durable-event.ts | 21 -
 .../snips/go/workflows/durable-sleep.ts | 17 -
 .../generated/snips/go/workflows/index.ts | 37 -
 .../snips/go/workflows/non-retryable-error.ts | 17 -
 .../generated/snips/go/workflows/on-cron.ts | 17 -
 .../generated/snips/go/workflows/on-event.ts | 25 -
 .../snips/go/workflows/on-failure.ts | 12 -
 .../generated/snips/go/workflows/priority.ts | 21 -
 .../generated/snips/go/workflows/ratelimit.ts | 25 -
 .../generated/snips/go/workflows/retries.ts | 25 -
 .../generated/snips/go/workflows/simple.ts | 29 -
 .../generated/snips/go/workflows/sticky.ts | 12 -
 .../generated/snips/go/workflows/timeouts.ts | 21 -
 .../go/z_v0/assignment-affinity/index.ts | 5 -
 .../snips/go/z_v0/assignment-affinity/main.ts | 12 -
 .../snips/go/z_v0/assignment-affinity/run.ts | 12 -
 .../snips/go/z_v0/assignment-sticky/index.ts | 5 -
 .../snips/go/z_v0/assignment-sticky/main.ts | 12 -
 .../snips/go/z_v0/assignment-sticky/run.ts | 21 -
 .../snips/go/z_v0/bulk_imports/index.ts | 3 -
 .../snips/go/z_v0/bulk_imports/main.ts | 12 -
 .../snips/go/z_v0/bulk_workflows/index.ts | 5 -
 .../snips/go/z_v0/bulk_workflows/main.ts | 12 -
 .../snips/go/z_v0/bulk_workflows/run.ts | 12 -
 .../snips/go/z_v0/cancellation/index.ts | 5 -
 .../snips/go/z_v0/cancellation/main.ts | 12 -
 .../snips/go/z_v0/cancellation/run.ts | 12 -
 .../generated/snips/go/z_v0/compute/index.ts | 3 -
 .../generated/snips/go/z_v0/compute/main.ts | 12 -
 .../snips/go/z_v0/concurrency/index.ts | 3 -
 .../snips/go/z_v0/concurrency/main.ts | 12 -
 .../snips/go/z_v0/cron-programmatic/index.ts | 3 -
 .../snips/go/z_v0/cron-programmatic/main.ts | 25 -
 .../generated/snips/go/z_v0/cron/index.ts | 3 -
 .../docs/generated/snips/go/z_v0/cron/main.ts | 17 -
 .../docs/generated/snips/go/z_v0/dag/index.ts | 3 -
 .../docs/generated/snips/go/z_v0/dag/main.ts | 12 -
 .../snips/go/z_v0/deprecated/index.ts | 9 -
 .../z_v0/deprecated/requeue/.hatchet/index.ts | 3 -
 .../requeue/.hatchet/job-requeue-workflow.ts | 12 -
 .../snips/go/z_v0/deprecated/requeue/index.ts | 5 -
 .../snips/go/z_v0/deprecated/requeue/main.ts | 12 -
 .../schedule-timeout/.hatchet/index.ts | 3 -
 .../.hatchet/schedule-timeout-workflow.ts | 13 -
 .../z_v0/deprecated/schedule-timeout/index.ts | 5 -
 .../z_v0/deprecated/schedule-timeout/main.ts | 12 -
 .../z_v0/deprecated/timeout/.hatchet/index.ts | 5 -
 .../timeout/.hatchet/job-timeout-workflow.ts | 12 -
 .../timeout/.hatchet/step-timeout-workflow.ts | 12 -
 .../snips/go/z_v0/deprecated/timeout/index.ts | 5 -
 .../snips/go/z_v0/deprecated/timeout/main.ts | 12 -
 .../go/z_v0/deprecated/yaml/.hatchet/index.ts | 3 -
 .../yaml/.hatchet/sample-workflow.ts | 12 -
 .../snips/go/z_v0/deprecated/yaml/README.ts | 12 -
 .../snips/go/z_v0/deprecated/yaml/index.ts | 7 -
 .../snips/go/z_v0/deprecated/yaml/main.ts | 12 -
 .../snips/go/z_v0/errors-test/index.ts | 3 -
 .../snips/go/z_v0/errors-test/main.ts | 12 -
 .../lib/docs/generated/snips/go/z_v0/index.ts | 61 -
 .../cancel-in-progress/index.ts | 3 -
 .../cancel-in-progress/main.ts | 12 -
 .../group-round-robin-advanced/index.ts | 3 -
 .../group-round-robin-advanced/main.ts | 12 -
 .../group-round-robin/index.ts | 3 -
 .../group-round-robin/main.ts | 12 -
 .../snips/go/z_v0/limit-concurrency/index.ts | 7 -
 .../generated/snips/go/z_v0/logging/index.ts | 3 -
 .../generated/snips/go/z_v0/logging/main.ts | 12 -
 .../snips/go/z_v0/manual-trigger/index.ts | 5 -
 .../go/z_v0/manual-trigger/trigger/index.ts | 3 -
 .../go/z_v0/manual-trigger/trigger/main.ts | 12 -
 .../go/z_v0/manual-trigger/worker/index.ts | 3 -
 .../go/z_v0/manual-trigger/worker/main.ts | 12 -
 .../snips/go/z_v0/middleware/index.ts | 5 -
 .../snips/go/z_v0/middleware/main.ts | 12 -
 .../generated/snips/go/z_v0/middleware/run.ts | 12 -
 .../snips/go/z_v0/namespaced/index.ts | 3 -
 .../snips/go/z_v0/namespaced/main.ts | 12 -
 .../generated/snips/go/z_v0/no-tls/index.ts | 3 -
 .../generated/snips/go/z_v0/no-tls/main.ts | 12 -
 .../snips/go/z_v0/on-failure/index.ts | 3 -
 .../snips/go/z_v0/on-failure/main.ts | 17 -
 .../snips/go/z_v0/procedural/index.ts | 3 -
 .../snips/go/z_v0/procedural/main.ts | 12 -
 .../snips/go/z_v0/rate-limit/index.ts | 3 -
 .../snips/go/z_v0/rate-limit/main.ts | 12 -
 .../snips/go/z_v0/register-action/index.ts | 3 -
 .../snips/go/z_v0/register-action/main.ts | 12 -
 .../go/z_v0/retries-with-backoff/index.ts | 3 -
 .../go/z_v0/retries-with-backoff/main.ts | 17 -
 .../generated/snips/go/z_v0/retries/index.ts | 3 -
 .../generated/snips/go/z_v0/retries/main.ts | 12 -
 .../snips/go/z_v0/scheduled/index.ts | 3 -
 .../generated/snips/go/z_v0/scheduled/main.ts | 25 -
 .../generated/snips/go/z_v0/simple/index.ts | 3 -
 .../generated/snips/go/z_v0/simple/main.ts | 12 -
 .../go/z_v0/stream-event-by-meta/index.ts | 3 -
 .../go/z_v0/stream-event-by-meta/main.ts | 12 -
 .../snips/go/z_v0/stream-event/index.ts | 3 -
 .../snips/go/z_v0/stream-event/main.ts | 12 -
 .../generated/snips/go/z_v0/timeout/index.ts | 5 -
 .../generated/snips/go/z_v0/timeout/main.ts | 17 -
 .../generated/snips/go/z_v0/timeout/run.ts | 12 -
 .../generated/snips/go/z_v0/webhook/index.ts | 5 -
 .../generated/snips/go/z_v0/webhook/main.ts | 12 -
 .../generated/snips/go/z_v0/webhook/run.ts | 12 -
 .../next/lib/docs/generated/snips/index.ts | 9 -
 .../docs/generated/snips/python/__init__.ts | 11 -
 .../snips/python/affinity_workers/index.ts | 5 -
 .../snips/python/affinity_workers/trigger.ts | 12 -
 .../snips/python/affinity_workers/worker.ts | 25 -
 .../docs/generated/snips/python/api/api.ts | 12 -
 .../generated/snips/python/api/async_api.ts | 12 -
 .../docs/generated/snips/python/api/index.ts | 5 -
 .../blocked_async/blocking_example_trigger.ts | 17 -
 .../blocked_async/blocking_example_worker.ts | 17 -
 .../snips/python/blocked_async/debugging.ts | 25 -
 .../snips/python/blocked_async/index.ts | 11 -
 .../snips/python/blocked_async/trigger.ts | 12 -
 .../snips/python/blocked_async/worker.ts | 12 -
 .../snips/python/bulk_fanout/bulk_trigger.ts | 12 -
 .../snips/python/bulk_fanout/index.ts | 11 -
 .../snips/python/bulk_fanout/stream.ts | 12 -
 .../python/bulk_fanout/test_bulk_fanout.ts | 12 -
 .../snips/python/bulk_fanout/trigger.ts | 12 -
 .../snips/python/bulk_fanout/worker.ts | 17 -
 .../snips/python/bulk_operations/cancel.ts | 29 -
 .../snips/python/bulk_operations/index.ts | 9 -
 .../snips/python/bulk_operations/replay.ts | 29 -
 .../bulk_operations/test_bulk_replay.ts | 12 -
 .../snips/python/bulk_operations/worker.ts | 12 -
 .../snips/python/cancellation/index.ts | 7 -
 .../python/cancellation/test_cancellation.ts | 12 -
 .../snips/python/cancellation/trigger.ts | 12 -
 .../snips/python/cancellation/worker.ts | 21 -
 .../docs/generated/snips/python/child/bulk.ts | 25 -
 .../generated/snips/python/child/index.ts | 9 -
 .../snips/python/child/simple-fanout.ts | 17 -
 .../generated/snips/python/child/trigger.ts | 29 -
 .../generated/snips/python/child/worker.ts | 17 -
 .../snips/python/concurrency_limit/index.ts | 5 -
 .../snips/python/concurrency_limit/trigger.ts | 12 -
 .../snips/python/concurrency_limit/worker.ts | 17 -
 .../python/concurrency_limit_rr/index.ts | 7 -
 .../test_concurrency_limit_rr.ts | 12 -
 .../python/concurrency_limit_rr/trigger.ts | 12 -
 .../python/concurrency_limit_rr/worker.ts | 17 -
 .../python/concurrency_limit_rr_load/event.ts | 12 -
 .../python/concurrency_limit_rr_load/index.ts | 5 -
 .../concurrency_limit_rr_load/worker.ts | 12 -
 .../python/concurrency_multiple_keys/index.ts | 5 -
 .../test_multiple_concurrency_keys.ts | 13 -
 .../concurrency_multiple_keys/worker.ts | 17 -
 .../concurrency_workflow_level/index.ts | 5 -
 .../test_workflow_level_concurrency.ts | 13 -
 .../concurrency_workflow_level/worker.ts | 17 -
 .../snips/python/conditions/index.ts | 7 -
 .../python/conditions/test_conditions.ts | 12 -
 .../snips/python/conditions/trigger.ts | 12 -
 .../snips/python/conditions/worker.ts | 45 -
 .../docs/generated/snips/python/cron/index.ts | 7 -
 .../snips/python/cron/programatic-async.ts | 29 -
 .../snips/python/cron/programatic-sync.ts | 29 -
 .../snips/python/cron/workflow-definition.ts | 17 -
 .../docs/generated/snips/python/dag/index.ts | 7 -
 .../generated/snips/python/dag/test_dag.ts | 12 -
 .../generated/snips/python/dag/trigger.ts | 12 -
 .../docs/generated/snips/python/dag/worker.ts | 12 -
 .../generated/snips/python/dedupe/index.ts | 3 -
 .../generated/snips/python/dedupe/worker.ts | 12 -
 .../generated/snips/python/delayed/index.ts | 7 -
 .../snips/python/delayed/test_delayed.ts | 12 -
 .../generated/snips/python/delayed/trigger.ts | 12 -
 .../generated/snips/python/delayed/worker.ts | 12 -
 .../python/dependency_injection/index.ts | 5 -
 .../test_dependency_injection.ts | 12 -
 .../python/dependency_injection/worker.ts | 21 -
 .../generated/snips/python/durable/index.ts | 7 -
 .../snips/python/durable/test_durable.ts | 12 -
 .../generated/snips/python/durable/trigger.ts | 12 -
 .../generated/snips/python/durable/worker.ts | 25 -
 .../snips/python/durable_event/index.ts | 5 -
 .../snips/python/durable_event/trigger.ts | 12 -
 .../snips/python/durable_event/worker.ts | 21 -
 .../snips/python/durable_sleep/index.ts | 5 -
 .../snips/python/durable_sleep/trigger.ts | 12 -
 .../snips/python/durable_sleep/worker.ts | 17 -
 .../generated/snips/python/events/event.ts | 17 -
 .../generated/snips/python/events/filter.ts | 25 -
 .../generated/snips/python/events/index.ts | 9 -
 .../snips/python/events/test_event.ts | 12 -
 .../generated/snips/python/events/worker.ts | 25 -
 .../generated/snips/python/fanout/index.ts | 11 -
 .../generated/snips/python/fanout/stream.ts | 12 -
 .../snips/python/fanout/sync_stream.ts | 12 -
 .../snips/python/fanout/test_fanout.ts | 12 -
 .../generated/snips/python/fanout/trigger.ts | 12 -
 .../generated/snips/python/fanout/worker.ts | 21 -
 .../snips/python/fanout_sync/index.ts | 7 -
 .../python/fanout_sync/test_fanout_sync.ts | 12 -
 .../snips/python/fanout_sync/trigger.ts | 12 -
 .../snips/python/fanout_sync/worker.ts | 12 -
 .../generated/snips/python/hatchet_client.ts | 12 -
 .../lib/docs/generated/snips/python/index.ts | 99 -
 .../generated/snips/python/lifespans/index.ts | 9 -
 .../snips/python/lifespans/simple.ts | 17 -
 .../snips/python/lifespans/test_lifespans.ts | 12 -
 .../snips/python/lifespans/trigger.ts | 12 -
 .../snips/python/lifespans/worker.ts | 21 -
 .../generated/snips/python/logger/client.ts | 17 -
 .../generated/snips/python/logger/index.ts | 11 -
 .../snips/python/logger/test_logger.ts | 12 -
 .../generated/snips/python/logger/trigger.ts | 12 -
 .../generated/snips/python/logger/worker.ts | 12 -
 .../generated/snips/python/logger/workflow.ts | 21 -
 .../snips/python/manual_slot_release/index.ts | 3 -
 .../python/manual_slot_release/worker.ts | 17 -
 .../snips/python/migration_guides/__init__.ts | 11 -
 .../python/migration_guides/hatchet_client.ts | 11 -
 .../snips/python/migration_guides/index.ts | 7 -
 .../snips/python/migration_guides/mergent.ts | 37 -
 .../snips/python/non_retryable/index.ts | 7 -
 .../python/non_retryable/test_no_retry.ts | 12 -
 .../snips/python/non_retryable/trigger.ts | 12 -
 .../snips/python/non_retryable/worker.ts | 17 -
 .../snips/python/on_failure/index.ts | 7 -
 .../python/on_failure/test_on_failure.ts | 12 -
 .../snips/python/on_failure/trigger.ts | 12 -
 .../snips/python/on_failure/worker.ts | 21 -
 .../snips/python/on_success/index.ts | 5 -
 .../snips/python/on_success/trigger.ts | 12 -
 .../snips/python/on_success/worker.ts | 12 -
 .../opentelemetry_instrumentation/client.ts | 11 -
 .../opentelemetry_instrumentation/index.ts | 11 -
 .../langfuse/client.ts | 21 -
 .../langfuse/index.ts | 7 -
 .../langfuse/trigger.ts | 17 -
 .../langfuse/worker.ts | 17 -
 .../opentelemetry_instrumentation/tracer.ts | 12 -
 .../opentelemetry_instrumentation/triggers.ts | 12 -
 .../opentelemetry_instrumentation/worker.ts | 12 -
 .../generated/snips/python/priority/index.ts | 7 -
 .../snips/python/priority/test_priority.ts | 12 -
 .../snips/python/priority/trigger.ts | 21 -
 .../generated/snips/python/priority/worker.ts | 17 -
 .../snips/python/quickstart/README.ts | 12 -
 .../snips/python/quickstart/__init__.ts | 11 -
 .../snips/python/quickstart/gitignore.ts | 12 -
 .../snips/python/quickstart/hatchet_client.ts | 12 -
 .../snips/python/quickstart/index.ts | 19 -
 .../snips/python/quickstart/poetry.ts | 12 -
 .../snips/python/quickstart/pyproject.ts | 12 -
 .../generated/snips/python/quickstart/run.ts | 12 -
 .../snips/python/quickstart/worker.ts | 12 -
 .../python/quickstart/workflows/__init__.ts | 11 -
 .../python/quickstart/workflows/first_task.ts | 12 -
 .../python/quickstart/workflows/index.ts | 5 -
 .../snips/python/rate_limit/dynamic.ts | 12 -
 .../snips/python/rate_limit/index.ts | 7 -
 .../snips/python/rate_limit/trigger.ts | 12 -
 .../snips/python/rate_limit/worker.ts | 25 -
 .../generated/snips/python/retries/index.ts | 3 -
 .../generated/snips/python/retries/worker.ts | 25 -
 .../snips/python/return_exceptions/index.ts | 5 -
 .../test_return_exceptions.ts | 12 -
 .../snips/python/return_exceptions/worker.ts | 12 -
 .../generated/snips/python/scheduled/index.ts | 5 -
 .../python/scheduled/programatic-async.ts | 29 -
 .../python/scheduled/programatic-sync.ts | 29 -
 .../generated/snips/python/simple/index.ts | 7 -
 .../python/simple/test_simple_workflow.ts | 12 -
 .../generated/snips/python/simple/trigger.ts | 11 -
 .../generated/snips/python/simple/worker.ts | 17 -
 .../snips/python/sticky_workers/event.ts | 12 -
 .../snips/python/sticky_workers/index.ts | 5 -
 .../snips/python/sticky_workers/worker.ts | 21 -
 .../snips/python/streaming/async_stream.ts | 17 -
 .../snips/python/streaming/fastapi_proxy.ts | 17 -
 .../generated/snips/python/streaming/index.ts | 9 -
 .../snips/python/streaming/test_streaming.ts | 12 -
 .../snips/python/streaming/worker.ts | 17 -
 .../generated/snips/python/timeout/index.ts | 7 -
 .../snips/python/timeout/test_timeout.ts | 12 -
 .../generated/snips/python/timeout/trigger.ts | 12 -
 .../generated/snips/python/timeout/worker.ts | 25 -
 .../snips/python/unit_testing/index.ts | 5 -
 .../snips/python/unit_testing/test_unit.ts | 12 -
 .../snips/python/unit_testing/workflows.ts | 12 -
 .../generated/snips/python/webhooks/index.ts | 5 -
 .../snips/python/webhooks/test_webhooks.ts | 12 -
 .../generated/snips/python/webhooks/worker.ts | 17 -
 .../lib/docs/generated/snips/python/worker.ts | 12 -
 .../python/worker_existing_loop/index.ts | 3 -
 .../python/worker_existing_loop/worker.ts | 12 -
 .../python/workflow_registration/index.ts | 3 -
 .../python/workflow_registration/worker.ts | 17 -
 .../next/lib/docs/generated/snips/types.ts | 31 -
 .../snips/typescript/cancellations/index.ts | 7 -
 .../snips/typescript/cancellations/run.ts | 17 -
 .../snips/typescript/cancellations/worker.ts | 17 -
 .../typescript/cancellations/workflow.ts | 21 -
 .../snips/typescript/child_workflows/index.ts | 7 -
 .../snips/typescript/child_workflows/run.ts | 12 -
 .../typescript/child_workflows/worker.ts | 12 -
 .../typescript/child_workflows/workflow.ts | 21 -
 .../snips/typescript/concurrency-rr/index.ts | 9 -
 .../snips/typescript/concurrency-rr/load.ts | 12 -
 .../snips/typescript/concurrency-rr/run.ts | 12 -
 .../snips/typescript/concurrency-rr/worker.ts | 12 -
 .../typescript/concurrency-rr/workflow.ts | 21 -
 .../generated/snips/typescript/dag/index.ts | 9 -
 .../typescript/dag/interface-workflow.ts | 12 -
 .../generated/snips/typescript/dag/run.ts | 12 -
 .../generated/snips/typescript/dag/worker.ts | 12 -
 .../snips/typescript/dag/workflow.ts | 17 -
 .../dag_match_condition/complex-workflow.ts | 41 -
 .../typescript/dag_match_condition/event.ts | 12 -
 .../typescript/dag_match_condition/index.ts | 11 -
 .../typescript/dag_match_condition/run.ts | 12 -
 .../typescript/dag_match_condition/worker.ts | 12 -
 .../dag_match_condition/workflow.ts | 12 -
 .../generated/snips/typescript/deep/index.ts | 7 -
 .../generated/snips/typescript/deep/run.ts | 12 -
 .../generated/snips/typescript/deep/worker.ts | 12 -
 .../snips/typescript/deep/workflow.ts | 12 -
 .../snips/typescript/durable-event/event.ts | 12 -
 .../snips/typescript/durable-event/index.ts | 9 -
 .../snips/typescript/durable-event/run.ts | 12 -
 .../snips/typescript/durable-event/worker.ts | 12 -
 .../typescript/durable-event/workflow.ts | 21 -
 .../snips/typescript/durable-sleep/event.ts | 12 -
 .../snips/typescript/durable-sleep/index.ts | 9 -
 .../snips/typescript/durable-sleep/run.ts | 12 -
 .../snips/typescript/durable-sleep/worker.ts | 12 -
 .../typescript/durable-sleep/workflow.ts | 17 -
 .../snips/typescript/hatchet-client.ts | 12 -
 .../typescript/high-memory/child-worker.ts | 12 -
 .../snips/typescript/high-memory/index.ts | 9 -
 .../typescript/high-memory/parent-worker.ts | 12 -
 .../snips/typescript/high-memory/run.ts | 12 -
 .../high-memory/workflow-with-child.ts | 12 -
 .../docs/generated/snips/typescript/index.ts | 59 -
 .../snips/typescript/inferred-typing/index.ts | 7 -
 .../snips/typescript/inferred-typing/run.ts | 12 -
 .../typescript/inferred-typing/worker.ts | 12 -
 .../typescript/inferred-typing/workflow.ts | 12 -
 .../landing_page/durable-excution.ts | 21 -
 .../landing_page/event-signaling.ts | 17 -
 .../typescript/landing_page/flow-control.ts | 17 -
 .../snips/typescript/landing_page/index.ts | 13 -
 .../snips/typescript/landing_page/queues.ts | 21 -
 .../typescript/landing_page/scheduling.ts | 17 -
 .../typescript/landing_page/task-routing.ts | 17 -
 .../snips/typescript/legacy/index.ts | 7 -
 .../generated/snips/typescript/legacy/run.ts | 12 -
 .../snips/typescript/legacy/worker.ts | 12 -
 .../snips/typescript/legacy/workflow.ts | 12 -
 .../migration-guides/hatchet-client.ts | 12 -
 .../typescript/migration-guides/index.ts | 5 -
 .../typescript/migration-guides/mergent.ts | 37 -
 .../multiple_wf_concurrency/index.ts | 7 -
 .../typescript/multiple_wf_concurrency/run.ts | 12 -
 .../multiple_wf_concurrency/worker.ts | 12 -
 .../multiple_wf_concurrency/workflow.ts | 17 -
 .../snips/typescript/non_retryable/index.ts | 7 -
 .../snips/typescript/non_retryable/run.ts | 12 -
 .../snips/typescript/non_retryable/worker.ts | 12 -
 .../typescript/non_retryable/workflow.ts | 17 -
 .../snips/typescript/on_cron/index.ts | 5 -
 .../snips/typescript/on_cron/worker.ts | 12 -
 .../snips/typescript/on_cron/workflow.ts | 17 -
 .../snips/typescript/on_event/event.e2e.ts | 12 -
 .../snips/typescript/on_event/event.ts | 17 -
 .../snips/typescript/on_event/filter.ts | 25 -
 .../snips/typescript/on_event/index.ts | 11 -
 .../snips/typescript/on_event/worker.ts | 12 -
 .../snips/typescript/on_event/workflow.ts | 25 -
 .../snips/typescript/on_failure/index.ts | 7 -
 .../snips/typescript/on_failure/run.ts | 12 -
 .../snips/typescript/on_failure/worker.ts | 12 -
 .../snips/typescript/on_failure/workflow.ts | 17 -
 .../snips/typescript/on_success/index.ts | 7 -
 .../snips/typescript/on_success/run.ts | 12 -
 .../snips/typescript/on_success/worker.ts | 12 -
 .../snips/typescript/on_success/workflow.ts | 17 -
 .../snips/typescript/priority/index.ts | 7 -
 .../snips/typescript/priority/run.ts | 21 -
 .../snips/typescript/priority/worker.ts | 12 -
 .../snips/typescript/priority/workflow.ts | 21 -
 .../snips/typescript/quickstart/gitignore.ts | 12 -
 .../typescript/quickstart/hatchet-client.ts | 17 -
 .../snips/typescript/quickstart/index.ts | 11 -
 .../snips/typescript/quickstart/run.ts | 12 -
 .../snips/typescript/quickstart/worker.ts | 12 -
 .../quickstart/workflows/first-task.ts | 12 -
 .../typescript/quickstart/workflows/index.ts | 3 -
 .../snips/typescript/rate_limit/index.ts | 3 -
 .../snips/typescript/rate_limit/workflow.ts | 25 -
 .../snips/typescript/retries/index.ts | 7 -
 .../generated/snips/typescript/retries/run.ts | 12 -
 .../snips/typescript/retries/worker.ts | 12 -
 .../snips/typescript/retries/workflow.ts | 25 -
 .../generated/snips/typescript/simple/bulk.ts | 21 -
 .../snips/typescript/simple/client-run.ts | 17 -
 .../generated/snips/typescript/simple/cron.ts | 25 -
 .../snips/typescript/simple/delay.ts | 12 -
 .../snips/typescript/simple/enqueue.ts | 21 -
 .../snips/typescript/simple/index.ts | 23 -
 .../generated/snips/typescript/simple/run.ts | 25 -
 .../snips/typescript/simple/schedule.ts | 25 -
 .../snips/typescript/simple/stub-workflow.ts | 17 -
 .../snips/typescript/simple/worker.ts | 17 -
 .../typescript/simple/workflow-with-child.ts | 17 -
 .../snips/typescript/simple/workflow.ts | 17 -
 .../snips/typescript/sticky/index.ts | 7 -
 .../generated/snips/typescript/sticky/run.ts | 12 -
 .../snips/typescript/sticky/worker.ts | 12 -
 .../snips/typescript/sticky/workflow.ts | 17 -
 .../snips/typescript/streaming/index.ts | 9 -
 .../typescript/streaming/nextjs-proxy.ts | 17 -
 .../snips/typescript/streaming/run.ts | 17 -
 .../snips/typescript/streaming/worker.ts | 12 -
 .../snips/typescript/streaming/workflow.ts | 17 -
 .../snips/typescript/timeouts/index.ts | 7 -
 .../snips/typescript/timeouts/run.ts | 17 -
 .../snips/typescript/timeouts/worker.ts | 17 -
 .../snips/typescript/timeouts/workflow.ts | 17 -
 .../snips/typescript/with_timeouts/index.ts | 3 -
 .../typescript/with_timeouts/workflow.ts | 21 -
 frontend/app/src/next/lib/docs/snips.ts | 5 -
 frontend/app/src/next/lib/docs/sync-docs.ts | 332 --
 frontend/docs/components/code/Snippet.tsx | 26 +-
 .../snips/go/bulk-operations/index.ts | 3 -
 .../snips/go/bulk-operations/main.ts | 28 -
 frontend/docs/lib/generated/snips/go/index.ts | 17 -
 .../go/migration-guides/hatchet-client.ts | 11 -
 .../snips/go/migration-guides/index.ts | 5 -
 .../snips/go/migration-guides/mergent.ts | 40 -
 .../snips/go/quickstart/cmd/index.ts | 5 -
 .../snips/go/quickstart/cmd/run/index.ts | 3 -
 .../snips/go/quickstart/cmd/run/main.ts | 11 -
 .../snips/go/quickstart/cmd/worker/index.ts | 3 -
 .../snips/go/quickstart/cmd/worker/main.ts | 11 -
 .../hatchet_client/hatchet_client.ts | 11 -
 .../go/quickstart/hatchet_client/index.ts | 3 -
 .../generated/snips/go/quickstart/index.ts | 7 -
 .../go/quickstart/workflows/first_task.ts | 11 -
 .../snips/go/quickstart/workflows/index.ts | 3 -
 .../docs/lib/generated/snips/go/run/all.ts | 11 -
 .../docs/lib/generated/snips/go/run/bulk.ts | 16 -
 .../docs/lib/generated/snips/go/run/cron.ts | 24 -
 .../docs/lib/generated/snips/go/run/event.ts | 28 -
 .../docs/lib/generated/snips/go/run/index.ts | 13 -
 .../lib/generated/snips/go/run/priority.ts | 20 -
 .../docs/lib/generated/snips/go/run/simple.ts | 28 -
 .../snips/go/streaming/consumer/index.ts | 3 -
 .../snips/go/streaming/consumer/main.ts | 16 -
 .../lib/generated/snips/go/streaming/index.ts | 9 -
 .../snips/go/streaming/server/index.ts | 3 -
 .../snips/go/streaming/server/main.ts | 16 -
 .../snips/go/streaming/shared/index.ts | 3 -
 .../snips/go/streaming/shared/task.ts | 16 -
 .../snips/go/streaming/worker/index.ts | 3 -
 .../snips/go/streaming/worker/main.ts | 11 -
 .../lib/generated/snips/go/worker/index.ts | 3 -
 .../lib/generated/snips/go/worker/start.ts | 11 -
 .../snips/go/workflows/cancellations.ts | 16 -
 .../snips/go/workflows/child-workflows.ts | 11 -
 .../snips/go/workflows/complex-conditions.ts | 40 -
 .../snips/go/workflows/concurrency-rr.ts | 20 -
 .../snips/go/workflows/dag-with-conditions.ts | 11 -
 .../lib/generated/snips/go/workflows/dag.ts | 24 -
 .../snips/go/workflows/durable-event.ts | 20 -
 .../snips/go/workflows/durable-sleep.ts | 16 -
 .../lib/generated/snips/go/workflows/index.ts | 37 -
 .../snips/go/workflows/non-retryable-error.ts | 16 -
 .../generated/snips/go/workflows/on-cron.ts | 16 -
 .../generated/snips/go/workflows/on-event.ts | 24 -
 .../snips/go/workflows/on-failure.ts | 11 -
 .../generated/snips/go/workflows/priority.ts | 20 -
 .../generated/snips/go/workflows/ratelimit.ts | 24 -
 .../generated/snips/go/workflows/retries.ts | 24 -
 .../generated/snips/go/workflows/simple.ts | 28 -
 .../generated/snips/go/workflows/sticky.ts | 11 -
 .../generated/snips/go/workflows/timeouts.ts | 20 -
 .../go/z_v0/assignment-affinity/index.ts | 5 -
 .../snips/go/z_v0/assignment-affinity/main.ts | 11 -
 .../snips/go/z_v0/assignment-affinity/run.ts | 11 -
 .../snips/go/z_v0/assignment-sticky/index.ts | 5 -
 .../snips/go/z_v0/assignment-sticky/main.ts | 11 -
 .../snips/go/z_v0/assignment-sticky/run.ts | 20 -
 .../snips/go/z_v0/bulk_imports/index.ts | 3 -
 .../snips/go/z_v0/bulk_imports/main.ts | 11 -
 .../snips/go/z_v0/bulk_workflows/index.ts | 5 -
 .../snips/go/z_v0/bulk_workflows/main.ts | 11 -
 .../snips/go/z_v0/bulk_workflows/run.ts | 11 -
 .../snips/go/z_v0/cancellation/index.ts | 5 -
 .../snips/go/z_v0/cancellation/main.ts | 11 -
 .../snips/go/z_v0/cancellation/run.ts | 11 -
 .../generated/snips/go/z_v0/compute/index.ts | 3 -
 .../generated/snips/go/z_v0/compute/main.ts | 11 -
 .../snips/go/z_v0/concurrency/index.ts | 3 -
 .../snips/go/z_v0/concurrency/main.ts | 11 -
 .../snips/go/z_v0/cron-programmatic/index.ts | 3 -
 .../snips/go/z_v0/cron-programmatic/main.ts | 24 -
 .../lib/generated/snips/go/z_v0/cron/index.ts | 3 -
 .../lib/generated/snips/go/z_v0/cron/main.ts | 16 -
 .../lib/generated/snips/go/z_v0/dag/index.ts | 3 -
 .../lib/generated/snips/go/z_v0/dag/main.ts | 11 -
 .../snips/go/z_v0/deprecated/index.ts | 9 -
 .../z_v0/deprecated/requeue/.hatchet/index.ts | 3 -
 .../requeue/.hatchet/job-requeue-workflow.ts | 11 -
 .../snips/go/z_v0/deprecated/requeue/index.ts | 5 -
 .../snips/go/z_v0/deprecated/requeue/main.ts | 11 -
 .../schedule-timeout/.hatchet/index.ts | 3 -
 .../.hatchet/schedule-timeout-workflow.ts | 11 -
 .../z_v0/deprecated/schedule-timeout/index.ts | 5 -
 .../z_v0/deprecated/schedule-timeout/main.ts | 11 -
 .../z_v0/deprecated/timeout/.hatchet/index.ts | 5 -
 .../timeout/.hatchet/job-timeout-workflow.ts | 11 -
 .../timeout/.hatchet/step-timeout-workflow.ts | 11 -
 .../snips/go/z_v0/deprecated/timeout/index.ts | 5 -
 .../snips/go/z_v0/deprecated/timeout/main.ts | 11 -
 .../go/z_v0/deprecated/yaml/.hatchet/index.ts | 3 -
 .../yaml/.hatchet/sample-workflow.ts | 11 -
 .../snips/go/z_v0/deprecated/yaml/README.ts | 11 -
 .../snips/go/z_v0/deprecated/yaml/index.ts | 7 -
 .../snips/go/z_v0/deprecated/yaml/main.ts | 11 -
 .../snips/go/z_v0/errors-test/index.ts | 3 -
 .../snips/go/z_v0/errors-test/main.ts | 11 -
 .../docs/lib/generated/snips/go/z_v0/index.ts | 61 -
 .../cancel-in-progress/index.ts | 3 -
 .../cancel-in-progress/main.ts | 11 -
 .../group-round-robin-advanced/index.ts | 3 -
 .../group-round-robin-advanced/main.ts | 11 -
 .../group-round-robin/index.ts | 3 -
 .../group-round-robin/main.ts | 11 -
 .../snips/go/z_v0/limit-concurrency/index.ts | 7 -
 .../generated/snips/go/z_v0/logging/index.ts | 3 -
 .../generated/snips/go/z_v0/logging/main.ts | 11 -
 .../snips/go/z_v0/manual-trigger/index.ts | 5 -
 .../go/z_v0/manual-trigger/trigger/index.ts | 3 -
 .../go/z_v0/manual-trigger/trigger/main.ts | 11 -
 .../go/z_v0/manual-trigger/worker/index.ts | 3 -
 .../go/z_v0/manual-trigger/worker/main.ts | 11 -
 .../snips/go/z_v0/middleware/index.ts | 5 -
 .../snips/go/z_v0/middleware/main.ts | 11 -
 .../generated/snips/go/z_v0/middleware/run.ts | 11 -
 .../snips/go/z_v0/namespaced/index.ts | 3 -
 .../snips/go/z_v0/namespaced/main.ts | 11 -
 .../generated/snips/go/z_v0/no-tls/index.ts | 3 -
 .../generated/snips/go/z_v0/no-tls/main.ts | 11 -
 .../snips/go/z_v0/on-failure/index.ts | 3 -
 .../snips/go/z_v0/on-failure/main.ts | 16 -
 .../snips/go/z_v0/procedural/index.ts | 3 -
 .../snips/go/z_v0/procedural/main.ts | 11 -
 .../snips/go/z_v0/rate-limit/index.ts | 3 -
 .../snips/go/z_v0/rate-limit/main.ts | 11 -
 .../snips/go/z_v0/register-action/index.ts | 3 -
 .../snips/go/z_v0/register-action/main.ts | 11 -
 .../go/z_v0/retries-with-backoff/index.ts | 3 -
 .../go/z_v0/retries-with-backoff/main.ts | 16 -
 .../generated/snips/go/z_v0/retries/index.ts | 3 -
 .../generated/snips/go/z_v0/retries/main.ts | 11 -
 .../snips/go/z_v0/scheduled/index.ts | 3 -
 .../generated/snips/go/z_v0/scheduled/main.ts | 24 -
 .../generated/snips/go/z_v0/simple/index.ts | 3 -
 .../generated/snips/go/z_v0/simple/main.ts | 11 -
 .../go/z_v0/stream-event-by-meta/index.ts | 3 -
 .../go/z_v0/stream-event-by-meta/main.ts | 11 -
 .../snips/go/z_v0/stream-event/index.ts | 3 -
 .../snips/go/z_v0/stream-event/main.ts | 11 -
 .../generated/snips/go/z_v0/timeout/index.ts | 5 -
 .../generated/snips/go/z_v0/timeout/main.ts | 16 -
 .../generated/snips/go/z_v0/timeout/run.ts | 11 -
 .../generated/snips/go/z_v0/webhook/index.ts | 5 -
 .../generated/snips/go/z_v0/webhook/main.ts | 11 -
 .../generated/snips/go/z_v0/webhook/run.ts | 11 -
 frontend/docs/lib/generated/snips/index.ts | 9 -
 .../lib/generated/snips/python/__init__.ts | 11 -
 .../snips/python/affinity_workers/index.ts | 5 -
 .../snips/python/affinity_workers/trigger.ts | 11 -
 .../snips/python/affinity_workers/worker.ts | 24 -
 .../lib/generated/snips/python/api/api.ts | 11 -
 .../generated/snips/python/api/async_api.ts | 11 -
 .../lib/generated/snips/python/api/index.ts | 5 -
 .../blocked_async/blocking_example_trigger.ts | 16 -
 .../blocked_async/blocking_example_worker.ts | 16 -
 .../snips/python/blocked_async/debugging.ts | 24 -
 .../snips/python/blocked_async/index.ts | 11 -
 .../snips/python/blocked_async/trigger.ts | 11 -
 .../snips/python/blocked_async/worker.ts | 11 -
 .../snips/python/bulk_fanout/bulk_trigger.ts | 11 -
 .../snips/python/bulk_fanout/index.ts | 11 -
 .../snips/python/bulk_fanout/stream.ts | 11 -
 .../python/bulk_fanout/test_bulk_fanout.ts | 11 -
 .../snips/python/bulk_fanout/trigger.ts | 11 -
 .../snips/python/bulk_fanout/worker.ts | 16 -
 .../snips/python/bulk_operations/cancel.ts | 28 -
 .../snips/python/bulk_operations/index.ts | 9 -
 .../snips/python/bulk_operations/replay.ts | 28 -
 .../bulk_operations/test_bulk_replay.ts | 11 -
 .../snips/python/bulk_operations/worker.ts | 11 -
 .../snips/python/cancellation/index.ts | 7 -
 .../python/cancellation/test_cancellation.ts | 11 -
 .../snips/python/cancellation/trigger.ts | 11 -
 .../snips/python/cancellation/worker.ts | 20 -
 .../lib/generated/snips/python/child/bulk.ts | 24 -
 .../lib/generated/snips/python/child/index.ts | 9 -
 .../snips/python/child/simple-fanout.ts | 16 -
 .../generated/snips/python/child/trigger.ts | 28 -
 .../generated/snips/python/child/worker.ts | 16 -
 .../snips/python/concurrency_limit/index.ts | 5 -
 .../snips/python/concurrency_limit/trigger.ts | 11 -
 .../snips/python/concurrency_limit/worker.ts | 16 -
 .../python/concurrency_limit_rr/index.ts | 7 -
 .../test_concurrency_limit_rr.ts | 11 -
 .../python/concurrency_limit_rr/trigger.ts | 11 -
 .../python/concurrency_limit_rr/worker.ts | 16 -
 .../python/concurrency_limit_rr_load/event.ts | 11 -
 .../python/concurrency_limit_rr_load/index.ts | 5 -
 .../concurrency_limit_rr_load/worker.ts | 11 -
 .../python/concurrency_multiple_keys/index.ts | 5 -
 .../test_multiple_concurrency_keys.ts | 11 -
 .../concurrency_multiple_keys/worker.ts | 16 -
 .../concurrency_workflow_level/index.ts | 5 -
 .../test_workflow_level_concurrency.ts | 11 -
 .../concurrency_workflow_level/worker.ts | 16 -
 .../snips/python/conditions/index.ts | 7 -
 .../python/conditions/test_conditions.ts | 11 -
 .../snips/python/conditions/trigger.ts | 11 -
 .../snips/python/conditions/worker.ts | 44 -
 .../lib/generated/snips/python/cron/index.ts | 7 -
 .../snips/python/cron/programatic-async.ts | 28 -
 .../snips/python/cron/programatic-sync.ts | 28 -
 .../snips/python/cron/workflow-definition.ts | 16 -
 .../lib/generated/snips/python/dag/index.ts | 7 -
 .../generated/snips/python/dag/test_dag.ts | 11 -
 .../lib/generated/snips/python/dag/trigger.ts | 11 -
 .../lib/generated/snips/python/dag/worker.ts | 11 -
 .../generated/snips/python/dedupe/index.ts | 3 -
 .../generated/snips/python/dedupe/worker.ts | 11 -
 .../generated/snips/python/delayed/index.ts | 7 -
 .../snips/python/delayed/test_delayed.ts | 11 -
 .../generated/snips/python/delayed/trigger.ts | 11 -
 .../generated/snips/python/delayed/worker.ts | 11 -
 .../python/dependency_injection/index.ts | 5 -
 .../test_dependency_injection.ts | 11 -
 .../python/dependency_injection/worker.ts | 20 -
 .../generated/snips/python/durable/index.ts | 7 -
 .../snips/python/durable/test_durable.ts | 11 -
 .../generated/snips/python/durable/trigger.ts | 11 -
 .../generated/snips/python/durable/worker.ts | 24 -
 .../snips/python/durable_event/index.ts | 5 -
 .../snips/python/durable_event/trigger.ts | 11 -
 .../snips/python/durable_event/worker.ts | 20 -
 .../snips/python/durable_sleep/index.ts | 5 -
 .../snips/python/durable_sleep/trigger.ts | 11 -
 .../snips/python/durable_sleep/worker.ts | 16 -
 .../generated/snips/python/events/event.ts | 16 -
 .../generated/snips/python/events/filter.ts | 24 -
 .../generated/snips/python/events/index.ts | 9 -
 .../snips/python/events/test_event.ts | 11 -
 .../generated/snips/python/events/worker.ts | 24 -
 .../generated/snips/python/fanout/index.ts | 11 -
 .../generated/snips/python/fanout/stream.ts | 11 -
 .../snips/python/fanout/sync_stream.ts | 11 -
 .../snips/python/fanout/test_fanout.ts | 11 -
 .../generated/snips/python/fanout/trigger.ts | 11 -
 .../generated/snips/python/fanout/worker.ts | 20 -
 .../snips/python/fanout_sync/index.ts | 7 -
 .../python/fanout_sync/test_fanout_sync.ts | 11 -
 .../snips/python/fanout_sync/trigger.ts | 11 -
 .../snips/python/fanout_sync/worker.ts | 11 -
 .../generated/snips/python/hatchet_client.ts | 11 -
 .../docs/lib/generated/snips/python/index.ts | 99 -
 .../generated/snips/python/lifespans/index.ts | 9 -
 .../snips/python/lifespans/simple.ts | 16 -
 .../snips/python/lifespans/test_lifespans.ts | 11 -
 .../snips/python/lifespans/trigger.ts | 11 -
 .../snips/python/lifespans/worker.ts | 20 -
 .../generated/snips/python/logger/client.ts | 16 -
 .../generated/snips/python/logger/index.ts | 11 -
 .../snips/python/logger/test_logger.ts | 11 -
 .../generated/snips/python/logger/trigger.ts | 11 -
 .../generated/snips/python/logger/worker.ts | 11 -
 .../generated/snips/python/logger/workflow.ts | 20 -
 .../snips/python/manual_slot_release/index.ts | 3 -
 .../python/manual_slot_release/worker.ts | 16 -
 .../snips/python/migration_guides/__init__.ts | 11 -
 .../python/migration_guides/hatchet_client.ts | 11 -
 .../snips/python/migration_guides/index.ts | 7 -
 .../snips/python/migration_guides/mergent.ts | 36 -
 .../snips/python/non_retryable/index.ts | 7 -
 .../python/non_retryable/test_no_retry.ts | 11 -
 .../snips/python/non_retryable/trigger.ts | 11 -
 .../snips/python/non_retryable/worker.ts | 16 -
 .../snips/python/on_failure/index.ts | 7 -
 .../python/on_failure/test_on_failure.ts | 11 -
 .../snips/python/on_failure/trigger.ts | 11 -
 .../snips/python/on_failure/worker.ts | 20 -
 .../snips/python/on_success/index.ts | 5 -
 .../snips/python/on_success/trigger.ts | 11 -
 .../snips/python/on_success/worker.ts | 11 -
 .../opentelemetry_instrumentation/client.ts | 11 -
 .../opentelemetry_instrumentation/index.ts | 11 -
 .../langfuse/client.ts | 20 -
 .../langfuse/index.ts | 7 -
 .../langfuse/trigger.ts | 16 -
 .../langfuse/worker.ts | 16 -
 .../opentelemetry_instrumentation/tracer.ts | 11 -
 .../opentelemetry_instrumentation/triggers.ts | 11 -
 .../opentelemetry_instrumentation/worker.ts | 11 -
 .../generated/snips/python/priority/index.ts | 7 -
 .../snips/python/priority/test_priority.ts | 11 -
 .../snips/python/priority/trigger.ts | 20 -
 .../generated/snips/python/priority/worker.ts | 16 -
 .../snips/python/quickstart/README.ts | 11 -
 .../snips/python/quickstart/__init__.ts | 11 -
 .../snips/python/quickstart/gitignore.ts | 11 -
 .../snips/python/quickstart/hatchet_client.ts | 11 -
 .../snips/python/quickstart/index.ts | 19 -
 .../snips/python/quickstart/poetry.ts | 11 -
 .../snips/python/quickstart/pyproject.ts | 11 -
 .../generated/snips/python/quickstart/run.ts | 11 -
 .../snips/python/quickstart/worker.ts | 11 -
 .../python/quickstart/workflows/__init__.ts | 11 -
 .../python/quickstart/workflows/first_task.ts | 11 -
 .../python/quickstart/workflows/index.ts | 5 -
 .../snips/python/rate_limit/dynamic.ts | 11 -
 .../snips/python/rate_limit/index.ts | 7 -
 .../snips/python/rate_limit/trigger.ts | 11 -
 .../snips/python/rate_limit/worker.ts | 24 -
 .../generated/snips/python/retries/index.ts | 3 -
 .../generated/snips/python/retries/worker.ts | 24 -
 .../snips/python/return_exceptions/index.ts | 5 -
 .../test_return_exceptions.ts | 11 -
 .../snips/python/return_exceptions/worker.ts | 11 -
 .../generated/snips/python/scheduled/index.ts | 5 -
 .../python/scheduled/programatic-async.ts | 28 -
 .../python/scheduled/programatic-sync.ts | 28 -
 .../generated/snips/python/simple/index.ts | 7 -
 .../python/simple/test_simple_workflow.ts | 11 -
 .../generated/snips/python/simple/trigger.ts | 11 -
 .../generated/snips/python/simple/worker.ts | 16 -
 .../snips/python/sticky_workers/event.ts | 11 -
 .../snips/python/sticky_workers/index.ts | 5 -
 .../snips/python/sticky_workers/worker.ts | 20 -
 .../snips/python/streaming/async_stream.ts | 16 -
 .../snips/python/streaming/fastapi_proxy.ts | 16 -
 .../generated/snips/python/streaming/index.ts | 9 -
 .../snips/python/streaming/test_streaming.ts | 11 -
 .../snips/python/streaming/worker.ts | 16 -
 .../generated/snips/python/timeout/index.ts | 7 -
 .../snips/python/timeout/test_timeout.ts | 11 -
 .../generated/snips/python/timeout/trigger.ts | 11 -
 .../generated/snips/python/timeout/worker.ts | 24 -
 .../snips/python/unit_testing/index.ts | 5 -
 .../snips/python/unit_testing/test_unit.ts | 11 -
 .../snips/python/unit_testing/workflows.ts | 11 -
 .../generated/snips/python/webhooks/index.ts | 5 -
 .../snips/python/webhooks/test_webhooks.ts | 11 -
 .../generated/snips/python/webhooks/worker.ts | 16 -
 .../docs/lib/generated/snips/python/worker.ts | 11 -
 .../python/worker_existing_loop/index.ts | 3 -
 .../python/worker_existing_loop/worker.ts | 11 -
 .../python/workflow_registration/index.ts | 3 -
 .../python/workflow_registration/worker.ts | 16 -
 frontend/docs/lib/generated/snips/types.ts | 31 -
 .../snips/typescript/cancellations/index.ts | 7 -
 .../snips/typescript/cancellations/run.ts | 16 -
 .../snips/typescript/cancellations/worker.ts | 16 -
 .../typescript/cancellations/workflow.ts | 20 -
 .../snips/typescript/child_workflows/index.ts | 7 -
 .../snips/typescript/child_workflows/run.ts | 11 -
 .../typescript/child_workflows/worker.ts | 11 -
 .../typescript/child_workflows/workflow.ts | 20 -
 .../snips/typescript/concurrency-rr/index.ts | 9 -
 .../snips/typescript/concurrency-rr/load.ts | 11 -
 .../snips/typescript/concurrency-rr/run.ts | 11 -
 .../snips/typescript/concurrency-rr/worker.ts | 11 -
 .../typescript/concurrency-rr/workflow.ts | 20 -
 .../generated/snips/typescript/dag/index.ts | 9 -
 .../typescript/dag/interface-workflow.ts | 11 -
 .../lib/generated/snips/typescript/dag/run.ts | 11 -
 .../generated/snips/typescript/dag/worker.ts | 11 -
 .../snips/typescript/dag/workflow.ts | 16 -
 .../dag_match_condition/complex-workflow.ts | 40 -
 .../typescript/dag_match_condition/event.ts | 11 -
 .../typescript/dag_match_condition/index.ts | 11 -
 .../typescript/dag_match_condition/run.ts | 11 -
 .../typescript/dag_match_condition/worker.ts | 11 -
 .../dag_match_condition/workflow.ts | 11 -
 .../generated/snips/typescript/deep/index.ts | 7 -
 .../generated/snips/typescript/deep/run.ts | 11 -
 .../generated/snips/typescript/deep/worker.ts | 11 -
 .../snips/typescript/deep/workflow.ts | 11 -
 .../snips/typescript/durable-event/event.ts | 11 -
 .../snips/typescript/durable-event/index.ts | 9 -
 .../snips/typescript/durable-event/run.ts | 11 -
 .../snips/typescript/durable-event/worker.ts | 11 -
 .../typescript/durable-event/workflow.ts | 20 -
 .../snips/typescript/durable-sleep/event.ts | 11 -
 .../snips/typescript/durable-sleep/index.ts | 9 -
 .../snips/typescript/durable-sleep/run.ts | 11 -
 .../snips/typescript/durable-sleep/worker.ts | 11 -
 .../typescript/durable-sleep/workflow.ts | 16 -
 .../snips/typescript/hatchet-client.ts | 11 -
 .../typescript/high-memory/child-worker.ts | 11 -
 .../snips/typescript/high-memory/index.ts | 9 -
 .../typescript/high-memory/parent-worker.ts | 11 -
 .../snips/typescript/high-memory/run.ts | 11 -
 .../high-memory/workflow-with-child.ts | 11 -
 .../lib/generated/snips/typescript/index.ts | 59 -
 .../snips/typescript/inferred-typing/index.ts | 7 -
 .../snips/typescript/inferred-typing/run.ts | 11 -
 .../typescript/inferred-typing/worker.ts | 11 -
 .../typescript/inferred-typing/workflow.ts | 11 -
 .../landing_page/durable-excution.ts | 20 -
 .../landing_page/event-signaling.ts | 16 -
 .../typescript/landing_page/flow-control.ts | 16 -
 .../snips/typescript/landing_page/index.ts | 13 -
 .../snips/typescript/landing_page/queues.ts | 20 -
 .../typescript/landing_page/scheduling.ts | 16 -
 .../typescript/landing_page/task-routing.ts | 16 -
 .../snips/typescript/legacy/index.ts | 7 -
 .../generated/snips/typescript/legacy/run.ts | 11 -
 .../snips/typescript/legacy/worker.ts | 11 -
 .../snips/typescript/legacy/workflow.ts | 11 -
 .../migration-guides/hatchet-client.ts | 11 -
 .../typescript/migration-guides/index.ts | 5 -
 .../typescript/migration-guides/mergent.ts | 36 -
 .../multiple_wf_concurrency/index.ts | 7 -
 .../typescript/multiple_wf_concurrency/run.ts | 11 -
 .../multiple_wf_concurrency/worker.ts | 11 -
 .../multiple_wf_concurrency/workflow.ts | 16 -
 .../snips/typescript/non_retryable/index.ts | 7 -
 .../snips/typescript/non_retryable/run.ts | 11 -
 .../snips/typescript/non_retryable/worker.ts | 11 -
 .../typescript/non_retryable/workflow.ts | 16 -
 .../snips/typescript/on_cron/index.ts | 5 -
 .../snips/typescript/on_cron/worker.ts | 11 -
 .../snips/typescript/on_cron/workflow.ts | 16 -
 .../snips/typescript/on_event/event.e2e.ts | 11 -
 .../snips/typescript/on_event/event.ts | 16 -
 .../snips/typescript/on_event/filter.ts | 24 -
 .../snips/typescript/on_event/index.ts | 11 -
 .../snips/typescript/on_event/worker.ts | 11 -
 .../snips/typescript/on_event/workflow.ts | 24 -
 .../snips/typescript/on_failure/index.ts | 7 -
 .../snips/typescript/on_failure/run.ts | 11 -
 .../snips/typescript/on_failure/worker.ts | 11 -
 .../snips/typescript/on_failure/workflow.ts | 16 -
 .../snips/typescript/on_success/index.ts | 7 -
 .../snips/typescript/on_success/run.ts | 11 -
 .../snips/typescript/on_success/worker.ts | 11 -
 .../snips/typescript/on_success/workflow.ts | 16 -
 .../snips/typescript/priority/index.ts | 7 -
 .../snips/typescript/priority/run.ts | 20 -
 .../snips/typescript/priority/worker.ts | 11 -
 .../snips/typescript/priority/workflow.ts | 20 -
 .../snips/typescript/quickstart/gitignore.ts | 11 -
 .../typescript/quickstart/hatchet-client.ts | 20 -
 .../snips/typescript/quickstart/index.ts | 11 -
 .../snips/typescript/quickstart/run.ts | 11 -
 .../snips/typescript/quickstart/worker.ts | 11 -
 .../quickstart/workflows/first-task.ts | 11 -
 .../typescript/quickstart/workflows/index.ts | 3 -
 .../snips/typescript/rate_limit/index.ts | 3 -
 .../snips/typescript/rate_limit/workflow.ts | 24 -
 .../snips/typescript/retries/index.ts | 7 -
 .../generated/snips/typescript/retries/run.ts | 11 -
 .../snips/typescript/retries/worker.ts | 11 -
 .../snips/typescript/retries/workflow.ts | 24 -
 .../generated/snips/typescript/simple/bulk.ts | 20 -
 .../snips/typescript/simple/client-run.ts | 16 -
 .../generated/snips/typescript/simple/cron.ts | 24 -
 .../snips/typescript/simple/delay.ts | 11 -
 .../snips/typescript/simple/enqueue.ts | 20 -
 .../snips/typescript/simple/index.ts | 23 -
 .../generated/snips/typescript/simple/run.ts | 24 -
 .../snips/typescript/simple/schedule.ts | 24 -
 .../snips/typescript/simple/stub-workflow.ts | 16 -
 .../snips/typescript/simple/worker.ts | 16 -
 .../typescript/simple/workflow-with-child.ts | 16 -
 .../snips/typescript/simple/workflow.ts | 16 -
 .../snips/typescript/sticky/index.ts | 7 -
 .../generated/snips/typescript/sticky/run.ts | 11 -
 .../snips/typescript/sticky/worker.ts | 11 -
 .../snips/typescript/sticky/workflow.ts | 16 -
 .../snips/typescript/streaming/index.ts | 9 -
 .../typescript/streaming/nextjs-proxy.ts | 16 -
 .../snips/typescript/streaming/run.ts | 16 -
 .../snips/typescript/streaming/worker.ts | 11 -
 .../snips/typescript/streaming/workflow.ts | 16 -
 .../snips/typescript/timeouts/index.ts | 7 -
 .../snips/typescript/timeouts/run.ts | 16 -
 .../snips/typescript/timeouts/worker.ts | 16 -
 .../snips/typescript/timeouts/workflow.ts | 16 -
 .../snips/typescript/with_timeouts/index.ts | 3 -
 .../typescript/with_timeouts/workflow.ts | 20 -
 frontend/docs/package.json | 4 +-
 .../docs/pages/home/compute/auto-scaling.mdx | 2 -
 frontend/docs/pages/home/compute/cpu.mdx | 3 -
 .../home/compute/environment-variables.mdx | 5 -
 frontend/docs/pages/home/compute/git-ops.mdx | 7 -
 frontend/docs/pages/home/compute/gpu.mdx | 3 -
 .../pages/self-hosting/prometheus-metrics.mdx | 5 -
 frontend/docs/pnpm-lock.yaml | 944 ++---
 frontend/snips/package.json | 15 +-
 frontend/snips/pnpm-lock.yaml | 40 +-
 .../processors/snippets/snippet.processor.ts | 17 +-
 frontend/snips/src/scripts/build-tree.test.ts | 2 +-
 frontend/snips/src/scripts/build-tree.ts | 6 +-
 frontend/snips/src/scripts/clean-build.ts | 4 +-
 frontend/snips/tsconfig.json | 3 +-
 hack/oas/generate-clients.sh | 2 +-
 981 files changed, 2365 insertions(+), 15287 deletions(-)

 create mode 100644 .github/workflows/gen-examples.yml
 create mode 100644 examples/python/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress.py
 create mode 100644 examples/python/concurrency_cancel_in_progress/worker.py
 create mode 100644 examples/python/concurrency_cancel_newest/test_concurrency_cancel_newest.py
 create mode 100644 examples/python/concurrency_cancel_newest/worker.py
 delete mode 100644 frontend/app/src/next/lib/docs/generated/_meta.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/blog/_meta.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/contributing/_meta.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/home/_meta.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/home/compute/_meta.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/sdks/_meta.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/sdks/python/_meta.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/sdks/python/feature-clients/_meta.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/self-hosting/_meta.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/bulk-operations/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/bulk-operations/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/hatchet-client.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/mergent.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/run/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/run/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/worker/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/worker/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/hatchet_client/hatchet_client.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/hatchet_client/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/workflows/first_task.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/quickstart/workflows/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/run/all.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/run/bulk.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/run/cron.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/run/event.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/run/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/run/priority.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/run/simple.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/streaming/consumer/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/streaming/consumer/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/streaming/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/streaming/server/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/streaming/server/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/streaming/shared/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/streaming/shared/task.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/streaming/worker/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/streaming/worker/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/worker/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/worker/start.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/cancellations.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/child-workflows.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/complex-conditions.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/concurrency-rr.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/dag-with-conditions.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/dag.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/durable-event.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/durable-sleep.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/non-retryable-error.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-cron.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-event.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-failure.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/priority.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/ratelimit.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/retries.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/simple.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/sticky.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/workflows/timeouts.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/run.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/run.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_imports/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_imports/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/run.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/run.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/concurrency/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/concurrency/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron-programmatic/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron-programmatic/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/.hatchet/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/.hatchet/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/README.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/trigger/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/trigger/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/worker/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/worker/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/run.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/no-tls/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/no-tls/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/on-failure/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/on-failure/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/procedural/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/procedural/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/rate-limit/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/rate-limit/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries-with-backoff/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries-with-backoff/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/scheduled/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/scheduled/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event-by-meta/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event-by-meta/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/run.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/main.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/run.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/__init__.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/trigger.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/worker.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/api/api.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/api/async_api.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/api/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/blocking_example_trigger.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/blocking_example_worker.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/debugging.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/trigger.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/worker.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/bulk_trigger.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/stream.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/test_bulk_fanout.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/trigger.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/worker.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/cancel.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/replay.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/test_bulk_replay.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/worker.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/cancellation/index.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/cancellation/test_cancellation.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/cancellation/trigger.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/cancellation/worker.ts
 delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/child/bulk.ts
 delete mode 100644
frontend/app/src/next/lib/docs/generated/snips/python/child/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/child/simple-fanout.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/child/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/child/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/test_concurrency_limit_rr.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/event.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/test_multiple_concurrency_keys.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/test_workflow_level_concurrency.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/conditions/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/conditions/test_conditions.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/conditions/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/conditions/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/cron/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/cron/programatic-async.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/cron/programatic-sync.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/cron/workflow-definition.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/dag/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/dag/test_dag.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/dag/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/dag/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/dedupe/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/dedupe/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/delayed/index.ts delete mode 100644 
frontend/app/src/next/lib/docs/generated/snips/python/delayed/test_delayed.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/delayed/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/delayed/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/test_dependency_injection.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable/test_durable.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable_event/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable_event/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable_event/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/events/event.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/events/filter.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/events/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/events/test_event.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/events/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout/stream.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout/sync_stream.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout/test_fanout.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/test_fanout_sync.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/hatchet_client.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/lifespans/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/lifespans/simple.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/lifespans/test_lifespans.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/lifespans/trigger.ts delete mode 100644 
frontend/app/src/next/lib/docs/generated/snips/python/lifespans/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/logger/client.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/logger/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/logger/test_logger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/logger/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/logger/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/logger/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/manual_slot_release/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/manual_slot_release/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/__init__.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/hatchet_client.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/mergent.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/test_no_retry.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/on_failure/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/on_failure/test_on_failure.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/on_failure/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/on_failure/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/on_success/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/on_success/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/on_success/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/client.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/client.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/tracer.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/triggers.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/priority/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/priority/test_priority.ts delete mode 
100644 frontend/app/src/next/lib/docs/generated/snips/python/priority/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/priority/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/README.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/__init__.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/gitignore.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/hatchet_client.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/poetry.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/pyproject.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/__init__.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/first_task.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/dynamic.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/retries/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/retries/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/test_return_exceptions.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/scheduled/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/scheduled/programatic-async.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/scheduled/programatic-sync.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/simple/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/simple/test_simple_workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/simple/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/simple/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/event.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/streaming/async_stream.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/streaming/fastapi_proxy.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/streaming/index.ts delete mode 100644 
frontend/app/src/next/lib/docs/generated/snips/python/streaming/test_streaming.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/streaming/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/timeout/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/timeout/test_timeout.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/timeout/trigger.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/timeout/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/test_unit.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/workflows.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/webhooks/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/webhooks/test_webhooks.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/webhooks/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/worker_existing_loop/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/worker_existing_loop/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/workflow_registration/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/workflow_registration/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/types.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/load.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag/interface-workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag/workflow.ts delete mode 100644 
frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/complex-workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/event.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/deep/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/deep/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/deep/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/deep/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/event.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/event.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/hatchet-client.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/child-worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/parent-worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/workflow-with-child.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/durable-excution.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/event-signaling.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/flow-control.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/index.ts delete mode 100644 
frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/queues.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/scheduling.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/task-routing.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/hatchet-client.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/mergent.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.e2e.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/filter.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/workflow.ts delete mode 100644 
frontend/app/src/next/lib/docs/generated/snips/typescript/priority/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/priority/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/priority/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/priority/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/gitignore.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/hatchet-client.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/workflows/first-task.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/workflows/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/rate_limit/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/rate_limit/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/retries/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/retries/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/retries/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/retries/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/bulk.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/client-run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/cron.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/delay.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/enqueue.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/schedule.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/stub-workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/workflow-with-child.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/simple/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/nextjs-proxy.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/run.ts delete mode 100644 
frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/run.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/worker.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/with_timeouts/index.ts delete mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/with_timeouts/workflow.ts delete mode 100644 frontend/app/src/next/lib/docs/snips.ts delete mode 100644 frontend/app/src/next/lib/docs/sync-docs.ts delete mode 100644 frontend/docs/lib/generated/snips/go/bulk-operations/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/bulk-operations/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/migration-guides/hatchet-client.ts delete mode 100644 frontend/docs/lib/generated/snips/go/migration-guides/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/migration-guides/mergent.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/cmd/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/cmd/run/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/cmd/run/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/cmd/worker/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/cmd/worker/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/hatchet_client/hatchet_client.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/hatchet_client/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/workflows/first_task.ts delete mode 100644 frontend/docs/lib/generated/snips/go/quickstart/workflows/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/run/all.ts delete mode 100644 frontend/docs/lib/generated/snips/go/run/bulk.ts delete mode 100644 frontend/docs/lib/generated/snips/go/run/cron.ts delete mode 100644 frontend/docs/lib/generated/snips/go/run/event.ts delete mode 100644 frontend/docs/lib/generated/snips/go/run/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/run/priority.ts delete mode 100644 frontend/docs/lib/generated/snips/go/run/simple.ts delete mode 100644 frontend/docs/lib/generated/snips/go/streaming/consumer/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/streaming/consumer/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/streaming/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/streaming/server/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/streaming/server/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/streaming/shared/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/streaming/shared/task.ts delete mode 100644 frontend/docs/lib/generated/snips/go/streaming/worker/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/streaming/worker/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/worker/index.ts delete mode 100644 
frontend/docs/lib/generated/snips/go/worker/start.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/cancellations.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/child-workflows.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/complex-conditions.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/concurrency-rr.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/dag-with-conditions.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/dag.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/durable-event.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/durable-sleep.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/non-retryable-error.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/on-cron.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/on-event.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/on-failure.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/priority.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/ratelimit.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/retries.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/simple.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/sticky.ts delete mode 100644 frontend/docs/lib/generated/snips/go/workflows/timeouts.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/run.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/run.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/bulk_imports/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/bulk_imports/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/run.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/cancellation/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/cancellation/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/cancellation/run.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/compute/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/compute/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/concurrency/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/concurrency/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/cron-programmatic/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/cron-programmatic/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/cron/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/cron/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/dag/index.ts delete mode 100644 
frontend/docs/lib/generated/snips/go/z_v0/dag/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/.hatchet/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/.hatchet/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/README.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/errors-test/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/errors-test/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/logging/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/logging/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/trigger/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/trigger/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/worker/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/worker/main.ts delete mode 100644 
frontend/docs/lib/generated/snips/go/z_v0/middleware/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/middleware/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/middleware/run.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/namespaced/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/namespaced/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/no-tls/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/no-tls/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/on-failure/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/on-failure/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/procedural/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/procedural/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/rate-limit/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/rate-limit/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/register-action/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/register-action/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/retries-with-backoff/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/retries-with-backoff/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/retries/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/retries/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/scheduled/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/scheduled/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/simple/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/simple/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/stream-event-by-meta/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/stream-event-by-meta/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/stream-event/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/stream-event/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/timeout/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/timeout/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/timeout/run.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/webhook/index.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/webhook/main.ts delete mode 100644 frontend/docs/lib/generated/snips/go/z_v0/webhook/run.ts delete mode 100644 frontend/docs/lib/generated/snips/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/__init__.ts delete mode 100644 frontend/docs/lib/generated/snips/python/affinity_workers/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/affinity_workers/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/affinity_workers/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/api/api.ts delete mode 100644 frontend/docs/lib/generated/snips/python/api/async_api.ts delete mode 100644 frontend/docs/lib/generated/snips/python/api/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/blocked_async/blocking_example_trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/blocked_async/blocking_example_worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/blocked_async/debugging.ts 
delete mode 100644 frontend/docs/lib/generated/snips/python/blocked_async/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/blocked_async/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/blocked_async/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_fanout/bulk_trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_fanout/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_fanout/stream.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_fanout/test_bulk_fanout.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_fanout/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_fanout/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_operations/cancel.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_operations/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_operations/replay.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_operations/test_bulk_replay.ts delete mode 100644 frontend/docs/lib/generated/snips/python/bulk_operations/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/cancellation/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/cancellation/test_cancellation.ts delete mode 100644 frontend/docs/lib/generated/snips/python/cancellation/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/cancellation/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/child/bulk.ts delete mode 100644 frontend/docs/lib/generated/snips/python/child/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/child/simple-fanout.ts delete mode 100644 frontend/docs/lib/generated/snips/python/child/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/child/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit_rr/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit_rr/test_concurrency_limit_rr.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit_rr/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit_rr/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/event.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/test_multiple_concurrency_keys.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_workflow_level/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_workflow_level/test_workflow_level_concurrency.ts delete mode 100644 frontend/docs/lib/generated/snips/python/concurrency_workflow_level/worker.ts delete mode 100644 
frontend/docs/lib/generated/snips/python/conditions/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/conditions/test_conditions.ts delete mode 100644 frontend/docs/lib/generated/snips/python/conditions/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/conditions/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/cron/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/cron/programatic-async.ts delete mode 100644 frontend/docs/lib/generated/snips/python/cron/programatic-sync.ts delete mode 100644 frontend/docs/lib/generated/snips/python/cron/workflow-definition.ts delete mode 100644 frontend/docs/lib/generated/snips/python/dag/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/dag/test_dag.ts delete mode 100644 frontend/docs/lib/generated/snips/python/dag/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/dag/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/dedupe/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/dedupe/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/delayed/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/delayed/test_delayed.ts delete mode 100644 frontend/docs/lib/generated/snips/python/delayed/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/delayed/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/dependency_injection/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/dependency_injection/test_dependency_injection.ts delete mode 100644 frontend/docs/lib/generated/snips/python/dependency_injection/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable/test_durable.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable_event/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable_event/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable_event/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable_sleep/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable_sleep/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/durable_sleep/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/events/event.ts delete mode 100644 frontend/docs/lib/generated/snips/python/events/filter.ts delete mode 100644 frontend/docs/lib/generated/snips/python/events/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/events/test_event.ts delete mode 100644 frontend/docs/lib/generated/snips/python/events/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/fanout/index.ts delete mode 100644 frontend/docs/lib/generated/snips/python/fanout/stream.ts delete mode 100644 frontend/docs/lib/generated/snips/python/fanout/sync_stream.ts delete mode 100644 frontend/docs/lib/generated/snips/python/fanout/test_fanout.ts delete mode 100644 frontend/docs/lib/generated/snips/python/fanout/trigger.ts delete mode 100644 frontend/docs/lib/generated/snips/python/fanout/worker.ts delete mode 100644 frontend/docs/lib/generated/snips/python/fanout_sync/index.ts delete mode 100644 
frontend/docs/lib/generated/snips/python/fanout_sync/test_fanout_sync.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/fanout_sync/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/fanout_sync/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/hatchet_client.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/lifespans/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/lifespans/simple.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/lifespans/test_lifespans.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/lifespans/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/lifespans/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/logger/client.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/logger/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/logger/test_logger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/logger/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/logger/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/logger/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/manual_slot_release/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/manual_slot_release/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/migration_guides/__init__.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/migration_guides/hatchet_client.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/migration_guides/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/migration_guides/mergent.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/non_retryable/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/non_retryable/test_no_retry.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/non_retryable/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/non_retryable/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/on_failure/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/on_failure/test_on_failure.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/on_failure/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/on_failure/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/on_success/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/on_success/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/on_success/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/client.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/client.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/tracer.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/triggers.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/priority/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/priority/test_priority.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/priority/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/priority/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/README.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/__init__.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/gitignore.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/hatchet_client.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/poetry.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/pyproject.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/workflows/__init__.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/workflows/first_task.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/quickstart/workflows/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/rate_limit/dynamic.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/rate_limit/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/rate_limit/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/rate_limit/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/retries/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/retries/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/return_exceptions/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/return_exceptions/test_return_exceptions.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/return_exceptions/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/scheduled/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/scheduled/programatic-async.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/scheduled/programatic-sync.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/simple/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/simple/test_simple_workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/simple/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/simple/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/sticky_workers/event.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/sticky_workers/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/sticky_workers/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/streaming/async_stream.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/streaming/fastapi_proxy.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/streaming/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/streaming/test_streaming.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/streaming/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/timeout/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/timeout/test_timeout.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/timeout/trigger.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/timeout/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/unit_testing/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/unit_testing/test_unit.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/unit_testing/workflows.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/webhooks/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/webhooks/test_webhooks.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/webhooks/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/worker_existing_loop/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/worker_existing_loop/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/workflow_registration/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/python/workflow_registration/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/types.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/cancellations/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/cancellations/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/cancellations/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/cancellations/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/child_workflows/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/child_workflows/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/child_workflows/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/child_workflows/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/concurrency-rr/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/concurrency-rr/load.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/concurrency-rr/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/concurrency-rr/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/concurrency-rr/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag/interface-workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag_match_condition/complex-workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag_match_condition/event.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag_match_condition/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag_match_condition/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag_match_condition/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/dag_match_condition/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/deep/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/deep/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/deep/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/deep/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-event/event.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-event/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-event/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-event/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-event/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-sleep/event.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-sleep/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-sleep/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-sleep/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/durable-sleep/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/hatchet-client.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/high-memory/child-worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/high-memory/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/high-memory/parent-worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/high-memory/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/high-memory/workflow-with-child.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/inferred-typing/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/inferred-typing/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/inferred-typing/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/inferred-typing/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/landing_page/durable-excution.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/landing_page/event-signaling.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/landing_page/flow-control.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/landing_page/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/landing_page/queues.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/landing_page/scheduling.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/landing_page/task-routing.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/legacy/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/legacy/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/legacy/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/legacy/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/migration-guides/hatchet-client.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/migration-guides/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/migration-guides/mergent.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/non_retryable/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/non_retryable/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/non_retryable/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/non_retryable/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_cron/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_cron/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_cron/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_event/event.e2e.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_event/event.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_event/filter.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_event/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_event/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_event/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_failure/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_failure/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_failure/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_failure/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_success/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_success/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_success/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/on_success/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/priority/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/priority/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/priority/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/priority/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/quickstart/gitignore.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/quickstart/hatchet-client.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/quickstart/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/quickstart/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/quickstart/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/quickstart/workflows/first-task.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/quickstart/workflows/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/rate_limit/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/rate_limit/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/retries/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/retries/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/retries/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/retries/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/bulk.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/client-run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/cron.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/delay.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/enqueue.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/schedule.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/stub-workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/workflow-with-child.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/simple/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/sticky/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/sticky/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/sticky/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/sticky/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/streaming/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/streaming/nextjs-proxy.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/streaming/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/streaming/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/streaming/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/timeouts/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/timeouts/run.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/timeouts/worker.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/timeouts/workflow.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/with_timeouts/index.ts
 delete mode 100644 frontend/docs/lib/generated/snips/typescript/with_timeouts/workflow.ts

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index e26bc330f..3b4e2d702 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -23,6 +23,9 @@ jobs:
       - name: Install dependencies
         working-directory: frontend/docs
         run: pnpm install --frozen-lockfile
+      - name: Generate snippets
+        working-directory: frontend/snips
+        run: pnpm install --frozen-lockfile && pnpm generate && pnpm run copy:docs
       - name: Lint
         working-directory: frontend/docs
         run: npm run lint:check
@@ -45,6 +48,9 @@
       - name: Install dependencies
         working-directory: frontend/docs
         run: pnpm install --frozen-lockfile
+      - name: Generate snippets
+        working-directory: frontend/snips
+        run: pnpm install --frozen-lockfile && pnpm generate && pnpm run copy:docs
       - name: Build
         working-directory: frontend/docs
         run: npm run build
diff --git a/.github/workflows/gen-examples.yml b/.github/workflows/gen-examples.yml
new file mode 100644
index 000000000..af87123b4
--- /dev/null
+++ b/.github/workflows/gen-examples.yml
@@ -0,0 +1,80 @@
+name: generate examples
+
+on:
+  push:
+    branches: [ main ]
+
+jobs:
+  generate:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set up Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: '22'
+
+      - uses: pnpm/action-setup@v4
+        with:
+          version: 10
+
+      - name: Generate
+        working-directory: frontend/snips
+        run: pnpm i && pnpm generate && pnpm run copy:examples
+
+      - name: Check for changes in examples directory
+        id: verify-changed-files
+        run: |
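+          # Guard rail: this job is only allowed to commit regenerated
+          # examples/ content. Anything else means the generator wrote outside
+          # its sandbox, so fail the job rather than auto-merge the result.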
+          # Check if there are any changes
+          if [ -n "$(git status --porcelain)" ]; then
+            CHANGED_FILES=$(git status --porcelain | awk '{print $2}')
+            NON_EXAMPLES_CHANGES=$(echo "$CHANGED_FILES" | grep -v "^examples/" || true)
+
+            if [ -n "$NON_EXAMPLES_CHANGES" ]; then
+              echo "Error: Changes detected outside of examples directory:"
+              echo "$NON_EXAMPLES_CHANGES"
+              echo "changed=false" >> $GITHUB_OUTPUT
+              exit 1
+            else
+              echo "changed=true" >> $GITHUB_OUTPUT
+            fi
+          else
+            echo "changed=false" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Create branch and commit changes
+        if: steps.verify-changed-files.outputs.changed == 'true'
+        run: |
+          git config --local user.email "action@github.com"
+          git config --local user.name "GitHub Action"
+          BRANCH_NAME="regenerate-examples-${{ github.sha }}"
+          git checkout -b "$BRANCH_NAME"
+          git add examples/
+          git commit -m "chore: regenerate examples"
+          git push origin "$BRANCH_NAME"
+          echo "branch_name=$BRANCH_NAME" >> $GITHUB_OUTPUT
+        id: create-branch
+
+      - name: Create Pull Request
+        if: steps.verify-changed-files.outputs.changed == 'true'
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          gh pr create \
+            --title "chore: regenerate examples" \
+            --body "Automated regeneration of examples from the main branch." \
+            --head "${{ steps.create-branch.outputs.branch_name }}" \
+            --base main
+          echo "pr_number=$(gh pr list --head ${{ steps.create-branch.outputs.branch_name }} --json number --jq '.[0].number')" >> $GITHUB_OUTPUT
+        id: create-pr
+
+      - name: Auto-approve and merge Pull Request
+        if: steps.verify-changed-files.outputs.changed == 'true'
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          gh pr review "${{ steps.create-branch.outputs.branch_name }}" --approve
+          gh pr merge "${{ steps.create-branch.outputs.branch_name }}" --squash
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index ac477b017..b68ed8b54 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -37,10 +37,6 @@ jobs:
           version: 9.15.4
           run_install: false
 
-      - name: Install Atlas
-        run: |
-          curl -sSf https://atlasgo.sh | sh
-
       - name: Compose
         run: docker compose up -d
 
diff --git a/.gitignore b/.gitignore
index 31a362275..897b22e6d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -95,3 +95,7 @@ rabbitmq.conf
 certs/
 
 openapitools.json
+
+# Generated docs content
+frontend/app/src/next/lib/docs/generated/
+frontend/docs/lib/generated/
diff --git a/Taskfile.yaml b/Taskfile.yaml
index b06c510e9..430e17e02 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -55,11 +55,7 @@ tasks:
         EOF
   pre:
     cmds:
-      # FIXME: Remove this once we have a better way to handle pre-commit reliably
-      - cd frontend/snips/ && pnpm i && pnpm generate && pnpm run copy:all
-      - cd frontend/app/ && pnpm run lint:fix
-      - pre-commit run --all-files
-      - pre-commit run --all-files
+      - cd frontend/app/ && pnpm run prettier:fix
       - pre-commit run --all-files
   start-db:
     cmds:
diff --git a/examples/python/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress.py b/examples/python/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress.py
new file mode 100644
index 000000000..ecf3dbc2e
--- /dev/null
+++ b/examples/python/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress.py
@@ -0,0 +1,47 @@
+import asyncio
+import time
+from uuid import uuid4
+
+import pytest
+
+from examples.concurrency_cancel_in_progress.worker import (
+    WorkflowInput,
+    concurrency_cancel_in_progress_workflow,
+)
+from hatchet_sdk import Hatchet, TriggerWorkflowOptions, V1TaskStatus, WorkflowRunRef
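+
+# What this test exercises (see the workflow definition in worker.py): ten runs
+# are queued into the same concurrency group "A" one second apart, and the
+# CANCEL_IN_PROGRESS strategy lets each new run preempt the one currently
+# executing, so only the final run should finish COMPLETED and the nine
+# earlier runs should end up CANCELLED.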
+
+
+@pytest.mark.asyncio(loop_scope="session")
+async def test_run(hatchet: Hatchet) -> None:
+    test_run_id = str(uuid4())
+    refs: list[WorkflowRunRef] = []
+
+    for i in range(10):
+        ref = await concurrency_cancel_in_progress_workflow.aio_run_no_wait(
+            WorkflowInput(group="A"),
+            options=TriggerWorkflowOptions(
+                additional_metadata={"test_run_id": test_run_id, "i": str(i)},
+            ),
+        )
+        refs.append(ref)
+        await asyncio.sleep(1)
+
+    for ref in refs:
+        print(f"Waiting for run {ref.workflow_run_id} to complete")
+        try:
+            await ref.aio_result()
+        except Exception:
+            continue
+
+    ## wait for the olap repo to catch up
+    await asyncio.sleep(5)
+
+    runs = sorted(
+        hatchet.runs.list(additional_metadata={"test_run_id": test_run_id}).rows,
+        key=lambda r: int((r.additional_metadata or {}).get("i", "0")),
+    )
+
+    assert len(runs) == 10
+    assert (runs[-1].additional_metadata or {}).get("i") == "9"
+    assert runs[-1].status == V1TaskStatus.COMPLETED
+    assert all(r.status == V1TaskStatus.CANCELLED for r in runs[:-1])
diff --git a/examples/python/concurrency_cancel_in_progress/worker.py b/examples/python/concurrency_cancel_in_progress/worker.py
new file mode 100644
index 000000000..f2a9dd653
--- /dev/null
+++ b/examples/python/concurrency_cancel_in_progress/worker.py
@@ -0,0 +1,39 @@
+import asyncio
+
+from pydantic import BaseModel
+
+from hatchet_sdk import (
+    ConcurrencyExpression,
+    ConcurrencyLimitStrategy,
+    Context,
+    Hatchet,
+)
+
+hatchet = Hatchet(debug=True)
+
+
+class WorkflowInput(BaseModel):
+    group: str
+
+
+concurrency_cancel_in_progress_workflow = hatchet.workflow(
+    name="ConcurrencyCancelInProgress",
+    concurrency=ConcurrencyExpression(
+        expression="input.group",
+        max_runs=1,
+        limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS,
+    ),
+    input_validator=WorkflowInput,
+)
+
+
+@concurrency_cancel_in_progress_workflow.task()
+async def step1(input: WorkflowInput, ctx: Context) -> None:
+    for _ in range(50):
+        await asyncio.sleep(0.10)
+
+
+@concurrency_cancel_in_progress_workflow.task(parents=[step1])
+async def step2(input: WorkflowInput, ctx: Context) -> None:
+    for _ in range(50):
+        await asyncio.sleep(0.10)
diff --git a/examples/python/concurrency_cancel_newest/test_concurrency_cancel_newest.py b/examples/python/concurrency_cancel_newest/test_concurrency_cancel_newest.py
new file mode 100644
index 000000000..3a78f1afb
--- /dev/null
+++ b/examples/python/concurrency_cancel_newest/test_concurrency_cancel_newest.py
@@ -0,0 +1,61 @@
+import asyncio
+import time
+from uuid import uuid4
+
+import pytest
+
+from examples.concurrency_cancel_newest.worker import (
+    WorkflowInput,
+    concurrency_cancel_newest_workflow,
+)
+from hatchet_sdk import Hatchet, TriggerWorkflowOptions, V1TaskStatus
+
+
+@pytest.mark.asyncio(loop_scope="session")
+async def test_run(hatchet: Hatchet) -> None:
+    test_run_id = str(uuid4())
+    to_run = await concurrency_cancel_newest_workflow.aio_run_no_wait(
+        WorkflowInput(group="A"),
+        options=TriggerWorkflowOptions(
+            additional_metadata={
+                "test_run_id": test_run_id,
+            },
+        ),
+    )
+    await asyncio.sleep(1)
+
+    to_cancel = await concurrency_cancel_newest_workflow.aio_run_many_no_wait(
+        [
+            concurrency_cancel_newest_workflow.create_bulk_run_item(
+                input=WorkflowInput(group="A"),
+                options=TriggerWorkflowOptions(
+                    additional_metadata={
+                        "test_run_id": test_run_id,
+                    },
+                ),
+            )
+            for _ in range(10)
+        ]
+    )
+
+    await to_run.aio_result()
+
+    for ref in to_cancel:
+        try:
+            await ref.aio_result()
+        except Exception:
+            pass
+
+    ## wait for the olap repo to catch up
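+    ## (the runs listing below is served from the OLAP-backed read API, which
+    ## can lag the engine by a few seconds, hence the fixed sleep before the
+    ## assertions)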
+    await asyncio.sleep(5)
+
+    successful_run = hatchet.runs.get(to_run.workflow_run_id)
+
+    assert successful_run.run.status == V1TaskStatus.COMPLETED
+    assert all(
+        r.status == V1TaskStatus.CANCELLED
+        for r in hatchet.runs.list(
+            additional_metadata={"test_run_id": test_run_id}
+        ).rows
+        if r.metadata.id != to_run.workflow_run_id
+    )
diff --git a/examples/python/concurrency_cancel_newest/worker.py b/examples/python/concurrency_cancel_newest/worker.py
new file mode 100644
index 000000000..d5798ead4
--- /dev/null
+++ b/examples/python/concurrency_cancel_newest/worker.py
@@ -0,0 +1,39 @@
+import asyncio
+
+from pydantic import BaseModel
+
+from hatchet_sdk import (
+    ConcurrencyExpression,
+    ConcurrencyLimitStrategy,
+    Context,
+    Hatchet,
+)
+
+hatchet = Hatchet(debug=True)
+
+
+class WorkflowInput(BaseModel):
+    group: str
+
+
+concurrency_cancel_newest_workflow = hatchet.workflow(
+    name="ConcurrencyCancelNewest",
+    concurrency=ConcurrencyExpression(
+        expression="input.group",
+        max_runs=1,
+        limit_strategy=ConcurrencyLimitStrategy.CANCEL_NEWEST,
+    ),
+    input_validator=WorkflowInput,
+)
+
+
+@concurrency_cancel_newest_workflow.task()
+async def step1(input: WorkflowInput, ctx: Context) -> None:
+    for _ in range(50):
+        await asyncio.sleep(0.10)
+
+
+@concurrency_cancel_newest_workflow.task(parents=[step1])
+async def step2(input: WorkflowInput, ctx: Context) -> None:
+    for _ in range(50):
+        await asyncio.sleep(0.10)
diff --git a/examples/python/dag/worker.py b/examples/python/dag/worker.py
index 8cbb0d30e..ea604bc5e 100644
--- a/examples/python/dag/worker.py
+++ b/examples/python/dag/worker.py
@@ -35,7 +35,7 @@ async def step3(input: EmptyModel, ctx: Context) -> RandomSum:
     one = ctx.task_output(step1).random_number
     two = ctx.task_output(step2).random_number
 
-    return RandomSum(sum=one + two)
+    return RandomSum(sum=one + two + 3)
 
 
 @dag_workflow.task(parents=[step1, step3])
diff --git a/examples/python/worker.py b/examples/python/worker.py
index 7475123d4..24b71b8bc 100644
--- a/examples/python/worker.py
+++ b/examples/python/worker.py
@@ -6,6 +6,10 @@ from examples.bulk_operations.worker import (
     bulk_replay_test_3,
 )
 from examples.cancellation.worker import cancellation_workflow
+from examples.concurrency_cancel_in_progress.worker import (
+    concurrency_cancel_in_progress_workflow,
+)
+from examples.concurrency_cancel_newest.worker import concurrency_cancel_newest_workflow
 from examples.concurrency_limit.worker import concurrency_limit_workflow
 from examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow
 from examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow
@@ -68,6 +72,8 @@ def main() -> None:
         sync_fanout_child,
         non_retryable_workflow,
         concurrency_workflow_level_workflow,
+        concurrency_cancel_newest_workflow,
+        concurrency_cancel_in_progress_workflow,
         di_workflow,
         lifespan_task,
         simple,
diff --git a/frontend/app/package.json b/frontend/app/package.json
index 215e8a6f7..112a386fe 100644
--- a/frontend/app/package.json
+++ b/frontend/app/package.json
@@ -5,7 +5,7 @@
   "type": "module",
   "packageManager": "pnpm@9.15.4",
   "scripts": {
-    "dev": "npm run sync-docs && npm run sync-examples && npm run prettier:fix && vite",
+    "dev": "vite",
     "build": "tsc && vite build",
     "lint:check": "npm run eslint:check && npm run prettier:check",
     "lint:fix": "npm run eslint:fix && npm run prettier:fix",
@@ -14,8 +14,7 @@
     "prettier:check": "prettier \"src/**/*.{ts,tsx}\" --list-different",
     "prettier:fix": "prettier \"src/**/*.{ts,tsx}\" --write",
     "preview": "vite preview",
"vite preview", - "sync-docs": "tsx src/next/lib/docs/sync-docs.ts && eslint \"src/next/lib/docs/generated/**/*.{ts,tsx,js}\" --fix --cache && prettier \"src/next/lib/docs/generated/**/*.{ts,tsx}\" --write", - "sync-examples": "cd ../snips/ && pnpm i && pnpm generate && pnpm run copy:app" + "sync-examples": "cd ../snips/ && pnpm i && pnpm generate" }, "dependencies": { "@heroicons/react": "^2.2.0", diff --git a/frontend/app/pnpm-lock.yaml b/frontend/app/pnpm-lock.yaml index 084a2087e..688c9ec79 100644 --- a/frontend/app/pnpm-lock.yaml +++ b/frontend/app/pnpm-lock.yaml @@ -16,100 +16,100 @@ importers: version: 2.2.0(react@18.3.1) '@hookform/resolvers': specifier: ^3.10.0 - version: 3.10.0(react-hook-form@7.56.2(react@18.3.1)) + version: 3.10.0(react-hook-form@7.62.0) '@lukemorales/query-key-factory': specifier: ^1.3.4 - version: 1.3.4(@tanstack/query-core@5.75.5)(@tanstack/react-query@5.75.5(react@18.3.1)) + version: 1.3.4(@tanstack/query-core@5.85.3)(@tanstack/react-query@5.85.3) '@monaco-editor/react': specifier: ^4.7.0 - version: 4.7.0(monaco-editor@0.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 4.7.0(monaco-editor@0.52.2)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-accordion': specifier: ^1.2.3 - version: 1.2.10(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.12(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-alert-dialog': specifier: ^1.1.7 - version: 1.1.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-avatar': specifier: ^1.1.3 - version: 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.10(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-checkbox': specifier: ^1.1.4 - version: 1.3.1(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.3.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-collapsible': specifier: ^1.1.3 - version: 1.1.10(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.12(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-dialog': specifier: ^1.1.6 - version: 1.1.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-dropdown-menu': specifier: ^2.1.6 - version: 2.1.14(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.1.16(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-hover-card': specifier: ^1.1.6 - version: 1.1.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-icons': specifier: ^1.3.2 version: 1.3.2(react@18.3.1) '@radix-ui/react-label': specifier: ^2.1.2 - version: 
2.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-menubar': specifier: ^1.1.6 - version: 1.1.14(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.16(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-popover': specifier: ^1.1.6 - version: 1.1.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-radio-group': specifier: ^1.2.3 - version: 1.3.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.3.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-scroll-area': specifier: ^1.2.3 - version: 1.2.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.10(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-select': specifier: ^2.1.6 - version: 2.2.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.2.6(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-separator': specifier: ^1.1.2 - version: 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-slot': specifier: ^1.1.2 - version: 1.2.2(@types/react@18.3.21)(react@18.3.1) + version: 1.2.3(@types/react@18.3.23)(react@18.3.1) '@radix-ui/react-switch': specifier: ^1.1.3 - version: 1.2.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.6(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-tabs': specifier: ^1.1.3 - version: 1.1.11(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.1.13(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-toast': specifier: ^1.2.6 - version: 1.2.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-tooltip': specifier: ^1.1.8 - version: 1.2.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 1.2.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@rjsf/core': specifier: ^5.24.8 - version: 5.24.10(@rjsf/utils@5.24.10(react@18.3.1))(react@18.3.1) + version: 5.24.12(@rjsf/utils@5.24.12)(react@18.3.1) '@rjsf/utils': specifier: ^5.24.8 - version: 5.24.10(react@18.3.1) + version: 5.24.12(react@18.3.1) '@rjsf/validator-ajv8': specifier: ^5.24.8 - version: 5.24.10(@rjsf/utils@5.24.10(react@18.3.1)) + version: 5.24.12(@rjsf/utils@5.24.12) '@sentry/react': specifier: ^7.120.3 - version: 
7.120.3(react@18.3.1) + version: 7.120.4(react@18.3.1) '@sentry/vite-plugin': specifier: ^2.23.0 - version: 2.23.0 + version: 2.23.1 '@tanstack/react-query': specifier: ^5.71.1 - version: 5.75.5(react@18.3.1) + version: 5.85.3(react@18.3.1) '@tanstack/react-query-devtools': specifier: ^5.74.6 - version: 5.75.5(@tanstack/react-query@5.75.5(react@18.3.1))(react@18.3.1) + version: 5.85.3(@tanstack/react-query@5.85.3)(react@18.3.1) '@tanstack/react-table': specifier: ^8.21.2 - version: 8.21.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 8.21.3(react-dom@18.3.1)(react@18.3.1) '@types/lodash': specifier: ^4.17.20 version: 4.17.20 @@ -154,7 +154,7 @@ importers: version: 3.12.0(react@18.3.1) '@visx/tooltip': specifier: ^3.12.0 - version: 3.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 3.12.0(react-dom@18.3.1)(react@18.3.1) '@visx/vendor': specifier: ^3.12.0 version: 3.12.0 @@ -172,13 +172,13 @@ importers: version: 2.1.1 cmdk: specifier: ^0.2.1 - version: 0.2.1(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 0.2.1(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) cron-parser: specifier: ^4.9.0 version: 4.9.0 cronstrue: specifier: ^2.57.0 - version: 2.60.0 + version: 2.61.0 dagre: specifier: ^0.8.5 version: 0.8.5 @@ -187,10 +187,10 @@ importers: version: 3.6.0 dompurify: specifier: ^3.2.4 - version: 3.2.5 + version: 3.2.6 jotai: specifier: ^2.12.2 - version: 2.12.4(@types/react@18.3.21)(react@18.3.1) + version: 2.13.1(@babel/core@7.28.3)(@types/react@18.3.23)(react@18.3.1) js-confetti: specifier: ^0.12.0 version: 0.12.0 @@ -202,7 +202,7 @@ importers: version: 0.446.0(react@18.3.1) monaco-themes: specifier: ^0.4.4 - version: 0.4.5 + version: 0.4.6 prism-react-renderer: specifier: ^2.4.1 version: 2.4.1(react@18.3.1) @@ -223,25 +223,25 @@ importers: version: 18.3.1(react@18.3.1) react-hook-form: specifier: ^7.55.0 - version: 7.56.2(react@18.3.1) + version: 7.62.0(react@18.3.1) react-icons: specifier: ^5.5.0 version: 5.5.0(react@18.3.1) react-router-dom: specifier: ^6.30.0 - version: 6.30.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 6.30.1(react-dom@18.3.1)(react@18.3.1) react-syntax-highlighter: specifier: ^15.6.1 version: 15.6.1(react@18.3.1) reactflow: specifier: ^11.11.4 - version: 11.11.4(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 11.11.4(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) recharts: specifier: ^2.15.1 - version: 2.15.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.15.4(react-dom@18.3.1)(react@18.3.1) shiki: specifier: ^3.2.2 - version: 3.4.0 + version: 3.9.2 tailwind-merge: specifier: ^2.6.0 version: 2.6.0 @@ -256,59 +256,59 @@ importers: version: 1.3.3 zod: specifier: ^3.24.2 - version: 3.24.4 + version: 3.25.76 devDependencies: '@types/dagre': specifier: ^0.7.52 - version: 0.7.52 + version: 0.7.53 '@types/dompurify': specifier: ^3.2.0 version: 3.2.0 '@types/node': specifier: ^20.17.28 - version: 20.17.43 + version: 20.19.10 '@types/qs': specifier: ^6.9.18 - version: 6.9.18 + version: 6.14.0 '@types/react': specifier: ^18.3.20 - version: 18.3.21 + version: 18.3.23 '@types/react-dom': specifier: ^18.3.5 - version: 18.3.7(@types/react@18.3.21) + version: 18.3.7(@types/react@18.3.23) '@types/react-syntax-highlighter': specifier: ^15.5.13 version: 15.5.13 '@typescript-eslint/eslint-plugin': specifier: ^6.21.0 - version: 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3) + version: 
6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.57.1)(typescript@5.9.2) '@typescript-eslint/parser': specifier: ^6.21.0 - version: 6.21.0(eslint@8.57.1)(typescript@5.8.3) + version: 6.21.0(eslint@8.57.1)(typescript@5.9.2) '@vitejs/plugin-react': specifier: ^4.3.4 - version: 4.4.1(vite@6.3.5(@types/node@20.17.43)(jiti@2.4.2)(tsx@4.19.4)(yaml@2.7.1)) + version: 4.7.0(vite@6.3.5) autoprefixer: specifier: ^10.4.21 - version: 10.4.21(postcss@8.5.3) + version: 10.4.21(postcss@8.5.6) eslint: specifier: ^8.57.1 version: 8.57.1 eslint-config-airbnb-typescript: specifier: ^17.1.0 - version: 17.1.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3))(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint-plugin-import@2.31.0)(eslint@8.57.1) + version: 17.1.0(@typescript-eslint/eslint-plugin@6.21.0)(@typescript-eslint/parser@6.21.0)(eslint-plugin-import@2.32.0)(eslint@8.57.1) eslint-config-prettier: specifier: ^9.1.0 - version: 9.1.0(eslint@8.57.1) + version: 9.1.2(eslint@8.57.1) eslint-import-resolver-typescript: specifier: ^3.10.0 - version: 3.10.1(eslint-plugin-import@2.31.0)(eslint@8.57.1) + version: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1) eslint-plugin-import: specifier: ^2.31.0 - version: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) + version: 2.32.0(@typescript-eslint/parser@6.21.0)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1) eslint-plugin-prettier: specifier: ^5.2.5 - version: 5.4.0(@types/eslint@8.56.12)(eslint-config-prettier@9.1.0(eslint@8.57.1))(eslint@8.57.1)(prettier@3.5.3) + version: 5.5.4(eslint-config-prettier@9.1.2)(eslint@8.57.1)(prettier@3.6.2) eslint-plugin-react: specifier: ^7.37.4 version: 7.37.5(eslint@8.57.1) @@ -320,34 +320,34 @@ importers: version: 0.4.20(eslint@8.57.1) eslint-plugin-unused-imports: specifier: ^3.2.0 - version: 3.2.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1) + version: 3.2.0(@typescript-eslint/eslint-plugin@6.21.0)(eslint@8.57.1) postcss: specifier: ^8.5.3 - version: 8.5.3 + version: 8.5.6 prettier: specifier: ^3.5.3 - version: 3.5.3 + version: 3.6.2 swagger-typescript-api: specifier: ^13.0.28 - version: 13.1.3 + version: 13.2.8 tailwindcss: specifier: ^3.4.17 version: 3.4.17 ts-unused-exports: specifier: ^11.0.1 - version: 11.0.1(typescript@5.8.3) + version: 11.0.1(typescript@5.9.2) tsx: specifier: ^4.19.3 - version: 4.19.4 + version: 4.20.4 typescript: specifier: ^5.8.2 - version: 5.8.3 + version: 5.9.2 vite: specifier: ^6.2.4 - version: 6.3.5(@types/node@20.17.43)(jiti@2.4.2)(tsx@4.19.4)(yaml@2.7.1) + version: 6.3.5(@types/node@20.19.10)(tsx@4.20.4) vite-plugin-eslint: specifier: ^1.8.1 - version: 1.8.1(eslint@8.57.1)(vite@6.3.5(@types/node@20.17.43)(jiti@2.4.2)(tsx@4.19.4)(yaml@2.7.1)) + version: 1.8.1(eslint@8.57.1)(vite@6.3.5) packages: @@ -363,28 +363,32 @@ packages: resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} engines: {node: '>=6.9.0'} - '@babel/compat-data@7.27.2': - resolution: {integrity: sha512-TUtMJYRPyUb/9aU8f3K0mjmjf6M9N5Woshn2CS6nqJSeJtTtQcpLUXjGt9vbF8ZGff0El99sWkLgzwW3VXnxZQ==} + '@babel/compat-data@7.28.0': + resolution: {integrity: sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==} engines: 
{node: '>=6.9.0'} - '@babel/core@7.27.1': - resolution: {integrity: sha512-IaaGWsQqfsQWVLqMn9OB92MNN7zukfVA4s7KKAI0KfrrDsZ0yhi5uV4baBuLuN7n3vsZpwP8asPPcVwApxvjBQ==} + '@babel/core@7.28.3': + resolution: {integrity: sha512-yDBHV9kQNcr2/sUr9jghVyz9C3Y5G2zUM2H2lo+9mKv4sFgbA8s8Z9t8D1jiTkGoO/NoIfKMyKWr4s6CN23ZwQ==} engines: {node: '>=6.9.0'} - '@babel/generator@7.27.1': - resolution: {integrity: sha512-UnJfnIpc/+JO0/+KRVQNGU+y5taA5vCbwN8+azkX6beii/ZF+enZJSOKo11ZSzGJjlNfJHfQtmQT8H+9TXPG2w==} + '@babel/generator@7.28.3': + resolution: {integrity: sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==} engines: {node: '>=6.9.0'} '@babel/helper-compilation-targets@7.27.2': resolution: {integrity: sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==} engines: {node: '>=6.9.0'} + '@babel/helper-globals@7.28.0': + resolution: {integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==} + engines: {node: '>=6.9.0'} + '@babel/helper-module-imports@7.27.1': resolution: {integrity: sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==} engines: {node: '>=6.9.0'} - '@babel/helper-module-transforms@7.27.1': - resolution: {integrity: sha512-9yHn519/8KvTU5BjTVEEeIM3w9/2yXNKoD82JifINImhpKkARMJKPP59kLo+BafpdN5zgNeIcS4jsGDmd3l58g==} + '@babel/helper-module-transforms@7.28.3': + resolution: {integrity: sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==} engines: {node: '>=6.9.0'} peerDependencies: '@babel/core': ^7.0.0 @@ -405,12 +409,12 @@ packages: resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==} engines: {node: '>=6.9.0'} - '@babel/helpers@7.27.1': - resolution: {integrity: sha512-FCvFTm0sWV8Fxhpp2McP5/W53GPllQ9QeQ7SiqGWjMf/LVG07lFa5+pgK05IRhVwtvafT22KF+ZSnM9I545CvQ==} + '@babel/helpers@7.28.3': + resolution: {integrity: sha512-PTNtvUQihsAsDHMOP5pfobP8C6CM4JWXmP8DrEIt46c3r2bf87Ua1zoqevsMo9g+tWDwgWrFP5EIxuBx5RudAw==} engines: {node: '>=6.9.0'} - '@babel/parser@7.27.2': - resolution: {integrity: sha512-QYLs8299NA7WM/bZAdp+CviYYkVoYXlDW2rzliy3chxd1PQjej7JORuMJDJXJUb9g0TT+B99EwaVLKmX+sPXWw==} + '@babel/parser@7.28.3': + resolution: {integrity: sha512-7+Ey1mAgYqFAx2h0RuoxcQT5+MlG3GTV0TQrgr7/ZliKsm/MNDxVVutlWaziMq7wJNAz8MTqz55XLpWvva6StA==} engines: {node: '>=6.0.0'} hasBin: true @@ -426,28 +430,28 @@ packages: peerDependencies: '@babel/core': ^7.0.0-0 - '@babel/runtime@7.27.1': - resolution: {integrity: sha512-1x3D2xEk2fRo3PAhwQwu5UubzgiVWSXTBfWpVd2Mx2AzRqJuDJCsgaDVZ7HB5iGzDW1Hl1sWN2mFyKjmR9uAog==} + '@babel/runtime@7.28.3': + resolution: {integrity: sha512-9uIQ10o0WGdpP6GDhXcdOJPJuDgFtIDtN/9+ArJQ2NAfAmiuhTQdzkaTGR33v43GYS2UrSA0eX2pPPHoFVvpxA==} engines: {node: '>=6.9.0'} '@babel/template@7.27.2': resolution: {integrity: sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==} engines: {node: '>=6.9.0'} - '@babel/traverse@7.27.1': - resolution: {integrity: sha512-ZCYtZciz1IWJB4U61UPu4KEaqyfj+r5T1Q5mqPo+IBpcG9kHv30Z0aD8LXPgC1trYa6rK0orRyAhqUgk4MjmEg==} + '@babel/traverse@7.28.3': + resolution: {integrity: sha512-7w4kZYHneL3A6NP2nxzHvT3HCZ7puDZZjFMqDpBPECub79sTtSO5CGXDkKrTQq8ksAwfD/XI2MRFX23njdDaIQ==} engines: {node: '>=6.9.0'} - '@babel/types@7.27.1': - resolution: {integrity: sha512-+EzkxvLNfiUeKMgy/3luqfsCWFRXLb7U6wNQTk60tovuckwB15B191tJWvpp4HjiQWdJkCxO3Wbvc6jlk3Xb2Q==} + 
'@babel/types@7.28.2': + resolution: {integrity: sha512-ruv7Ae4J5dUYULmeXw1gmb7rYRz57OWCPM57pHojnLq/3Z1CK2lNSLTCVjxVk1F/TZHwOZZrOWi0ur95BbLxNQ==} engines: {node: '>=6.9.0'} - '@biomejs/js-api@0.7.1': - resolution: {integrity: sha512-VFdgFFZWcyCQxZcAasyv8Enpexn4CblMdWmr6izLYHTLcbd+z9x/LuKU71qnmClABfnYqZjiY7c8DKTVri3Ajw==} + '@biomejs/js-api@2.0.3': + resolution: {integrity: sha512-fg4hGhg1LtohDqLdG9uggROlVsydcuFwZhPoUOTi9+bkvGMULDKL9dEP5iN6++CIA/AN0T20U82rLYuaxc+VDQ==} peerDependencies: - '@biomejs/wasm-bundler': ^1.9.2 - '@biomejs/wasm-nodejs': ^1.9.2 - '@biomejs/wasm-web': ^1.9.2 + '@biomejs/wasm-bundler': ^2.1.1 + '@biomejs/wasm-nodejs': ^2.1.1 + '@biomejs/wasm-web': ^2.1.1 peerDependenciesMeta: '@biomejs/wasm-bundler': optional: true @@ -456,164 +460,170 @@ packages: '@biomejs/wasm-web': optional: true - '@biomejs/wasm-nodejs@1.9.4': - resolution: {integrity: sha512-ZqNlhKcZW6MW1LxWIOfh9YVrBykvzyFad3bOh6JJFraDnNa3NXboRDiaI8dmrbb0ZHXCU1Tsq6WQsKV2Vpp5dw==} + '@biomejs/wasm-nodejs@2.1.4': + resolution: {integrity: sha512-BuvuOiXtTJg9w5+ApMXeJDD5H+XkIJSazIjPJeIxfkCqzVtCByLjCqPC6gbECbylRl9IhrZMWVIskTD6aJ9MzA==} - '@emnapi/core@1.4.3': - resolution: {integrity: sha512-4m62DuCE07lw01soJwPiBGC0nAww0Q+RY70VZ+n49yDIO13yyinhbWCeNnaob0lakDtWQzSdtNWzJeOJt2ma+g==} + '@emnapi/core@1.4.5': + resolution: {integrity: sha512-XsLw1dEOpkSX/WucdqUhPWP7hDxSvZiY+fsUC14h+FtQ2Ifni4znbBt8punRX+Uj2JG/uDb8nEHVKvrVlvdZ5Q==} - '@emnapi/runtime@1.4.3': - resolution: {integrity: sha512-pBPWdu6MLKROBX05wSNKcNb++m5Er+KQ9QkB+WVM+pW2Kx9hoSrVTnu3BdkI5eBLZoKu/J6mW/B6i6bJB2ytXQ==} + '@emnapi/runtime@1.4.5': + resolution: {integrity: sha512-++LApOtY0pEEz1zrd9vy1/zXVaVJJ/EbAF3u0fXIzPJEDtnITsBGbbK0EkM72amhl/R5b+5xx0Y/QhcVOpuulg==} - '@emnapi/wasi-threads@1.0.2': - resolution: {integrity: sha512-5n3nTJblwRi8LlXkJ9eBzu+kZR8Yxcc7ubakyQTFzPMtIhFpUBRbsnc2Dv88IZDIbCDlBiWrknhB4Lsz7mg6BA==} + '@emnapi/wasi-threads@1.0.4': + resolution: {integrity: sha512-PJR+bOmMOPH8AtcTGAyYNiuJ3/Fcoj2XN/gBEWzDIKh254XO+mM9XoXHk5GNEhodxeMznbg7BlRojVbKN+gC6g==} - '@esbuild/aix-ppc64@0.25.4': - resolution: {integrity: sha512-1VCICWypeQKhVbE9oW/sJaAmjLxhVqacdkvPLEjwlttjfwENRSClS8EjBz0KzRyFSCPDIkuXW34Je/vk7zdB7Q==} + '@esbuild/aix-ppc64@0.25.9': + resolution: {integrity: sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==} engines: {node: '>=18'} cpu: [ppc64] os: [aix] - '@esbuild/android-arm64@0.25.4': - resolution: {integrity: sha512-bBy69pgfhMGtCnwpC/x5QhfxAz/cBgQ9enbtwjf6V9lnPI/hMyT9iWpR1arm0l3kttTr4L0KSLpKmLp/ilKS9A==} + '@esbuild/android-arm64@0.25.9': + resolution: {integrity: sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==} engines: {node: '>=18'} cpu: [arm64] os: [android] - '@esbuild/android-arm@0.25.4': - resolution: {integrity: sha512-QNdQEps7DfFwE3hXiU4BZeOV68HHzYwGd0Nthhd3uCkkEKK7/R6MTgM0P7H7FAs5pU/DIWsviMmEGxEoxIZ+ZQ==} + '@esbuild/android-arm@0.25.9': + resolution: {integrity: sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==} engines: {node: '>=18'} cpu: [arm] os: [android] - '@esbuild/android-x64@0.25.4': - resolution: {integrity: sha512-TVhdVtQIFuVpIIR282btcGC2oGQoSfZfmBdTip2anCaVYcqWlZXGcdcKIUklfX2wj0JklNYgz39OBqh2cqXvcQ==} + '@esbuild/android-x64@0.25.9': + resolution: {integrity: sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==} engines: {node: '>=18'} cpu: [x64] os: [android] - '@esbuild/darwin-arm64@0.25.4': - resolution: {integrity: 
sha512-Y1giCfM4nlHDWEfSckMzeWNdQS31BQGs9/rouw6Ub91tkK79aIMTH3q9xHvzH8d0wDru5Ci0kWB8b3up/nl16g==} + '@esbuild/darwin-arm64@0.25.9': + resolution: {integrity: sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==} engines: {node: '>=18'} cpu: [arm64] os: [darwin] - '@esbuild/darwin-x64@0.25.4': - resolution: {integrity: sha512-CJsry8ZGM5VFVeyUYB3cdKpd/H69PYez4eJh1W/t38vzutdjEjtP7hB6eLKBoOdxcAlCtEYHzQ/PJ/oU9I4u0A==} + '@esbuild/darwin-x64@0.25.9': + resolution: {integrity: sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==} engines: {node: '>=18'} cpu: [x64] os: [darwin] - '@esbuild/freebsd-arm64@0.25.4': - resolution: {integrity: sha512-yYq+39NlTRzU2XmoPW4l5Ifpl9fqSk0nAJYM/V/WUGPEFfek1epLHJIkTQM6bBs1swApjO5nWgvr843g6TjxuQ==} + '@esbuild/freebsd-arm64@0.25.9': + resolution: {integrity: sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==} engines: {node: '>=18'} cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-x64@0.25.4': - resolution: {integrity: sha512-0FgvOJ6UUMflsHSPLzdfDnnBBVoCDtBTVyn/MrWloUNvq/5SFmh13l3dvgRPkDihRxb77Y17MbqbCAa2strMQQ==} + '@esbuild/freebsd-x64@0.25.9': + resolution: {integrity: sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==} engines: {node: '>=18'} cpu: [x64] os: [freebsd] - '@esbuild/linux-arm64@0.25.4': - resolution: {integrity: sha512-+89UsQTfXdmjIvZS6nUnOOLoXnkUTB9hR5QAeLrQdzOSWZvNSAXAtcRDHWtqAUtAmv7ZM1WPOOeSxDzzzMogiQ==} + '@esbuild/linux-arm64@0.25.9': + resolution: {integrity: sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==} engines: {node: '>=18'} cpu: [arm64] os: [linux] - '@esbuild/linux-arm@0.25.4': - resolution: {integrity: sha512-kro4c0P85GMfFYqW4TWOpvmF8rFShbWGnrLqlzp4X1TNWjRY3JMYUfDCtOxPKOIY8B0WC8HN51hGP4I4hz4AaQ==} + '@esbuild/linux-arm@0.25.9': + resolution: {integrity: sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==} engines: {node: '>=18'} cpu: [arm] os: [linux] - '@esbuild/linux-ia32@0.25.4': - resolution: {integrity: sha512-yTEjoapy8UP3rv8dB0ip3AfMpRbyhSN3+hY8mo/i4QXFeDxmiYbEKp3ZRjBKcOP862Ua4b1PDfwlvbuwY7hIGQ==} + '@esbuild/linux-ia32@0.25.9': + resolution: {integrity: sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==} engines: {node: '>=18'} cpu: [ia32] os: [linux] - '@esbuild/linux-loong64@0.25.4': - resolution: {integrity: sha512-NeqqYkrcGzFwi6CGRGNMOjWGGSYOpqwCjS9fvaUlX5s3zwOtn1qwg1s2iE2svBe4Q/YOG1q6875lcAoQK/F4VA==} + '@esbuild/linux-loong64@0.25.9': + resolution: {integrity: sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==} engines: {node: '>=18'} cpu: [loong64] os: [linux] - '@esbuild/linux-mips64el@0.25.4': - resolution: {integrity: sha512-IcvTlF9dtLrfL/M8WgNI/qJYBENP3ekgsHbYUIzEzq5XJzzVEV/fXY9WFPfEEXmu3ck2qJP8LG/p3Q8f7Zc2Xg==} + '@esbuild/linux-mips64el@0.25.9': + resolution: {integrity: sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==} engines: {node: '>=18'} cpu: [mips64el] os: [linux] - '@esbuild/linux-ppc64@0.25.4': - resolution: {integrity: sha512-HOy0aLTJTVtoTeGZh4HSXaO6M95qu4k5lJcH4gxv56iaycfz1S8GO/5Jh6X4Y1YiI0h7cRyLi+HixMR+88swag==} + '@esbuild/linux-ppc64@0.25.9': + resolution: {integrity: sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==} engines: {node: '>=18'} cpu: [ppc64] os: 
[linux] - '@esbuild/linux-riscv64@0.25.4': - resolution: {integrity: sha512-i8JUDAufpz9jOzo4yIShCTcXzS07vEgWzyX3NH2G7LEFVgrLEhjwL3ajFE4fZI3I4ZgiM7JH3GQ7ReObROvSUA==} + '@esbuild/linux-riscv64@0.25.9': + resolution: {integrity: sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==} engines: {node: '>=18'} cpu: [riscv64] os: [linux] - '@esbuild/linux-s390x@0.25.4': - resolution: {integrity: sha512-jFnu+6UbLlzIjPQpWCNh5QtrcNfMLjgIavnwPQAfoGx4q17ocOU9MsQ2QVvFxwQoWpZT8DvTLooTvmOQXkO51g==} + '@esbuild/linux-s390x@0.25.9': + resolution: {integrity: sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==} engines: {node: '>=18'} cpu: [s390x] os: [linux] - '@esbuild/linux-x64@0.25.4': - resolution: {integrity: sha512-6e0cvXwzOnVWJHq+mskP8DNSrKBr1bULBvnFLpc1KY+d+irZSgZ02TGse5FsafKS5jg2e4pbvK6TPXaF/A6+CA==} + '@esbuild/linux-x64@0.25.9': + resolution: {integrity: sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==} engines: {node: '>=18'} cpu: [x64] os: [linux] - '@esbuild/netbsd-arm64@0.25.4': - resolution: {integrity: sha512-vUnkBYxZW4hL/ie91hSqaSNjulOnYXE1VSLusnvHg2u3jewJBz3YzB9+oCw8DABeVqZGg94t9tyZFoHma8gWZQ==} + '@esbuild/netbsd-arm64@0.25.9': + resolution: {integrity: sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==} engines: {node: '>=18'} cpu: [arm64] os: [netbsd] - '@esbuild/netbsd-x64@0.25.4': - resolution: {integrity: sha512-XAg8pIQn5CzhOB8odIcAm42QsOfa98SBeKUdo4xa8OvX8LbMZqEtgeWE9P/Wxt7MlG2QqvjGths+nq48TrUiKw==} + '@esbuild/netbsd-x64@0.25.9': + resolution: {integrity: sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==} engines: {node: '>=18'} cpu: [x64] os: [netbsd] - '@esbuild/openbsd-arm64@0.25.4': - resolution: {integrity: sha512-Ct2WcFEANlFDtp1nVAXSNBPDxyU+j7+tId//iHXU2f/lN5AmO4zLyhDcpR5Cz1r08mVxzt3Jpyt4PmXQ1O6+7A==} + '@esbuild/openbsd-arm64@0.25.9': + resolution: {integrity: sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==} engines: {node: '>=18'} cpu: [arm64] os: [openbsd] - '@esbuild/openbsd-x64@0.25.4': - resolution: {integrity: sha512-xAGGhyOQ9Otm1Xu8NT1ifGLnA6M3sJxZ6ixylb+vIUVzvvd6GOALpwQrYrtlPouMqd/vSbgehz6HaVk4+7Afhw==} + '@esbuild/openbsd-x64@0.25.9': + resolution: {integrity: sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==} engines: {node: '>=18'} cpu: [x64] os: [openbsd] - '@esbuild/sunos-x64@0.25.4': - resolution: {integrity: sha512-Mw+tzy4pp6wZEK0+Lwr76pWLjrtjmJyUB23tHKqEDP74R3q95luY/bXqXZeYl4NYlvwOqoRKlInQialgCKy67Q==} + '@esbuild/openharmony-arm64@0.25.9': + resolution: {integrity: sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.25.9': + resolution: {integrity: sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==} engines: {node: '>=18'} cpu: [x64] os: [sunos] - '@esbuild/win32-arm64@0.25.4': - resolution: {integrity: sha512-AVUP428VQTSddguz9dO9ngb+E5aScyg7nOeJDrF1HPYu555gmza3bDGMPhmVXL8svDSoqPCsCPjb265yG/kLKQ==} + '@esbuild/win32-arm64@0.25.9': + resolution: {integrity: sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==} engines: {node: '>=18'} cpu: [arm64] os: [win32] - '@esbuild/win32-ia32@0.25.4': - resolution: {integrity: 
sha512-i1sW+1i+oWvQzSgfRcxxG2k4I9n3O9NRqy8U+uugaT2Dy7kLO9Y7wI72haOahxceMX8hZAzgGou1FhndRldxRg==} + '@esbuild/win32-ia32@0.25.9': + resolution: {integrity: sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==} engines: {node: '>=18'} cpu: [ia32] os: [win32] - '@esbuild/win32-x64@0.25.4': - resolution: {integrity: sha512-nOT2vZNw6hJ+z43oP1SPea/G/6AbN6X+bGNhNuq8NtRHy4wsMhw765IKLNmnjek7GvjWBYQ8Q5VBoYTFg9y1UQ==} + '@esbuild/win32-x64@0.25.9': + resolution: {integrity: sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==} engines: {node: '>=18'} cpu: [x64] os: [win32] @@ -639,20 +649,20 @@ packages: '@exodus/schemasafe@1.3.0': resolution: {integrity: sha512-5Aap/GaRupgNx/feGBwLLTVv8OQFfv3pq2lPRzPg9R+IOBnDgghTGW7l7EuVXOvg5cc/xSAlRW8rBrjIC3Nvqw==} - '@floating-ui/core@1.7.0': - resolution: {integrity: sha512-FRdBLykrPPA6P76GGGqlex/e7fbe0F1ykgxHYNXQsH/iTEtjMj/f9bpY5oQqbjt5VgZvgz/uKXbGuROijh3VLA==} + '@floating-ui/core@1.7.3': + resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==} - '@floating-ui/dom@1.7.0': - resolution: {integrity: sha512-lGTor4VlXcesUMh1cupTUTDoCxMb0V6bm3CnxHzQcw8Eaf1jQbgQX4i02fYgT0vJ82tb5MZ4CZk1LRGkktJCzg==} + '@floating-ui/dom@1.7.3': + resolution: {integrity: sha512-uZA413QEpNuhtb3/iIKoYMSK07keHPYeXF02Zhd6e213j+d1NamLix/mCLxBUDW/Gx52sPH2m+chlUsyaBs/Ag==} - '@floating-ui/react-dom@2.1.2': - resolution: {integrity: sha512-06okr5cgPzMNBy+Ycse2A6udMi4bqwW/zgBF/rwjcNqWkyr82Mcg8b0vjX8OJpZFy/FKjJmw6wV7t44kK6kW7A==} + '@floating-ui/react-dom@2.1.5': + resolution: {integrity: sha512-HDO/1/1oH9fjj4eLgegrlH3dklZpHtUYYFiVwMUwfGvk9jWDRWqkklA2/NFScknrcNSspbV868WjXORvreDX+Q==} peerDependencies: react: '>=16.8.0' react-dom: '>=16.8.0' - '@floating-ui/utils@0.2.9': - resolution: {integrity: sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg==} + '@floating-ui/utils@0.2.10': + resolution: {integrity: sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==} '@heroicons/react@2.2.0': resolution: {integrity: sha512-LMcepvRaS9LYHJGsF0zzmgKCUim/X3N/DQKc4jepAXJ7l8QxJ1PmxJzqplF2Z3FE4PqBAIGyJAQ/w4B5dsqbtQ==} @@ -681,23 +691,18 @@ packages: resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} - '@jridgewell/gen-mapping@0.3.8': - resolution: {integrity: sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==} - engines: {node: '>=6.0.0'} + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} '@jridgewell/resolve-uri@3.1.2': resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} engines: {node: '>=6.0.0'} - '@jridgewell/set-array@1.2.1': - resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==} - engines: {node: '>=6.0.0'} + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} - '@jridgewell/sourcemap-codec@1.5.0': - resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} - - '@jridgewell/trace-mapping@0.3.25': - resolution: {integrity: 
sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} + '@jridgewell/trace-mapping@0.3.30': + resolution: {integrity: sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==} '@lukemorales/query-key-factory@1.3.4': resolution: {integrity: sha512-A3frRDdkmaNNQi6mxIshsDk4chRXWoXa05US8fBo4kci/H+lVmujS6QrwQLLGIkNIRFGjMqp2uKjC4XsLdydRw==} @@ -716,8 +721,8 @@ packages: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - '@napi-rs/wasm-runtime@0.2.9': - resolution: {integrity: sha512-OKRBiajrrxB9ATokgEQoG87Z25c67pCpYcCwmXYX8PBftC9pBfN18gnm/fh1wurSLEKIAt+QRFLFCQISrb66Jg==} + '@napi-rs/wasm-runtime@0.2.12': + resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} @@ -739,8 +744,8 @@ packages: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} - '@pkgr/core@0.2.4': - resolution: {integrity: sha512-ROFF39F6ZrnzSUEmQQZUar0Jt4xVoP9WnDRdWwF4NNcXs3xBTLgBUDoOwW141y1jP+S8nahIbdxbFC7IShw9Iw==} + '@pkgr/core@0.2.9': + resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} '@radix-ui/number@1.1.1': @@ -749,11 +754,11 @@ packages: '@radix-ui/primitive@1.0.0': resolution: {integrity: sha512-3e7rn8FDMin4CgeL7Z/49smCA3rFYY3Ha2rUQ7HRWFadS5iCRw08ZgVT1LaNTCNqgvrUiyczLflrVrF0SRQtNA==} - '@radix-ui/primitive@1.1.2': - resolution: {integrity: sha512-XnbHrrprsNqZKQhStrSwgRUQzoCI1glLzdw79xiZPoofhGICeZRSQ3dIxAKH1gb3OHfNf4d6f+vAv3kil2eggA==} + '@radix-ui/primitive@1.1.3': + resolution: {integrity: sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==} - '@radix-ui/react-accordion@1.2.10': - resolution: {integrity: sha512-x+URzV1siKmeXPSUIQ22L81qp2eOhjpy3tgteF+zOr4d1u0qJnFuyBF4MoQRhmKP6ivDxlvDAvqaF77gh7DOIw==} + '@radix-ui/react-accordion@1.2.12': + resolution: {integrity: sha512-T4nygeh9YE9dLRPhAHSeOZi7HBXo+0kYIPJXayZfvWOWA0+n3dESrZbjfDPUABkUNym6Hd+f2IR113To8D2GPA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -765,8 +770,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-alert-dialog@1.1.13': - resolution: {integrity: sha512-/uPs78OwxGxslYOG5TKeUsv9fZC0vo376cXSADdKirTmsLJU2au6L3n34c3p6W26rFDDDze/hwy4fYeNd0qdGA==} + '@radix-ui/react-alert-dialog@1.1.15': + resolution: {integrity: sha512-oTVLkEw5GpdRe29BqJ0LSDFWI3qu0vR1M0mUkOQWDIUnY/QIkLpgDMWuKxP94c2NAC2LGcgVhG1ImF3jkZ5wXw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -778,8 +783,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-arrow@1.1.6': - resolution: {integrity: sha512-2JMfHJf/eVnwq+2dewT3C0acmCWD3XiVA1Da+jTDqo342UlU13WvXtqHhG+yJw5JeQmu4ue2eMy6gcEArLBlcw==} + '@radix-ui/react-arrow@1.1.7': + resolution: {integrity: sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -791,8 +796,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-avatar@1.1.9': - resolution: {integrity: sha512-10tQokfvZdFvnvDkcOJPjm2pWiP8A0R4T83MoD7tb15bC/k2GU7B1YBuzJi8lNQ8V1QqhP8ocNqp27ByZaNagQ==} + 
'@radix-ui/react-avatar@1.1.10': + resolution: {integrity: sha512-V8piFfWapM5OmNCXTzVQY+E1rDa53zY+MQ4Y7356v4fFz6vqCyUtIz2rUD44ZEdwg78/jKmMJHj07+C/Z/rcog==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -804,8 +809,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-checkbox@1.3.1': - resolution: {integrity: sha512-xTaLKAO+XXMPK/BpVTSaAAhlefmvMSACjIhK9mGsImvX2ljcTDm8VGR1CuS1uYcNdR5J+oiOhoJZc5un6bh3VQ==} + '@radix-ui/react-checkbox@1.3.3': + resolution: {integrity: sha512-wBbpv+NQftHDdG86Qc0pIyXk5IR3tM8Vd0nWLKDcX8nNn4nXFOFwsKuqw2okA/1D/mpaAkmuyndrPJTYDNZtFw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -817,8 +822,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-collapsible@1.1.10': - resolution: {integrity: sha512-O2mcG3gZNkJ/Ena34HurA3llPOEA/M4dJtIRMa6y/cknRDC8XY5UZBInKTsUwW5cUue9A4k0wi1XU5fKBzKe1w==} + '@radix-ui/react-collapsible@1.1.12': + resolution: {integrity: sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -830,8 +835,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-collection@1.1.6': - resolution: {integrity: sha512-PbhRFK4lIEw9ADonj48tiYWzkllz81TM7KVYyyMMw2cwHO7D5h4XKEblL8NlaRisTK3QTe6tBEhDccFUryxHBQ==} + '@radix-ui/react-collection@1.1.7': + resolution: {integrity: sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -877,8 +882,8 @@ packages: react: ^16.8 || ^17.0 || ^18.0 react-dom: ^16.8 || ^17.0 || ^18.0 - '@radix-ui/react-dialog@1.1.13': - resolution: {integrity: sha512-ARFmqUyhIVS3+riWzwGTe7JLjqwqgnODBUZdqpWar/z1WFs9z76fuOs/2BOWCR+YboRn4/WN9aoaGVwqNRr8VA==} + '@radix-ui/react-dialog@1.1.15': + resolution: {integrity: sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -905,8 +910,8 @@ packages: react: ^16.8 || ^17.0 || ^18.0 react-dom: ^16.8 || ^17.0 || ^18.0 - '@radix-ui/react-dismissable-layer@1.1.9': - resolution: {integrity: sha512-way197PiTvNp+WBP7svMJasHl+vibhWGQDb6Mgf5mhEWJkgb85z7Lfl9TUdkqpWsf8GRNmoopx9ZxCyDzmgRMQ==} + '@radix-ui/react-dismissable-layer@1.1.11': + resolution: {integrity: sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -918,8 +923,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-dropdown-menu@2.1.14': - resolution: {integrity: sha512-lzuyNjoWOoaMFE/VC5FnAAYM16JmQA8ZmucOXtlhm2kKR5TSU95YLAueQ4JYuRmUJmBvSqXaVFGIfuukybwZJQ==} + '@radix-ui/react-dropdown-menu@2.1.16': + resolution: {integrity: sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -936,8 +941,8 @@ packages: peerDependencies: react: ^16.8 || ^17.0 || ^18.0 - '@radix-ui/react-focus-guards@1.1.2': - resolution: {integrity: sha512-fyjAACV62oPV925xFCrH8DR5xWhg9KYtJT4s3u54jxp+L/hbpTY2kIeEFFbFe+a/HCE94zGQMZLIpVTPVZDhaA==} + '@radix-ui/react-focus-guards@1.1.3': + resolution: {integrity: sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==} peerDependencies: '@types/react': '*' react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc @@ -951,8 +956,8 @@ packages: react: ^16.8 || ^17.0 || ^18.0 
react-dom: ^16.8 || ^17.0 || ^18.0 - '@radix-ui/react-focus-scope@1.1.6': - resolution: {integrity: sha512-r9zpYNUQY+2jWHWZGyddQLL9YHkM/XvSFHVcWs7bdVuxMAnCwTAuy6Pf47Z4nw7dYcUou1vg/VgjjrrH03VeBw==} + '@radix-ui/react-focus-scope@1.1.7': + resolution: {integrity: sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -964,8 +969,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-hover-card@1.1.13': - resolution: {integrity: sha512-Wtjvx0d/6Bgd/jAYS1mW6IPSUQ25y0hkUSOS1z5/4+U8+DJPwKroqJlM/AlVFl3LywGoruiPmcvB9Aks9mSOQw==} + '@radix-ui/react-hover-card@1.1.15': + resolution: {integrity: sha512-qgTkjNT1CfKMoP0rcasmlH2r1DAiYicWsDsufxl940sT2wHNEWWv6FMWIQXWhVdmC1d/HYfbhQx60KYyAtKxjg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -996,8 +1001,8 @@ packages: '@types/react': optional: true - '@radix-ui/react-label@2.1.6': - resolution: {integrity: sha512-S/hv1mTlgcPX2gCTJrWuTjSXf7ER3Zf7zWGtOprxhIIY93Qin3n5VgNA0Ez9AgrK/lEtlYgzLd4f5x6AVar4Yw==} + '@radix-ui/react-label@2.1.7': + resolution: {integrity: sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1009,8 +1014,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-menu@2.1.14': - resolution: {integrity: sha512-0zSiBAIFq9GSKoSH5PdEaQeRB3RnEGxC+H2P0egtnKoKKLNBH8VBHyVO6/jskhjAezhOIplyRUj7U2lds9A+Yg==} + '@radix-ui/react-menu@2.1.16': + resolution: {integrity: sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1022,8 +1027,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-menubar@1.1.14': - resolution: {integrity: sha512-nWLOS7EG3iYhT/zlE/Pbip17rrMnV/0AS7ueb3pKHTSAnpA6/N9rXQYowulZw4owZ9P+qSilHsFzSx/kU7yplQ==} + '@radix-ui/react-menubar@1.1.16': + resolution: {integrity: sha512-EB1FktTz5xRRi2Er974AUQZWg2yVBb1yjip38/lgwtCVRd3a+maUoGHN/xs9Yv8SY8QwbSEb+YrxGadVWbEutA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1035,8 +1040,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-popover@1.1.13': - resolution: {integrity: sha512-84uqQV3omKDR076izYgcha6gdpN8m3z6w/AeJ83MSBJYVG/AbOHdLjAgsPZkeC/kt+k64moXFCnio8BbqXszlw==} + '@radix-ui/react-popover@1.1.15': + resolution: {integrity: sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1048,8 +1053,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-popper@1.2.6': - resolution: {integrity: sha512-7iqXaOWIjDBfIG7aq8CUEeCSsQMLFdn7VEE8TaFz704DtEzpPHR7w/uuzRflvKgltqSAImgcmxQ7fFX3X7wasg==} + '@radix-ui/react-popper@1.2.8': + resolution: {integrity: sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1067,8 +1072,8 @@ packages: react: ^16.8 || ^17.0 || ^18.0 react-dom: ^16.8 || ^17.0 || ^18.0 - '@radix-ui/react-portal@1.1.8': - resolution: {integrity: sha512-hQsTUIn7p7fxCPvao/q6wpbxmCwgLrlz+nOrJgC+RwfZqWY/WN+UMqkXzrtKbPrF82P43eCTl3ekeKuyAQbFeg==} + '@radix-ui/react-portal@1.1.9': + resolution: {integrity: sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==} peerDependencies: '@types/react': '*' 
'@types/react-dom': '*' @@ -1086,8 +1091,8 @@ packages: react: ^16.8 || ^17.0 || ^18.0 react-dom: ^16.8 || ^17.0 || ^18.0 - '@radix-ui/react-presence@1.1.4': - resolution: {integrity: sha512-ueDqRbdc4/bkaQT3GIpLQssRlFgWaL/U2z/S31qRwwLWoxHLgry3SIfCwhxeQNbirEUXFa+lq3RL3oBYXtcmIA==} + '@radix-ui/react-presence@1.1.5': + resolution: {integrity: sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1105,8 +1110,8 @@ packages: react: ^16.8 || ^17.0 || ^18.0 react-dom: ^16.8 || ^17.0 || ^18.0 - '@radix-ui/react-primitive@2.1.2': - resolution: {integrity: sha512-uHa+l/lKfxuDD2zjN/0peM/RhhSmRjr5YWdk/37EnSv1nJ88uvG85DPexSm8HdFQROd2VdERJ6ynXbkCFi+APw==} + '@radix-ui/react-primitive@2.1.3': + resolution: {integrity: sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1118,8 +1123,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-radio-group@1.3.6': - resolution: {integrity: sha512-1tfTAqnYZNVwSpFhCT273nzK8qGBReeYnNTPspCggqk1fvIrfVxJekIuBFidNivzpdiMqDwVGnQvHqXrRPM4Og==} + '@radix-ui/react-radio-group@1.3.8': + resolution: {integrity: sha512-VBKYIYImA5zsxACdisNQ3BjCBfmbGH3kQlnFVqlWU4tXwjy7cGX8ta80BcrO+WJXIn5iBylEH3K6ZTlee//lgQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1131,8 +1136,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-roving-focus@1.1.9': - resolution: {integrity: sha512-ZzrIFnMYHHCNqSNCsuN6l7wlewBEq0O0BCSBkabJMFXVO51LRUTq71gLP1UxFvmrXElqmPjA5VX7IqC9VpazAQ==} + '@radix-ui/react-roving-focus@1.1.11': + resolution: {integrity: sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1144,8 +1149,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-scroll-area@1.2.8': - resolution: {integrity: sha512-K5h1RkYA6M0Sn61BV5LQs686zqBsSC0sGzL4/Gw4mNnjzrQcGSc6YXfC6CRFNaGydSdv5+M8cb0eNsOGo0OXtQ==} + '@radix-ui/react-scroll-area@1.2.10': + resolution: {integrity: sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1157,8 +1162,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-select@2.2.4': - resolution: {integrity: sha512-/OOm58Gil4Ev5zT8LyVzqfBcij4dTHYdeyuF5lMHZ2bIp0Lk9oETocYiJ5QC0dHekEQnK6L/FNJCceeb4AkZ6Q==} + '@radix-ui/react-select@2.2.6': + resolution: {integrity: sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1170,8 +1175,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-separator@1.1.6': - resolution: {integrity: sha512-Izof3lPpbCfTM7WDta+LRkz31jem890VjEvpVRoWQNKpDUMMVffuyq854XPGP1KYGWWmjmYvHvPFeocWhFCy1w==} + '@radix-ui/react-separator@1.1.7': + resolution: {integrity: sha512-0HEb8R9E8A+jZjvmFCy/J4xhbXy3TV+9XSnGJ3KvTtjlIUy/YQ/p6UYZvi7YbeoeXdyU9+Y3scizK6hkY37baA==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1188,8 +1193,8 @@ packages: peerDependencies: react: ^16.8 || ^17.0 || ^18.0 - '@radix-ui/react-slot@1.2.2': - resolution: {integrity: sha512-y7TBO4xN4Y94FvcWIOIh18fM4R1A8S4q1jhoz4PNzOoHsFcN8pogcFmZrTYAm4F9VRUrWP/Mw7xSKybIeRI+CQ==} + '@radix-ui/react-slot@1.2.3': + resolution: {integrity: 
sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==} peerDependencies: '@types/react': '*' react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc @@ -1197,8 +1202,8 @@ packages: '@types/react': optional: true - '@radix-ui/react-switch@1.2.4': - resolution: {integrity: sha512-yZCky6XZFnR7pcGonJkr9VyNRu46KcYAbyg1v/gVVCZUr8UJ4x+RpncC27hHtiZ15jC+3WS8Yg/JSgyIHnYYsQ==} + '@radix-ui/react-switch@1.2.6': + resolution: {integrity: sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1210,8 +1215,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-tabs@1.1.11': - resolution: {integrity: sha512-4FiKSVoXqPP/KfzlB7lwwqoFV6EPwkrrqGp9cUYXjwDYHhvpnqq79P+EPHKcdoTE7Rl8w/+6s9rTlsfXHES9GA==} + '@radix-ui/react-tabs@1.1.13': + resolution: {integrity: sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1223,8 +1228,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-toast@1.2.13': - resolution: {integrity: sha512-e/e43mQAwgYs8BY4y9l99xTK6ig1bK2uXsFLOMn9IZ16lAgulSTsotcPHVT2ZlSb/ye6Sllq7IgyDB8dGhpeXQ==} + '@radix-ui/react-toast@1.2.15': + resolution: {integrity: sha512-3OSz3TacUWy4WtOXV38DggwxoqJK4+eDkNMl5Z/MJZaoUPaP4/9lf81xXMe1I2ReTAptverZUpbPY4wWwWyL5g==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1236,8 +1241,8 @@ packages: '@types/react-dom': optional: true - '@radix-ui/react-tooltip@1.2.6': - resolution: {integrity: sha512-zYb+9dc9tkoN2JjBDIIPLQtk3gGyz8FMKoqYTb8EMVQ5a5hBcdHPECrsZVI4NpPAUOixhkoqg7Hj5ry5USowfA==} + '@radix-ui/react-tooltip@1.2.8': + resolution: {integrity: sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1350,8 +1355,8 @@ packages: '@types/react': optional: true - '@radix-ui/react-visually-hidden@1.2.2': - resolution: {integrity: sha512-ORCmRUbNiZIv6uV5mhFrhsIKw4UX/N3syZtyqvry61tbGm4JlgQuSn0hk5TwCARsCjkcnuRkSdCE3xfb+ADHew==} + '@radix-ui/react-visually-hidden@1.2.3': + resolution: {integrity: sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==} peerDependencies: '@types/react': '*' '@types/react-dom': '*' @@ -1406,154 +1411,157 @@ packages: resolution: {integrity: sha512-O3rHJzAQKamUz1fvE0Qaw0xSFqsA/yafi2iqeE0pvdFtCO1viYx8QL6f3Ln/aCCTLxs68SLf0KPM9eSeM8yBnA==} engines: {node: '>=14.0.0'} - '@rjsf/core@5.24.10': - resolution: {integrity: sha512-DJe2OECdDoBHacQKCPOAiuoEydM7ZJUUFqgcO4VswRs13QglRaEAEk4ylWR6f2gQx7wBg0u302n3VvRoHRxegQ==} + '@rjsf/core@5.24.12': + resolution: {integrity: sha512-OWVdC501n3Io0hplgpnkzArpcUSiImMgLQhk6/EI8wu2xbvk5fTiM7YAVlAObpAD3z3LRrAwhjnmh9L4k/FWmQ==} engines: {node: '>=14'} peerDependencies: '@rjsf/utils': ^5.24.x react: ^16.14.0 || >=17 - '@rjsf/utils@5.24.10': - resolution: {integrity: sha512-afsi+oKNV12p6OnBydJ0R4M87h7SaArqAhqJPqVXbMldnbPnW632THCtod5GG+IYtkqUkx0iQq2xpHw1OBf+3A==} + '@rjsf/utils@5.24.12': + resolution: {integrity: sha512-fDwQB0XkjZjpdFUz5UAnuZj8nnbxDbX5tp+jTOjjJKw2TMQ9gFFYCQ12lSpdhezA2YgEGZfxyYTGW0DKDL5Drg==} engines: {node: '>=14'} peerDependencies: react: ^16.14.0 || >=17 - '@rjsf/validator-ajv8@5.24.10': - resolution: {integrity: sha512-BLMpJNjtRAlp1i/QJBxY4/tchkWpZuJ/dJbjk+75veQ5oScr2YJ0/u2NqSsJNSyT37XxNJ6w49HgxZfB9xkDIQ==} + '@rjsf/validator-ajv8@5.24.12': + resolution: 
{integrity: sha512-IMXdCjvDNdvb+mDgZC3AlAtr0pjYKq5s0GcLECjG5PuiX7Ib4JaDQHZY5ZJdKblMfgzhsn8AAOi573jXAt7BHQ==} engines: {node: '>=14'} peerDependencies: '@rjsf/utils': ^5.24.x + '@rolldown/pluginutils@1.0.0-beta.27': + resolution: {integrity: sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==} + '@rollup/pluginutils@4.2.1': resolution: {integrity: sha512-iKnFXr7NkdZAIHiIWE+BX5ULi/ucVFYWD6TbAV+rZctiRTY2PL6tsIKhoIOaoskiWAkgu+VsbXgUVDNLHf+InQ==} engines: {node: '>= 8.0.0'} - '@rollup/rollup-android-arm-eabi@4.40.2': - resolution: {integrity: sha512-JkdNEq+DFxZfUwxvB58tHMHBHVgX23ew41g1OQinthJ+ryhdRk67O31S7sYw8u2lTjHUPFxwar07BBt1KHp/hg==} + '@rollup/rollup-android-arm-eabi@4.46.2': + resolution: {integrity: sha512-Zj3Hl6sN34xJtMv7Anwb5Gu01yujyE/cLBDB2gnHTAHaWS1Z38L7kuSG+oAh0giZMqG060f/YBStXtMH6FvPMA==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.40.2': - resolution: {integrity: sha512-13unNoZ8NzUmnndhPTkWPWbX3vtHodYmy+I9kuLxN+F+l+x3LdVF7UCu8TWVMt1POHLh6oDHhnOA04n8oJZhBw==} + '@rollup/rollup-android-arm64@4.46.2': + resolution: {integrity: sha512-nTeCWY83kN64oQ5MGz3CgtPx8NSOhC5lWtsjTs+8JAJNLcP3QbLCtDDgUKQc/Ro/frpMq4SHUaHN6AMltcEoLQ==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.40.2': - resolution: {integrity: sha512-Gzf1Hn2Aoe8VZzevHostPX23U7N5+4D36WJNHK88NZHCJr7aVMG4fadqkIf72eqVPGjGc0HJHNuUaUcxiR+N/w==} + '@rollup/rollup-darwin-arm64@4.46.2': + resolution: {integrity: sha512-HV7bW2Fb/F5KPdM/9bApunQh68YVDU8sO8BvcW9OngQVN3HHHkw99wFupuUJfGR9pYLLAjcAOA6iO+evsbBaPQ==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.40.2': - resolution: {integrity: sha512-47N4hxa01a4x6XnJoskMKTS8XZ0CZMd8YTbINbi+w03A2w4j1RTlnGHOz/P0+Bg1LaVL6ufZyNprSg+fW5nYQQ==} + '@rollup/rollup-darwin-x64@4.46.2': + resolution: {integrity: sha512-SSj8TlYV5nJixSsm/y3QXfhspSiLYP11zpfwp6G/YDXctf3Xkdnk4woJIF5VQe0of2OjzTt8EsxnJDCdHd2xMA==} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.40.2': - resolution: {integrity: sha512-8t6aL4MD+rXSHHZUR1z19+9OFJ2rl1wGKvckN47XFRVO+QL/dUSpKA2SLRo4vMg7ELA8pzGpC+W9OEd1Z/ZqoQ==} + '@rollup/rollup-freebsd-arm64@4.46.2': + resolution: {integrity: sha512-ZyrsG4TIT9xnOlLsSSi9w/X29tCbK1yegE49RYm3tu3wF1L/B6LVMqnEWyDB26d9Ecx9zrmXCiPmIabVuLmNSg==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.40.2': - resolution: {integrity: sha512-C+AyHBzfpsOEYRFjztcYUFsH4S7UsE9cDtHCtma5BK8+ydOZYgMmWg1d/4KBytQspJCld8ZIujFMAdKG1xyr4Q==} + '@rollup/rollup-freebsd-x64@4.46.2': + resolution: {integrity: sha512-pCgHFoOECwVCJ5GFq8+gR8SBKnMO+xe5UEqbemxBpCKYQddRQMgomv1104RnLSg7nNvgKy05sLsY51+OVRyiVw==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.40.2': - resolution: {integrity: sha512-de6TFZYIvJwRNjmW3+gaXiZ2DaWL5D5yGmSYzkdzjBDS3W+B9JQ48oZEsmMvemqjtAFzE16DIBLqd6IQQRuG9Q==} + '@rollup/rollup-linux-arm-gnueabihf@4.46.2': + resolution: {integrity: sha512-EtP8aquZ0xQg0ETFcxUbU71MZlHaw9MChwrQzatiE8U/bvi5uv/oChExXC4mWhjiqK7azGJBqU0tt5H123SzVA==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.40.2': - resolution: {integrity: sha512-urjaEZubdIkacKc930hUDOfQPysezKla/O9qV+O89enqsqUmQm8Xj8O/vh0gHg4LYfv7Y7UsE3QjzLQzDYN1qg==} + '@rollup/rollup-linux-arm-musleabihf@4.46.2': + resolution: {integrity: sha512-qO7F7U3u1nfxYRPM8HqFtLd+raev2K137dsV08q/LRKRLEc7RsiDWihUnrINdsWQxPR9jqZ8DIIZ1zJJAm5PjQ==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.40.2': - resolution: {integrity: sha512-KlE8IC0HFOC33taNt1zR8qNlBYHj31qGT1UqWqtvR/+NuCVhfufAq9fxO8BMFC22Wu0rxOwGVWxtCMvZVLmhQg==} + 
'@rollup/rollup-linux-arm64-gnu@4.46.2': + resolution: {integrity: sha512-3dRaqLfcOXYsfvw5xMrxAk9Lb1f395gkoBYzSFcc/scgRFptRXL9DOaDpMiehf9CO8ZDRJW2z45b6fpU5nwjng==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.40.2': - resolution: {integrity: sha512-j8CgxvfM0kbnhu4XgjnCWJQyyBOeBI1Zq91Z850aUddUmPeQvuAy6OiMdPS46gNFgy8gN1xkYyLgwLYZG3rBOg==} + '@rollup/rollup-linux-arm64-musl@4.46.2': + resolution: {integrity: sha512-fhHFTutA7SM+IrR6lIfiHskxmpmPTJUXpWIsBXpeEwNgZzZZSg/q4i6FU4J8qOGyJ0TR+wXBwx/L7Ho9z0+uDg==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-loongarch64-gnu@4.40.2': - resolution: {integrity: sha512-Ybc/1qUampKuRF4tQXc7G7QY9YRyeVSykfK36Y5Qc5dmrIxwFhrOzqaVTNoZygqZ1ZieSWTibfFhQ5qK8jpWxw==} + '@rollup/rollup-linux-loongarch64-gnu@4.46.2': + resolution: {integrity: sha512-i7wfGFXu8x4+FRqPymzjD+Hyav8l95UIZ773j7J7zRYc3Xsxy2wIn4x+llpunexXe6laaO72iEjeeGyUFmjKeA==} cpu: [loong64] os: [linux] - '@rollup/rollup-linux-powerpc64le-gnu@4.40.2': - resolution: {integrity: sha512-3FCIrnrt03CCsZqSYAOW/k9n625pjpuMzVfeI+ZBUSDT3MVIFDSPfSUgIl9FqUftxcUXInvFah79hE1c9abD+Q==} + '@rollup/rollup-linux-ppc64-gnu@4.46.2': + resolution: {integrity: sha512-B/l0dFcHVUnqcGZWKcWBSV2PF01YUt0Rvlurci5P+neqY/yMKchGU8ullZvIv5e8Y1C6wOn+U03mrDylP5q9Yw==} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.40.2': - resolution: {integrity: sha512-QNU7BFHEvHMp2ESSY3SozIkBPaPBDTsfVNGx3Xhv+TdvWXFGOSH2NJvhD1zKAT6AyuuErJgbdvaJhYVhVqrWTg==} + '@rollup/rollup-linux-riscv64-gnu@4.46.2': + resolution: {integrity: sha512-32k4ENb5ygtkMwPMucAb8MtV8olkPT03oiTxJbgkJa7lJ7dZMr0GCFJlyvy+K8iq7F/iuOr41ZdUHaOiqyR3iQ==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-riscv64-musl@4.40.2': - resolution: {integrity: sha512-5W6vNYkhgfh7URiXTO1E9a0cy4fSgfE4+Hl5agb/U1sa0kjOLMLC1wObxwKxecE17j0URxuTrYZZME4/VH57Hg==} + '@rollup/rollup-linux-riscv64-musl@4.46.2': + resolution: {integrity: sha512-t5B2loThlFEauloaQkZg9gxV05BYeITLvLkWOkRXogP4qHXLkWSbSHKM9S6H1schf/0YGP/qNKtiISlxvfmmZw==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.40.2': - resolution: {integrity: sha512-B7LKIz+0+p348JoAL4X/YxGx9zOx3sR+o6Hj15Y3aaApNfAshK8+mWZEf759DXfRLeL2vg5LYJBB7DdcleYCoQ==} + '@rollup/rollup-linux-s390x-gnu@4.46.2': + resolution: {integrity: sha512-YKjekwTEKgbB7n17gmODSmJVUIvj8CX7q5442/CK80L8nqOUbMtf8b01QkG3jOqyr1rotrAnW6B/qiHwfcuWQA==} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.40.2': - resolution: {integrity: sha512-lG7Xa+BmBNwpjmVUbmyKxdQJ3Q6whHjMjzQplOs5Z+Gj7mxPtWakGHqzMqNER68G67kmCX9qX57aRsW5V0VOng==} + '@rollup/rollup-linux-x64-gnu@4.46.2': + resolution: {integrity: sha512-Jj5a9RUoe5ra+MEyERkDKLwTXVu6s3aACP51nkfnK9wJTraCC8IMe3snOfALkrjTYd2G1ViE1hICj0fZ7ALBPA==} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.40.2': - resolution: {integrity: sha512-tD46wKHd+KJvsmije4bUskNuvWKFcTOIM9tZ/RrmIvcXnbi0YK/cKS9FzFtAm7Oxi2EhV5N2OpfFB348vSQRXA==} + '@rollup/rollup-linux-x64-musl@4.46.2': + resolution: {integrity: sha512-7kX69DIrBeD7yNp4A5b81izs8BqoZkCIaxQaOpumcJ1S/kmqNFjPhDu1LHeVXv0SexfHQv5cqHsxLOjETuqDuA==} cpu: [x64] os: [linux] - '@rollup/rollup-win32-arm64-msvc@4.40.2': - resolution: {integrity: sha512-Bjv/HG8RRWLNkXwQQemdsWw4Mg+IJ29LK+bJPW2SCzPKOUaMmPEppQlu/Fqk1d7+DX3V7JbFdbkh/NMmurT6Pg==} + '@rollup/rollup-win32-arm64-msvc@4.46.2': + resolution: {integrity: sha512-wiJWMIpeaak/jsbaq2HMh/rzZxHVW1rU6coyeNNpMwk5isiPjSTx0a4YLSlYDwBH/WBvLz+EtsNqQScZTLJy3g==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.40.2': - resolution: {integrity: 
sha512-dt1llVSGEsGKvzeIO76HToiYPNPYPkmjhMHhP00T9S4rDern8P2ZWvWAQUEJ+R1UdMWJ/42i/QqJ2WV765GZcA==} + '@rollup/rollup-win32-ia32-msvc@4.46.2': + resolution: {integrity: sha512-gBgaUDESVzMgWZhcyjfs9QFK16D8K6QZpwAaVNJxYDLHWayOta4ZMjGm/vsAEy3hvlS2GosVFlBlP9/Wb85DqQ==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.40.2': - resolution: {integrity: sha512-bwspbWB04XJpeElvsp+DCylKfF4trJDa2Y9Go8O6A7YLX2LIKGcNK/CYImJN6ZP4DcuOHB4Utl3iCbnR62DudA==} + '@rollup/rollup-win32-x64-msvc@4.46.2': + resolution: {integrity: sha512-CvUo2ixeIQGtF6WvuB87XWqPQkoFAFqW+HUo/WzHwuHDvIwZCtjdWXoYCcr06iKGydiqTclC4jU/TNObC/xKZg==} cpu: [x64] os: [win32] '@rtsao/scc@1.1.0': resolution: {integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==} - '@sentry-internal/feedback@7.120.3': - resolution: {integrity: sha512-ewJJIQ0mbsOX6jfiVFvqMjokxNtgP3dNwUv+4nenN+iJJPQsM6a0ocro3iscxwVdbkjw5hY3BUV2ICI5Q0UWoA==} + '@sentry-internal/feedback@7.120.4': + resolution: {integrity: sha512-eSwgvTdrh03zYYaI6UVOjI9p4VmKg6+c2+CBQfRZX++6wwnCVsNv7XF7WUIpVGBAkJ0N2oapjQmCzJKGKBRWQg==} engines: {node: '>=12'} - '@sentry-internal/replay-canvas@7.120.3': - resolution: {integrity: sha512-s5xy+bVL1eDZchM6gmaOiXvTqpAsUfO7122DxVdEDMtwVq3e22bS2aiGa8CUgOiJkulZ+09q73nufM77kOmT/A==} + '@sentry-internal/replay-canvas@7.120.4': + resolution: {integrity: sha512-2+W4CgUL1VzrPjArbTid4WhKh7HH21vREVilZdvffQPVwOEpgNTPAb69loQuTlhJVveh9hWTj2nE5UXLbLP+AA==} engines: {node: '>=12'} - '@sentry-internal/tracing@7.120.3': - resolution: {integrity: sha512-Ausx+Jw1pAMbIBHStoQ6ZqDZR60PsCByvHdw/jdH9AqPrNE9xlBSf9EwcycvmrzwyKspSLaB52grlje2cRIUMg==} + '@sentry-internal/tracing@7.120.4': + resolution: {integrity: sha512-Fz5+4XCg3akeoFK+K7g+d7HqGMjmnLoY2eJlpONJmaeT9pXY7yfUyXKZMmMajdE2LxxKJgQ2YKvSCaGVamTjHw==} engines: {node: '>=8'} - '@sentry/babel-plugin-component-annotate@2.23.0': - resolution: {integrity: sha512-+uLqaCKeYmH/W2YUV1XHkFEtpHdx/aFjCQahPVsvXyqg13dfkR6jaygPL4DB5DJtUSmPFCUE3MEk9ZO5JlhJYg==} + '@sentry/babel-plugin-component-annotate@2.23.1': + resolution: {integrity: sha512-l1z8AvI6k9I+2z49OgvP3SlzB1M0Lw24KtceiJibNaSyQwxsItoT9/XftZ/8BBtkosVmNOTQhL1eUsSkuSv1LA==} engines: {node: '>= 14'} - '@sentry/browser@7.120.3': - resolution: {integrity: sha512-i9vGcK9N8zZ/JQo1TCEfHHYZ2miidOvgOABRUc9zQKhYdcYQB2/LU1kqlj77Pxdxf4wOa9137d6rPrSn9iiBxg==} + '@sentry/browser@7.120.4': + resolution: {integrity: sha512-ymlNtIPG6HAKzM/JXpWVGCzCNufZNADfy+O/olZuVJW5Be1DtOFyRnBvz0LeKbmxJbXb2lX/XMhuen6PXPdoQw==} engines: {node: '>=8'} - '@sentry/bundler-plugin-core@2.23.0': - resolution: {integrity: sha512-Qbw+jZFK63w+V193l0eCFKLzGba2Iu93Fx8kCRzZ3uqjky002H8U3pu4mKgcc11J+u8QTjfNZGUyXsxz0jv2mg==} + '@sentry/bundler-plugin-core@2.23.1': + resolution: {integrity: sha512-JA6utNiwMKv6Jfj0Hmk0DI/XUizSHg7HhhkFETKhRlYEhZAdkyz1atDBg0ncKNgRBKyHeSYWcMFtUyo26VB76w==} engines: {node: '>= 14'} '@sentry/cli-darwin@2.39.1': @@ -1602,71 +1610,71 @@ packages: engines: {node: '>= 10'} hasBin: true - '@sentry/core@7.120.3': - resolution: {integrity: sha512-vyy11fCGpkGK3qI5DSXOjgIboBZTriw0YDx/0KyX5CjIjDDNgp5AGgpgFkfZyiYiaU2Ww3iFuKo4wHmBusz1uA==} + '@sentry/core@7.120.4': + resolution: {integrity: sha512-TXu3Q5kKiq8db9OXGkWyXUbIxMMuttB5vJ031yolOl5T/B69JRyAoKuojLBjRv1XX583gS1rSSoX8YXX7ATFGA==} engines: {node: '>=8'} - '@sentry/integrations@7.120.3': - resolution: {integrity: sha512-6i/lYp0BubHPDTg91/uxHvNui427df9r17SsIEXa2eKDwQ9gW2qRx5IWgvnxs2GV/GfSbwcx4swUB3RfEWrXrQ==} + '@sentry/integrations@7.120.4': + resolution: {integrity: 
sha512-kkBTLk053XlhDCg7OkBQTIMF4puqFibeRO3E3YiVc4PGLnocXMaVpOSCkMqAc1k1kZ09UgGi8DxfQhnFEjUkpA==} engines: {node: '>=8'} - '@sentry/react@7.120.3': - resolution: {integrity: sha512-BcpoK9dwblfb20xwjn/1DRtplvPEXFc3XCRkYSnTfnfZNU8yPOcVX4X2X0I8R+/gsg+MWiFOdEtXJ3FqpJiJ4Q==} + '@sentry/react@7.120.4': + resolution: {integrity: sha512-Pj1MSezEncE+5riuwsk8peMncuz5HR72Yr1/RdZhMZvUxoxAR/tkwD3aPcK6ddQJTagd2TGwhdr9SHuDLtONew==} engines: {node: '>=8'} peerDependencies: react: 15.x || 16.x || 17.x || 18.x - '@sentry/replay@7.120.3': - resolution: {integrity: sha512-CjVq1fP6bpDiX8VQxudD5MPWwatfXk8EJ2jQhJTcWu/4bCSOQmHxnnmBM+GVn5acKUBCodWHBN+IUZgnJheZSg==} + '@sentry/replay@7.120.4': + resolution: {integrity: sha512-FW8sPenNFfnO/K7sncsSTX4rIVak9j7VUiLIagJrcqZIC7d1dInFNjy8CdVJUlyz3Y3TOgIl3L3+ZpjfyMnaZg==} engines: {node: '>=12'} - '@sentry/types@7.120.3': - resolution: {integrity: sha512-C4z+3kGWNFJ303FC+FxAd4KkHvxpNFYAFN8iMIgBwJdpIl25KZ8Q/VdGn0MLLUEHNLvjob0+wvwlcRBBNLXOow==} + '@sentry/types@7.120.4': + resolution: {integrity: sha512-cUq2hSSe6/qrU6oZsEP4InMI5VVdD86aypE+ENrQ6eZEVLTCYm1w6XhW1NvIu3UuWh7gZec4a9J7AFpYxki88Q==} engines: {node: '>=8'} - '@sentry/utils@7.120.3': - resolution: {integrity: sha512-UDAOQJtJDxZHQ5Nm1olycBIsz2wdGX8SdzyGVHmD8EOQYAeDZQyIlQYohDe9nazdIOQLZCIc3fU0G9gqVLkaGQ==} + '@sentry/utils@7.120.4': + resolution: {integrity: sha512-zCKpyDIWKHwtervNK2ZlaK8mMV7gVUijAgFeJStH+CU/imcdquizV3pFLlSQYRswG+Lbyd6CT/LGRh3IbtkCFw==} engines: {node: '>=8'} - '@sentry/vite-plugin@2.23.0': - resolution: {integrity: sha512-iLbqxan3DUkFJqbx7DOtJ2fTd6g+TmNS1PIdaDFfpvVG4Lg9AYp4Xege6BBCrGQYl+wUE3poWfNhASfch/s51Q==} + '@sentry/vite-plugin@2.23.1': + resolution: {integrity: sha512-avtjtIQ019sZW3FklpmNNsQOnYZjCHpnVxgDGElfZb+AaR4AvtHNlxXLJp+iqEfSK+Xok8MJarJqIgCaWcF40Q==} engines: {node: '>= 14'} - '@shikijs/core@3.4.0': - resolution: {integrity: sha512-0YOzTSRDn/IAfQWtK791gs1u8v87HNGToU6IwcA3K7nPoVOrS2Dh6X6A6YfXgPTSkTwR5y6myk0MnI0htjnwrA==} + '@shikijs/core@3.9.2': + resolution: {integrity: sha512-3q/mzmw09B2B6PgFNeiaN8pkNOixWS726IHmJEpjDAcneDPMQmUg2cweT9cWXY4XcyQS3i6mOOUgQz9RRUP6HA==} - '@shikijs/engine-javascript@3.4.0': - resolution: {integrity: sha512-1ywDoe+z/TPQKj9Jw0eU61B003J9DqUFRfH+DVSzdwPUFhR7yOmfyLzUrFz0yw8JxFg/NgzXoQyyykXgO21n5Q==} + '@shikijs/engine-javascript@3.9.2': + resolution: {integrity: sha512-kUTRVKPsB/28H5Ko6qEsyudBiWEDLst+Sfi+hwr59E0GLHV0h8RfgbQU7fdN5Lt9A8R1ulRiZyTvAizkROjwDA==} - '@shikijs/engine-oniguruma@3.4.0': - resolution: {integrity: sha512-zwcWlZ4OQuJ/+1t32ClTtyTU1AiDkK1lhtviRWoq/hFqPjCNyLj22bIg9rB7BfoZKOEOfrsGz7No33BPCf+WlQ==} + '@shikijs/engine-oniguruma@3.9.2': + resolution: {integrity: sha512-Vn/w5oyQ6TUgTVDIC/BrpXwIlfK6V6kGWDVVz2eRkF2v13YoENUvaNwxMsQU/t6oCuZKzqp9vqtEtEzKl9VegA==} - '@shikijs/langs@3.4.0': - resolution: {integrity: sha512-bQkR+8LllaM2duU9BBRQU0GqFTx7TuF5kKlw/7uiGKoK140n1xlLAwCgXwSxAjJ7Htk9tXTFwnnsJTCU5nDPXQ==} + '@shikijs/langs@3.9.2': + resolution: {integrity: sha512-X1Q6wRRQXY7HqAuX3I8WjMscjeGjqXCg/Sve7J2GWFORXkSrXud23UECqTBIdCSNKJioFtmUGJQNKtlMMZMn0w==} - '@shikijs/themes@3.4.0': - resolution: {integrity: sha512-YPP4PKNFcFGLxItpbU0ZW1Osyuk8AyZ24YEFaq04CFsuCbcqydMvMUTi40V2dkc0qs1U2uZFrnU6s5zI6IH+uA==} + '@shikijs/themes@3.9.2': + resolution: {integrity: sha512-6z5lBPBMRfLyyEsgf6uJDHPa6NAGVzFJqH4EAZ+03+7sedYir2yJBRu2uPZOKmj43GyhVHWHvyduLDAwJQfDjA==} - '@shikijs/types@3.4.0': - resolution: {integrity: sha512-EUT/0lGiE//7j5N/yTMNMT3eCWNcHJLrRKxT0NDXWIfdfSmFJKfPX7nMmRBrQnWboAzIsUziCThrYMMhjbMS1A==} + '@shikijs/types@3.9.2': + resolution: {integrity: 
sha512-/M5L0Uc2ljyn2jKvj4Yiah7ow/W+DJSglVafvWAJ/b8AZDeeRAdMu3c2riDzB7N42VD+jSnWxeP9AKtd4TfYVw==} '@shikijs/vscode-textmate@10.0.2': resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==} - '@tanstack/query-core@5.75.5': - resolution: {integrity: sha512-kPDOxtoMn2Ycycb76Givx2fi+2pzo98F9ifHL/NFiahEDpDwSVW6o12PRuQ0lQnBOunhRG5etatAhQij91M3MQ==} + '@tanstack/query-core@5.85.3': + resolution: {integrity: sha512-9Ne4USX83nHmRuEYs78LW+3lFEEO2hBDHu7mrdIgAFx5Zcrs7ker3n/i8p4kf6OgKExmaDN5oR0efRD7i2J0DQ==} - '@tanstack/query-devtools@5.74.7': - resolution: {integrity: sha512-nSNlfuGdnHf4yB0S+BoNYOE1o3oAH093weAYZolIHfS2stulyA/gWfSk/9H4ZFk5mAAHb5vNqAeJOmbdcGPEQw==} + '@tanstack/query-devtools@5.84.0': + resolution: {integrity: sha512-fbF3n+z1rqhvd9EoGp5knHkv3p5B2Zml1yNRjh7sNXklngYI5RVIWUrUjZ1RIcEoscarUb0+bOvIs5x9dwzOXQ==} - '@tanstack/react-query-devtools@5.75.5': - resolution: {integrity: sha512-S31U00nJOQIbxydRH1kOwdLRaLBrda8O5QjzmgkRg60UZzPGdbI6+873Qa0YGUfPeILDbR2ukgWyg7CJQPy4iA==} + '@tanstack/react-query-devtools@5.85.3': + resolution: {integrity: sha512-WSVweCE1Kh1BVvPDHAmLgGT+GGTJQ9+a7bVqzD+zUiUTht+salJjYm5nikpMNaHFPJV102TCYdvgHgBXtURRNg==} peerDependencies: - '@tanstack/react-query': ^5.75.5 + '@tanstack/react-query': ^5.85.3 react: ^18 || ^19 - '@tanstack/react-query@5.75.5': - resolution: {integrity: sha512-QrLCJe40BgBVlWdAdf2ZEVJ0cISOuEy/HKupId1aTKU6gPJZVhSvZpH+Si7csRflCJphzlQ77Yx6gUxGW9o0XQ==} + '@tanstack/react-query@5.85.3': + resolution: {integrity: sha512-AqU8TvNh5GVIE8I+TUU0noryBRy7gOY0XhSayVXmOPll4UkZeLWKDwi0rtWOZbwLRCbyxorfJ5DIjDqE7GXpcQ==} peerDependencies: react: ^18 || ^19 @@ -1681,8 +1689,8 @@ packages: resolution: {integrity: sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg==} engines: {node: '>=12'} - '@tybys/wasm-util@0.9.0': - resolution: {integrity: sha512-6+7nlbMVX/PVDCwaIQ8nTOPveOcFLSt8GcXdx8hD0bt39uWxYT88uXzqTd4fTvqta7oeUJqudepapKNt2DYJFw==} + '@tybys/wasm-util@0.10.0': + resolution: {integrity: sha512-VyyPYFlOMNylG45GoAe0xDoLwWuowvf92F9kySqzYh8vmYm7D2u4iUJKa1tOUpS70Ku13ASrOkS4ScXFsTaCNQ==} '@types/babel__core@7.20.5': resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} @@ -1693,8 +1701,8 @@ packages: '@types/babel__template@7.4.4': resolution: {integrity: sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==} - '@types/babel__traverse@7.20.7': - resolution: {integrity: sha512-dkO5fhS7+/oos4ciWxyEyjWe48zmG6wbCheo/G2ZnHx4fs3EU6YC6UM8rk56gAjNJ9P3MTH2jo5jb92/K6wbng==} + '@types/babel__traverse@7.28.0': + resolution: {integrity: sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==} '@types/d3-array@3.0.3': resolution: {integrity: sha512-Reoy+pKnvsksN0lQUlcH6dOGjRZ/3WRwXR//m+/8lt1BXeI4xyaUZoqULNjyXXRuh0Mj4LNpkCvhUpQlY3X5xQ==} @@ -1726,8 +1734,8 @@ packages: '@types/d3-delaunay@6.0.4': resolution: {integrity: sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==} - '@types/d3-dispatch@3.0.6': - resolution: {integrity: sha512-4fvZhzMeeuBJYZXRXrRIQnvUYfyXwYmLsdiN7XXmVNQKKw1cM8a5WdID0g1hVFZDqT9ZqZEY5pD44p24VS7iZQ==} + '@types/d3-dispatch@3.0.7': + resolution: {integrity: sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==} '@types/d3-drag@3.0.7': resolution: {integrity: 
sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==} @@ -1822,8 +1830,8 @@ packages: '@types/d3@7.4.3': resolution: {integrity: sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==} - '@types/dagre@0.7.52': - resolution: {integrity: sha512-XKJdy+OClLk3hketHi9Qg6gTfe1F3y+UFnHxKA2rn9Dw+oXa4Gb378Ztz9HlMgZKSxpPmn4BNVh9wgkpvrK1uw==} + '@types/dagre@0.7.53': + resolution: {integrity: sha512-f4gkWqzPZvYmKhOsDnhq/R8mO4UMcKdxZo+i5SCkOU1wvGeHJeUXGIHeE9pnwGyPMDof1Vx5ZQo4nxpeg2TTVQ==} '@types/dompurify@3.2.0': resolution: {integrity: sha512-Fgg31wv9QbLDA0SpTOXO3MaxySc4DKGLi8sna4/Utjo4r3ZRPdCt4UQee8BWr+Q5z21yifghREPJGYaEOEIACg==} @@ -1832,8 +1840,8 @@ packages: '@types/eslint@8.56.12': resolution: {integrity: sha512-03ruubjWyOHlmljCVoxSuNDdmfZDzsrrz0P2LeJsOXr+ZwFQ+0yQIwNCwt/GYhV7Z31fgtXJTAEs+FYlEL851g==} - '@types/estree@1.0.7': - resolution: {integrity: sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==} + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} '@types/geojson@7946.0.16': resolution: {integrity: sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==} @@ -1856,17 +1864,17 @@ packages: '@types/mdast@4.0.4': resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} - '@types/node@20.17.43': - resolution: {integrity: sha512-DnDEcDUnVAUYSa7U03QvrXbj1MZj00xoyi/a3lRGkR/c7BFUnqv+OY9EUphMqXUKdZJEOmuzu2mm+LmCisnPow==} + '@types/node@20.19.10': + resolution: {integrity: sha512-iAFpG6DokED3roLSP0K+ybeDdIX6Bc0Vd3mLW5uDqThPWtNos3E+EqOM11mPQHKzfWHqEBuLjIlsBQQ8CsISmQ==} '@types/prismjs@1.26.5': resolution: {integrity: sha512-AUZTa7hQ2KY5L7AmtSiqxlhWxb4ina0yd8hNbl4TWuqnv/pFP0nDMb3YrfSBf4hJVGLh2YEIBfKaBW/9UEl6IQ==} - '@types/prop-types@15.7.14': - resolution: {integrity: sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ==} + '@types/prop-types@15.7.15': + resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==} - '@types/qs@6.9.18': - resolution: {integrity: sha512-kK7dgTYDyGqS+e2Q4aK9X3D7q234CIZ1Bv0q/7Z5IwRDoADNU81xXJK/YVyLbLTZCoIwUoDoffFeF+p/eIklAA==} + '@types/qs@6.14.0': + resolution: {integrity: sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==} '@types/react-dom@18.3.7': resolution: {integrity: sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==} @@ -1876,8 +1884,8 @@ packages: '@types/react-syntax-highlighter@15.5.13': resolution: {integrity: sha512-uLGJ87j6Sz8UaBAooU0T6lWJ0dBmjZgN1PZTrj05TNql2/XpC6+4HhMT5syIdFUUt+FASfCeLLv4kBygNU+8qA==} - '@types/react@18.3.21': - resolution: {integrity: sha512-gXLBtmlcRJeT09/sI4PxVwyrku6SaNUj/6cMubjE6T6XdY1fDmBL7r0nX0jbSZPU/Xr0KuwLLZh6aOYY5d91Xw==} + '@types/react@18.3.23': + resolution: {integrity: sha512-/LDXMQh55EzZQ0uVAZmKKhfENivEvWz6E+EYzh+/MCjMhNsotd+ZHhBGIjFDTi6+fz0OhQQQLbTgdQIxxCsC0w==} '@types/semver@7.7.0': resolution: {integrity: sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA==} @@ -1955,88 +1963,98 @@ packages: '@ungap/structured-clone@1.3.0': resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} - 
'@unrs/resolver-binding-darwin-arm64@1.7.2': - resolution: {integrity: sha512-vxtBno4xvowwNmO/ASL0Y45TpHqmNkAaDtz4Jqb+clmcVSSl8XCG/PNFFkGsXXXS6AMjP+ja/TtNCFFa1QwLRg==} + '@unrs/resolver-binding-android-arm-eabi@1.11.1': + resolution: {integrity: sha512-ppLRUgHVaGRWUx0R0Ut06Mjo9gBaBkg3v/8AxusGLhsIotbBLuRk51rAzqLC8gq6NyyAojEXglNjzf6R948DNw==} + cpu: [arm] + os: [android] + + '@unrs/resolver-binding-android-arm64@1.11.1': + resolution: {integrity: sha512-lCxkVtb4wp1v+EoN+HjIG9cIIzPkX5OtM03pQYkG+U5O/wL53LC4QbIeazgiKqluGeVEeBlZahHalCaBvU1a2g==} + cpu: [arm64] + os: [android] + + '@unrs/resolver-binding-darwin-arm64@1.11.1': + resolution: {integrity: sha512-gPVA1UjRu1Y/IsB/dQEsp2V1pm44Of6+LWvbLc9SDk1c2KhhDRDBUkQCYVWe6f26uJb3fOK8saWMgtX8IrMk3g==} cpu: [arm64] os: [darwin] - '@unrs/resolver-binding-darwin-x64@1.7.2': - resolution: {integrity: sha512-qhVa8ozu92C23Hsmv0BF4+5Dyyd5STT1FolV4whNgbY6mj3kA0qsrGPe35zNR3wAN7eFict3s4Rc2dDTPBTuFQ==} + '@unrs/resolver-binding-darwin-x64@1.11.1': + resolution: {integrity: sha512-cFzP7rWKd3lZaCsDze07QX1SC24lO8mPty9vdP+YVa3MGdVgPmFc59317b2ioXtgCMKGiCLxJ4HQs62oz6GfRQ==} cpu: [x64] os: [darwin] - '@unrs/resolver-binding-freebsd-x64@1.7.2': - resolution: {integrity: sha512-zKKdm2uMXqLFX6Ac7K5ElnnG5VIXbDlFWzg4WJ8CGUedJryM5A3cTgHuGMw1+P5ziV8CRhnSEgOnurTI4vpHpg==} + '@unrs/resolver-binding-freebsd-x64@1.11.1': + resolution: {integrity: sha512-fqtGgak3zX4DCB6PFpsH5+Kmt/8CIi4Bry4rb1ho6Av2QHTREM+47y282Uqiu3ZRF5IQioJQ5qWRV6jduA+iGw==} cpu: [x64] os: [freebsd] - '@unrs/resolver-binding-linux-arm-gnueabihf@1.7.2': - resolution: {integrity: sha512-8N1z1TbPnHH+iDS/42GJ0bMPLiGK+cUqOhNbMKtWJ4oFGzqSJk/zoXFzcQkgtI63qMcUI7wW1tq2usZQSb2jxw==} + '@unrs/resolver-binding-linux-arm-gnueabihf@1.11.1': + resolution: {integrity: sha512-u92mvlcYtp9MRKmP+ZvMmtPN34+/3lMHlyMj7wXJDeXxuM0Vgzz0+PPJNsro1m3IZPYChIkn944wW8TYgGKFHw==} cpu: [arm] os: [linux] - '@unrs/resolver-binding-linux-arm-musleabihf@1.7.2': - resolution: {integrity: sha512-tjYzI9LcAXR9MYd9rO45m1s0B/6bJNuZ6jeOxo1pq1K6OBuRMMmfyvJYval3s9FPPGmrldYA3mi4gWDlWuTFGA==} + '@unrs/resolver-binding-linux-arm-musleabihf@1.11.1': + resolution: {integrity: sha512-cINaoY2z7LVCrfHkIcmvj7osTOtm6VVT16b5oQdS4beibX2SYBwgYLmqhBjA1t51CarSaBuX5YNsWLjsqfW5Cw==} cpu: [arm] os: [linux] - '@unrs/resolver-binding-linux-arm64-gnu@1.7.2': - resolution: {integrity: sha512-jon9M7DKRLGZ9VYSkFMflvNqu9hDtOCEnO2QAryFWgT6o6AXU8du56V7YqnaLKr6rAbZBWYsYpikF226v423QA==} + '@unrs/resolver-binding-linux-arm64-gnu@1.11.1': + resolution: {integrity: sha512-34gw7PjDGB9JgePJEmhEqBhWvCiiWCuXsL9hYphDF7crW7UgI05gyBAi6MF58uGcMOiOqSJ2ybEeCvHcq0BCmQ==} cpu: [arm64] os: [linux] - '@unrs/resolver-binding-linux-arm64-musl@1.7.2': - resolution: {integrity: sha512-c8Cg4/h+kQ63pL43wBNaVMmOjXI/X62wQmru51qjfTvI7kmCy5uHTJvK/9LrF0G8Jdx8r34d019P1DVJmhXQpA==} + '@unrs/resolver-binding-linux-arm64-musl@1.11.1': + resolution: {integrity: sha512-RyMIx6Uf53hhOtJDIamSbTskA99sPHS96wxVE/bJtePJJtpdKGXO1wY90oRdXuYOGOTuqjT8ACccMc4K6QmT3w==} cpu: [arm64] os: [linux] - '@unrs/resolver-binding-linux-ppc64-gnu@1.7.2': - resolution: {integrity: sha512-A+lcwRFyrjeJmv3JJvhz5NbcCkLQL6Mk16kHTNm6/aGNc4FwPHPE4DR9DwuCvCnVHvF5IAd9U4VIs/VvVir5lg==} + '@unrs/resolver-binding-linux-ppc64-gnu@1.11.1': + resolution: {integrity: sha512-D8Vae74A4/a+mZH0FbOkFJL9DSK2R6TFPC9M+jCWYia/q2einCubX10pecpDiTmkJVUH+y8K3BZClycD8nCShA==} cpu: [ppc64] os: [linux] - '@unrs/resolver-binding-linux-riscv64-gnu@1.7.2': - resolution: {integrity: sha512-hQQ4TJQrSQW8JlPm7tRpXN8OCNP9ez7PajJNjRD1ZTHQAy685OYqPrKjfaMw/8LiHCt8AZ74rfUVHP9vn0N69Q==} + 
'@unrs/resolver-binding-linux-riscv64-gnu@1.11.1': + resolution: {integrity: sha512-frxL4OrzOWVVsOc96+V3aqTIQl1O2TjgExV4EKgRY09AJ9leZpEg8Ak9phadbuX0BA4k8U5qtvMSQQGGmaJqcQ==} cpu: [riscv64] os: [linux] - '@unrs/resolver-binding-linux-riscv64-musl@1.7.2': - resolution: {integrity: sha512-NoAGbiqrxtY8kVooZ24i70CjLDlUFI7nDj3I9y54U94p+3kPxwd2L692YsdLa+cqQ0VoqMWoehDFp21PKRUoIQ==} + '@unrs/resolver-binding-linux-riscv64-musl@1.11.1': + resolution: {integrity: sha512-mJ5vuDaIZ+l/acv01sHoXfpnyrNKOk/3aDoEdLO/Xtn9HuZlDD6jKxHlkN8ZhWyLJsRBxfv9GYM2utQ1SChKew==} cpu: [riscv64] os: [linux] - '@unrs/resolver-binding-linux-s390x-gnu@1.7.2': - resolution: {integrity: sha512-KaZByo8xuQZbUhhreBTW+yUnOIHUsv04P8lKjQ5otiGoSJ17ISGYArc+4vKdLEpGaLbemGzr4ZeUbYQQsLWFjA==} + '@unrs/resolver-binding-linux-s390x-gnu@1.11.1': + resolution: {integrity: sha512-kELo8ebBVtb9sA7rMe1Cph4QHreByhaZ2QEADd9NzIQsYNQpt9UkM9iqr2lhGr5afh885d/cB5QeTXSbZHTYPg==} cpu: [s390x] os: [linux] - '@unrs/resolver-binding-linux-x64-gnu@1.7.2': - resolution: {integrity: sha512-dEidzJDubxxhUCBJ/SHSMJD/9q7JkyfBMT77Px1npl4xpg9t0POLvnWywSk66BgZS/b2Hy9Y1yFaoMTFJUe9yg==} + '@unrs/resolver-binding-linux-x64-gnu@1.11.1': + resolution: {integrity: sha512-C3ZAHugKgovV5YvAMsxhq0gtXuwESUKc5MhEtjBpLoHPLYM+iuwSj3lflFwK3DPm68660rZ7G8BMcwSro7hD5w==} cpu: [x64] os: [linux] - '@unrs/resolver-binding-linux-x64-musl@1.7.2': - resolution: {integrity: sha512-RvP+Ux3wDjmnZDT4XWFfNBRVG0fMsc+yVzNFUqOflnDfZ9OYujv6nkh+GOr+watwrW4wdp6ASfG/e7bkDradsw==} + '@unrs/resolver-binding-linux-x64-musl@1.11.1': + resolution: {integrity: sha512-rV0YSoyhK2nZ4vEswT/QwqzqQXw5I6CjoaYMOX0TqBlWhojUf8P94mvI7nuJTeaCkkds3QE4+zS8Ko+GdXuZtA==} cpu: [x64] os: [linux] - '@unrs/resolver-binding-wasm32-wasi@1.7.2': - resolution: {integrity: sha512-y797JBmO9IsvXVRCKDXOxjyAE4+CcZpla2GSoBQ33TVb3ILXuFnMrbR/QQZoauBYeOFuu4w3ifWLw52sdHGz6g==} + '@unrs/resolver-binding-wasm32-wasi@1.11.1': + resolution: {integrity: sha512-5u4RkfxJm+Ng7IWgkzi3qrFOvLvQYnPBmjmZQ8+szTK/b31fQCnleNl1GgEt7nIsZRIf5PLhPwT0WM+q45x/UQ==} engines: {node: '>=14.0.0'} cpu: [wasm32] - '@unrs/resolver-binding-win32-arm64-msvc@1.7.2': - resolution: {integrity: sha512-gtYTh4/VREVSLA+gHrfbWxaMO/00y+34htY7XpioBTy56YN2eBjkPrY1ML1Zys89X3RJDKVaogzwxlM1qU7egg==} + '@unrs/resolver-binding-win32-arm64-msvc@1.11.1': + resolution: {integrity: sha512-nRcz5Il4ln0kMhfL8S3hLkxI85BXs3o8EYoattsJNdsX4YUU89iOkVn7g0VHSRxFuVMdM4Q1jEpIId1Ihim/Uw==} cpu: [arm64] os: [win32] - '@unrs/resolver-binding-win32-ia32-msvc@1.7.2': - resolution: {integrity: sha512-Ywv20XHvHTDRQs12jd3MY8X5C8KLjDbg/jyaal/QLKx3fAShhJyD4blEANInsjxW3P7isHx1Blt56iUDDJO3jg==} + '@unrs/resolver-binding-win32-ia32-msvc@1.11.1': + resolution: {integrity: sha512-DCEI6t5i1NmAZp6pFonpD5m7i6aFrpofcp4LA2i8IIq60Jyo28hamKBxNrZcyOwVOZkgsRp9O2sXWBWP8MnvIQ==} cpu: [ia32] os: [win32] - '@unrs/resolver-binding-win32-x64-msvc@1.7.2': - resolution: {integrity: sha512-friS8NEQfHaDbkThxopGk+LuE5v3iY0StruifjQEt7SLbA46OnfgMO15sOTkbpJkol6RB+1l1TYPXh0sCddpvA==} + '@unrs/resolver-binding-win32-x64-msvc@1.11.1': + resolution: {integrity: sha512-lrW200hZdbfRtztbygyaq/6jP6AKE8qQN2KvPcJ+x7wiD038YtnYtZ82IMNJ69GJibV7bwL3y9FgK+5w/pYt6g==} cpu: [x64] os: [win32] @@ -2120,19 +2138,19 @@ packages: '@visx/vendor@3.12.0': resolution: {integrity: sha512-SVO+G0xtnL9dsNpGDcjCgoiCnlB3iLSM9KLz1sLbSrV7RaVXwY3/BTm2X9OWN1jH2a9M+eHt6DJ6sE6CXm4cUg==} - '@vitejs/plugin-react@4.4.1': - resolution: {integrity: sha512-IpEm5ZmeXAP/osiBXVVP5KjFMzbWOonMs0NaQQl+xYnUAcq4oHUBsF2+p4MgKWG4YMmFYJU8A6sxRPuowllm6w==} + '@vitejs/plugin-react@4.7.0': + 
resolution: {integrity: sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==} engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: - vite: ^4.2.0 || ^5.0.0 || ^6.0.0 + vite: ^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 acorn-jsx@5.3.2: resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - acorn@8.14.1: - resolution: {integrity: sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==} + acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} engines: {node: '>=0.4.0'} hasBin: true @@ -2188,16 +2206,16 @@ packages: argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - aria-hidden@1.2.4: - resolution: {integrity: sha512-y+CcFFwelSXpLZk/7fMB2mUbGtX9lKycf1MWJ7CaTIERyitVlyQx6C+sxcROU2BAJ24OiZyK+8wj2i8AlBoS3A==} + aria-hidden@1.2.6: + resolution: {integrity: sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==} engines: {node: '>=10'} array-buffer-byte-length@1.0.2: resolution: {integrity: sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==} engines: {node: '>= 0.4'} - array-includes@3.1.8: - resolution: {integrity: sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==} + array-includes@3.1.9: + resolution: {integrity: sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==} engines: {node: '>= 0.4'} array-union@2.1.0: @@ -2259,23 +2277,23 @@ packages: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} - brace-expansion@1.1.11: - resolution: {integrity: sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==} + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} - brace-expansion@2.0.1: - resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} - browserslist@4.24.5: - resolution: {integrity: sha512-FDToo4Wo82hIdgc1CQ+NQD0hEhmpPjrZ3hiUgwgOG6IuTdlpr8jdjyG24P6cNP1yJpTLzS5OcGgSw0xmDU1/Tw==} + browserslist@4.25.2: + resolution: {integrity: sha512-0si2SJK3ooGzIawRu61ZdPCO1IncZwS8IzuX73sPZsXW6EQ/w/DAfPyKI8l1ETTCr2MnvqWitmlCUxgdul45jA==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true - c12@3.0.3: - resolution: {integrity: sha512-uC3MacKBb0Z15o5QWCHvHWj5Zv34pGQj9P+iXKSpTuSGFS0KKhUWf4t9AJ+gWjYOdmWCPEGpEzm8sS0iqbpo1w==} + c12@3.2.0: + resolution: {integrity: sha512-ixkEtbYafL56E6HiFuonMm1ZjoKtIo7TH68/uiEq4DAwv9NcUX2nJ95F8TrbMeNjqIkZpruo3ojXQJ+MGG5gcQ==} peerDependencies: magicast: ^0.3.5 peerDependenciesMeta: @@ -2305,8 +2323,8 @@ packages: resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==} 
engines: {node: '>= 6'} - caniuse-lite@1.0.30001717: - resolution: {integrity: sha512-auPpttCq6BDEG8ZAuHJIplGw6GODhjw+/11e7IjpnYCxZcW/ONgPs0KVBJ0d1bY3e2+7PRe5RCLyP+PfwVgkYw==} + caniuse-lite@1.0.30001735: + resolution: {integrity: sha512-EV/laoX7Wq2J9TQlyIXRxTJqIw4sxfXS4OYgudGxBYRuTv0q7AM6yMEpU/Vo1I94thg9U6EZ2NfZx9GJq83u7w==} ccount@2.0.1: resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} @@ -2411,8 +2429,9 @@ packages: resolution: {integrity: sha512-p0SaNjrHOnQeR8/VnfGbmg9te2kfyYSQ7Sc/j/6DtPL3JQvKxmjO9TSjNFpujqV3vEYYBvNNvXSxzyksBWAx1Q==} engines: {node: '>=12.0.0'} - cronstrue@2.60.0: - resolution: {integrity: sha512-wyjlMlSaKyRjpDh1WP1Oqcy98zMT0OPJlBgffIr8Edx1/ZDpwkTb9IOzHL7CipyW5va7eJ9hC4D1X7gtq8Cuww==} + cronstrue@2.61.0: + resolution: {integrity: sha512-ootN5bvXbIQI9rW94+QsXN5eROtXWwew6NkdGxIRpS/UFWRggL0G5Al7a9GTBFEsuvVhJ2K3CntIIVt7L2ILhA==} + deprecated: Non-backwards compatible Breaking changes hasBin: true cross-spawn@7.0.6: @@ -2540,8 +2559,8 @@ packages: supports-color: optional: true - debug@4.4.0: - resolution: {integrity: sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==} + debug@4.4.1: + resolution: {integrity: sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==} engines: {node: '>=6.0'} peerDependencies: supports-color: '*' @@ -2607,11 +2626,15 @@ packages: dom-helpers@5.2.1: resolution: {integrity: sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==} - dompurify@3.2.5: - resolution: {integrity: sha512-mLPd29uoRe9HpvwP2TxClGQBzGXeEC/we/q+bFlmPPmj2p2Ugl3r6ATu/UU1v77DXNcehiBg9zsr1dREyA/dJQ==} + dompurify@3.2.6: + resolution: {integrity: sha512-/2GogDQlohXPZe6D6NOgQvXLPSYBqIWMnZ8zzOhn09REE4eyAzb+Hed3jhoM9OkuaJ8P6ZGTTVWQKAi8ieIzfQ==} - dotenv@16.5.0: - resolution: {integrity: sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg==} + dotenv@16.6.1: + resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==} + engines: {node: '>=12'} + + dotenv@17.2.1: + resolution: {integrity: sha512-kQhDYKZecqnM0fCnzI5eIv5L4cAe/iRI+HqMbO/hbRdTAeXDG+M9FjipUxNfbARuEg4iHIbhnhs78BCHNbSxEQ==} engines: {node: '>=12'} dunder-proto@1.0.1: @@ -2621,8 +2644,8 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - electron-to-chromium@1.5.150: - resolution: {integrity: sha512-rOOkP2ZUMx1yL4fCxXQKDHQ8ZXwisb2OycOQVKHgvB3ZI4CvehOd4y2tfnnLDieJ3Zs1RL1Dlp3cMkyIn7nnXA==} + electron-to-chromium@1.5.201: + resolution: {integrity: sha512-ZG65vsrLClodGqywuigc+7m0gr4ISoTQttfVh7nfpLv0M7SIwF4WbFNEOywcqTiujs12AUeeXbFyQieDICAIxg==} emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} @@ -2633,8 +2656,8 @@ packages: entities@2.2.0: resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==} - es-abstract@1.23.9: - resolution: {integrity: sha512-py07lI0wjxAC/DcfK1S6G7iANonniZwTISvdPzk9hzeH0IZIshbuuFxLIU96OyF89Yb9hiqWn8M/bY83KY5vzA==} + es-abstract@1.24.0: + resolution: {integrity: sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==} engines: {node: '>= 0.4'} es-define-property@1.0.1: @@ -2668,8 +2691,8 @@ packages: 
es6-promise@3.3.1: resolution: {integrity: sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==} - esbuild@0.25.4: - resolution: {integrity: sha512-8pgjLUcUjcgDg+2Q4NYXnPbo/vncAY4UmyaCm0jZevERqCHZIaWwdJHkf8XQtu4AxSKCdvrUbT0XUr1IdZzI8Q==} + esbuild@0.25.9: + resolution: {integrity: sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==} engines: {node: '>=18'} hasBin: true @@ -2696,8 +2719,8 @@ packages: eslint: ^7.32.0 || ^8.2.0 eslint-plugin-import: ^2.25.3 - eslint-config-prettier@9.1.0: - resolution: {integrity: sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==} + eslint-config-prettier@9.1.2: + resolution: {integrity: sha512-iI1f+D2ViGn+uvv5HuHVUamg8ll4tN+JRHGc6IJi4TP9Kl976C57fzPXgseXNs8v0iA8aSJpHsTWjDb9QJamGQ==} hasBin: true peerDependencies: eslint: '>=7.0.0' @@ -2718,8 +2741,8 @@ packages: eslint-plugin-import-x: optional: true - eslint-module-utils@2.12.0: - resolution: {integrity: sha512-wALZ0HFoytlyh/1+4wuZ9FJCD/leWHQzzrxJ8+rebyReSLk7LApMyd3WJaLVoN+D5+WIdJyDK1c6JnE65V4Zyg==} + eslint-module-utils@2.12.1: + resolution: {integrity: sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==} engines: {node: '>=4'} peerDependencies: '@typescript-eslint/parser': '*' @@ -2739,8 +2762,8 @@ packages: eslint-import-resolver-webpack: optional: true - eslint-plugin-import@2.31.0: - resolution: {integrity: sha512-ixmkI62Rbc2/w8Vfxyh1jQRTdRTF52VxwRVHl/ykPAmqG+Nb7/kNn+byLP0LxPgI7zWA16Jt82SybJInmMia3A==} + eslint-plugin-import@2.32.0: + resolution: {integrity: sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==} engines: {node: '>=4'} peerDependencies: '@typescript-eslint/parser': '*' @@ -2749,8 +2772,8 @@ packages: '@typescript-eslint/parser': optional: true - eslint-plugin-prettier@5.4.0: - resolution: {integrity: sha512-BvQOvUhkVQM1i63iMETK9Hjud9QhqBnbtT1Zc642p9ynzBuCe5pybkOnvqZIBypXmMlsGcnU4HZ8sCTPfpAexA==} + eslint-plugin-prettier@5.5.4: + resolution: {integrity: sha512-swNtI95SToIz05YINMA6Ox5R057IMAmWZ26GqPxusAp1TZzj+IdY9tXNWWD3vkF/wEqydCONcwjTFpxybBqZsg==} engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: '@types/eslint': '>=8.0.0' @@ -2838,8 +2861,8 @@ packages: eventemitter3@4.0.7: resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} - exsolve@1.0.5: - resolution: {integrity: sha512-pz5dvkYYKQ1AHVrgOzBKWeP4u4FRb3a6DNK2ucr0OoNwYIU4QWsJ+NM36LLzORT+z845MzKHHhpXiUF5nvQoJg==} + exsolve@1.0.7: + resolution: {integrity: sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==} fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} @@ -2876,8 +2899,9 @@ packages: fault@1.0.4: resolution: {integrity: sha512-CJ0HCB5tL5fYTEA7ToAq5+kTwd++Borf1/bifxd9iT70QcXr4MRrO3Llf8Ifs70q+SJcGHFtnIE/Nw6giCtECA==} - fdir@6.4.4: - resolution: {integrity: sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==} + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} peerDependencies: picomatch: ^3 || ^4 peerDependenciesMeta: @@ -2903,8 +2927,8 @@ packages: flatted@3.3.3: resolution: {integrity: 
sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==} - follow-redirects@1.15.9: - resolution: {integrity: sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==} + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} engines: {node: '>=4.0'} peerDependencies: debug: '*' @@ -2973,8 +2997,8 @@ packages: resolution: {integrity: sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==} engines: {node: '>= 0.4'} - get-tsconfig@4.10.0: - resolution: {integrity: sha512-kGzZ3LWWQcGIAmg6iWvXn0ei6WDtV26wzHRMwDSzmAbcXrTEXxHy6IehI6/4eT6VRKyMP1eF1VqwrVUmE/LR7A==} + get-tsconfig@4.10.1: + resolution: {integrity: sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ==} giget@2.0.0: resolution: {integrity: sha512-L5bGsVkxJbJgdnwyuheIunkGatUF/zssUoxxjACCseZYAVbaqdh9Tsmmlkl8vYan09H7sbvKt4pS8GqKLBrEzA==} @@ -3000,10 +3024,6 @@ packages: resolution: {integrity: sha512-e1LleDykUz2Iu+MTYdkSsuWX8lvAjAcs0Xef0lNIu0S2wOAzuTxCJtcd9S3cijlwYF18EsU3rzb8jPVobxDh9Q==} engines: {node: '>=16 || 14 >=14.17'} - globals@11.12.0: - resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} - engines: {node: '>=4'} - globals@13.24.0: resolution: {integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==} engines: {node: '>=8'} @@ -3189,6 +3209,10 @@ packages: resolution: {integrity: sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==} engines: {node: '>= 0.4'} + is-negative-zero@2.0.3: + resolution: {integrity: sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==} + engines: {node: '>= 0.4'} + is-number-object@1.1.1: resolution: {integrity: sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==} engines: {node: '>= 0.4'} @@ -3254,17 +3278,23 @@ packages: resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==} hasBin: true - jiti@2.4.2: - resolution: {integrity: sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==} + jiti@2.5.1: + resolution: {integrity: sha512-twQoecYPiVA5K/h6SxtORw/Bs3ar+mLUtoPSc7iMXzQzK8d7eJ/R09wmTwAjiamETn1cXYPGfNnu7DMoHgu12w==} hasBin: true - jotai@2.12.4: - resolution: {integrity: sha512-eFXLJol4oOLM8BS1+QV+XwaYQITG8n1tatBCFl4F5HE3zR5j2WIK8QpMt7VJIYmlogNUZfvB7wjwLoVk+umB9Q==} + jotai@2.13.1: + resolution: {integrity: sha512-cRsw6kFeGC9Z/D3egVKrTXRweycZ4z/k7i2MrfCzPYsL9SIWcPXTyqv258/+Ay8VUEcihNiE/coBLE6Kic6b8A==} engines: {node: '>=12.20.0'} peerDependencies: + '@babel/core': '>=7.0.0' + '@babel/template': '>=7.0.0' '@types/react': '>=17.0.0' react: '>=17.0.0' peerDependenciesMeta: + '@babel/core': + optional: true + '@babel/template': + optional: true '@types/react': optional: true react: @@ -3372,16 +3402,16 @@ packages: peerDependencies: react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc - luxon@3.6.1: - resolution: {integrity: sha512-tJLxrKJhO2ukZ5z0gyjY1zPh3Rh88Ej9P7jNrZiHMUXHae1yvI2imgOZtL1TO8TW6biMMKfTtAOoEJANgtWBMQ==} + luxon@3.7.1: + resolution: {integrity: sha512-RkRWjA926cTvz5rAb1BqyWkKbbjzCGchDUIKMCUvNi17j6f6j8uHGDV82Aqcqtzd+icoYpELmG3ksgGiFNNcNg==} engines: {node: '>=12'} magic-string@0.30.8: 
resolution: {integrity: sha512-ISQTe55T2ao7XtlAStud6qwYPZjE4GK1S/BeVPus4jrq6JuOnQ00YKQC581RWhR122W7msZV263KzVeLoqidyQ==} engines: {node: '>=12'} - markdown-to-jsx@7.7.6: - resolution: {integrity: sha512-/PWFFoKKMidk4Ut06F5hs5sluq1aJ0CGvUJWsnCK6hx/LPM8vlhvKAxtGHJ+U+V2Il2wmnfO6r81ICD3xZRVaw==} + markdown-to-jsx@7.7.13: + resolution: {integrity: sha512-DiueEq2bttFcSxUs85GJcQVrOr0+VVsPfj9AEUPqmExJ3f8P/iQNvZHltV4tm1XVhu1kl0vWBZWT3l99izRMaA==} engines: {node: '>= 10'} peerDependencies: react: '>= 0.14.0' @@ -3453,11 +3483,11 @@ packages: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} engines: {node: '>=16 || 14 >=14.17'} - monaco-editor@0.47.0: - resolution: {integrity: sha512-VabVvHvQ9QmMwXu4du008ZDuyLnHs9j7ThVFsiJoXSOQk18+LF89N4ADzPbFenm0W4V2bGHnFBztIRQTgBfxzw==} + monaco-editor@0.52.2: + resolution: {integrity: sha512-GEQWEZmfkOGLdd3XK8ryrfWz3AIP8YymVXiPHEdewrUq7mh0qrKrfHLNCXcbB6sTnMLnOZ3ztSiKcciFUkIJwQ==} - monaco-themes@0.4.5: - resolution: {integrity: sha512-SqFGy2S1/dVL7ayWV09OTSKtdBDAsfsON56hfvcJgmCSfXpBgWNn3Qb5h7TmJIUoK4PfPHGvQsw1s8IgQo5ftw==} + monaco-themes@0.4.6: + resolution: {integrity: sha512-g8E1CNT6bRyinPSQxVnNrs5b12zmKBpA83l3MEyOETr+KvoyUP4SS1AfHxyxaFBnLiyuyRwoPO4+R4PvzCJzPw==} ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} @@ -3475,8 +3505,8 @@ packages: engines: {node: ^18 || >=20} hasBin: true - napi-postinstall@0.2.3: - resolution: {integrity: sha512-Mi7JISo/4Ij2tDZ2xBE2WH+/KvVlkhA6juEjpEeRAVPNCpN3nxJo/5FhDNKgBcdmcmhaH6JjgST4xY/23ZYK0w==} + napi-postinstall@0.3.3: + resolution: {integrity: sha512-uTp172LLXSxuSYHv/kou+f6KW3SMppU9ivthaVTXian9sOt3XM/zHYHpRZiLgQoxeWfYUnslNWQHF1+G71xcow==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} hasBin: true @@ -3487,8 +3517,8 @@ packages: resolution: {integrity: sha512-ofRW94Ab0T4AOh5Fk8t0h8OBWrmjb0SSB20xh1H8YnPV9EJ+f5AMoYSUQ2zgJ4Iq2HAK0I2l5/Nequ8YzFS3Hg==} engines: {node: 4.x || >=6.0.0} - node-fetch-native@1.6.6: - resolution: {integrity: sha512-8Mc2HhqPdlIfedsuZoc3yioPuzp6b+L5jRCRY1QzuWZh2EGJVQrGppC6V6cF0bLdbW0+O2YpqCA25aF/1lvipQ==} + node-fetch-native@1.6.7: + resolution: {integrity: sha512-g9yhqoedzIUm0nTnTqAQvueMPVOuIY16bqgAJJC8XOOubYFNwz6IER9qs0Gq2Xd0+CecCKFjtdDTMA4u4xG06Q==} node-fetch@2.7.0: resolution: {integrity: sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==} @@ -3513,8 +3543,8 @@ packages: resolution: {integrity: sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==} engines: {node: '>=0.10.0'} - nypm@0.6.0: - resolution: {integrity: sha512-mn8wBFV9G9+UFHIrq+pZ2r2zL4aPau/by3kJb3cM7+5tQHMt6HGQB8FDIeKFYp8o0D2pnH6nVsO88N4AmUxIWg==} + nypm@0.6.1: + resolution: {integrity: sha512-hlacBiRiv1k9hZFiphPUkfSQ/ZfQzZDzC+8z0wL3lvDAOUu/2NnChkKuMoMjNur/9OpKuz2QsIeiPVN0xM5Q0w==} engines: {node: ^14.16.0 || >=16.10.0} hasBin: true @@ -3644,8 +3674,8 @@ packages: resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} engines: {node: '>=8.6'} - picomatch@4.0.2: - resolution: {integrity: sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==} + picomatch@4.0.3: + resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} engines: {node: '>=12'} pify@2.3.0: @@ -3656,8 +3686,8 @@ packages: resolution: {integrity: 
sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} engines: {node: '>= 6'} - pkg-types@2.1.0: - resolution: {integrity: sha512-wmJwA+8ihJixSoHKxZJRBQG1oY8Yr9pGLzRmSsNms0iNWyHHAlZCa7mmKiFR10YPZuz/2k169JiS/inOjBCZ2A==} + pkg-types@2.2.0: + resolution: {integrity: sha512-2SM/GZGAEkPp3KWORxQZns4M+WSeXbC2HEvmOIJe3Cmiv6ieAJvdVhDldtHqM5J1Y7MrR1XhkBT/rMlhh9FdqQ==} possible-typed-array-names@1.1.0: resolution: {integrity: sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==} @@ -3700,8 +3730,8 @@ packages: postcss-value-parser@4.2.0: resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} - postcss@8.5.3: - resolution: {integrity: sha512-dle9A3yYxlBSrt8Fu+IpjGT8SY8hN0mlaA6GY8t0P5PjIOZemULz/E2Bnm/2dcUOena75OTNkHI76uZBNUUq3A==} + postcss@8.5.6: + resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} prelude-ls@1.2.1: @@ -3712,8 +3742,8 @@ packages: resolution: {integrity: sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==} engines: {node: '>=6.0.0'} - prettier@3.5.3: - resolution: {integrity: sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==} + prettier@3.6.2: + resolution: {integrity: sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==} engines: {node: '>=14'} hasBin: true @@ -3736,8 +3766,8 @@ packages: property-information@5.6.0: resolution: {integrity: sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==} - property-information@7.0.0: - resolution: {integrity: sha512-7D/qOz/+Y4X/rzSB6jKxKUsQnphO046ei8qxG59mtM3RG3DHgTK81HrxrmoDVINJb8NKT5ZsRbwHvQ6B68Iyhg==} + property-information@7.1.0: + resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} proxy-from-env@1.1.0: resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} @@ -3767,8 +3797,8 @@ packages: peerDependencies: react: ^18.3.1 - react-hook-form@7.56.2: - resolution: {integrity: sha512-vpfuHuQMF/L6GpuQ4c3ZDo+pRYxIi40gQqsCmmfUBwm+oqvBhKhwghCuj2o00YCgSfU6bR9KC/xnQGWm3Gr08A==} + react-hook-form@7.62.0: + resolution: {integrity: sha512-7KWFejc98xqG/F4bAxpL41NB3o1nnvQO1RWZT3TqRZYL8RryQETGfEdVnJN2fy1crCiBLLjkRBVK05j24FxJGA==} engines: {node: '>=18.0.0'} peerDependencies: react: ^16.8.0 || ^17 || ^18 || ^19 @@ -3808,8 +3838,8 @@ packages: '@types/react': optional: true - react-remove-scroll@2.6.3: - resolution: {integrity: sha512-pnAi91oOk8g8ABQKGF5/M9qxmmOPxaAnopyTHYfqYEwJhyFrbbBtHuSgtKEoH0jpcxx5o3hXqH1mNd9/Oi+8iQ==} + react-remove-scroll@2.7.1: + resolution: {integrity: sha512-HpMh8+oahmIdOuS5aFKKY6Pyog+FNaZV/XyJOq7b4YFwsFHe5yYfdbIalI4k3vU2nSDql7YskmUseHsRrJqIPA==} engines: {node: '>=10'} peerDependencies: '@types/react': '*' @@ -3818,15 +3848,15 @@ packages: '@types/react': optional: true - react-router-dom@6.30.0: - resolution: {integrity: sha512-x30B78HV5tFk8ex0ITwzC9TTZMua4jGyA9IUlH1JLQYQTFyxr/ZxwOJq7evg1JX1qGVUcvhsmQSKdPncQrjTgA==} + react-router-dom@6.30.1: + resolution: {integrity: sha512-llKsgOkZdbPU1Eg3zK8lCn+sjD9wMRZZPuzmdWWX5SUs8OFkN5HnFVC0u5KMeMaC9aoancFI/KoLuKPqN+hxHw==} engines: {node: '>=14.0.0'} peerDependencies: react: '>=16.8' react-dom: '>=16.8' - react-router@6.30.0: - 
resolution: {integrity: sha512-D3X8FyH9nBcTSHGdEKurK7r8OYE1kKFn3d/CF+CoxbSHkxU7o37+Uh7eAHRXr6k2tSExXYO++07PeXJtA/dEhQ==} + react-router@6.30.1: + resolution: {integrity: sha512-X1m21aEmxGXqENEPG3T6u0Th7g0aS4ZmoNynhbs+Cn+q+QGTLt+d5IQ2bHAXKzKcxGJjxACpVbnYQSCRcfxHlQ==} engines: {node: '>=14.0.0'} peerDependencies: react: '>=16.8' @@ -3891,8 +3921,8 @@ packages: recharts-scale@0.4.5: resolution: {integrity: sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==} - recharts@2.15.3: - resolution: {integrity: sha512-EdOPzTwcFSuqtvkDoaM5ws/Km1+WTAO2eizL7rqiG0V2UVhTnz0m7J2i0CjVPUCdEkZImaWvXLbZDS2H5t6GFQ==} + recharts@2.15.4: + resolution: {integrity: sha512-UT/q6fwS3c1dHbXv2uFgYJ9BMFHu3fwnd7AYZaEQhXuYQ4hgsxLvsUXzGdKeZrW5xopzDCvuA2N41WJ88I7zIw==} engines: {node: '>=14'} peerDependencies: react: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -3968,8 +3998,8 @@ packages: engines: {node: '>=10.0.0'} hasBin: true - rollup@4.40.2: - resolution: {integrity: sha512-tfUOg6DTP4rhQ3VjOO6B4wyrJnGOX85requAXvqYTHsOgb2TFJdZ3aWpT8W2kPoypSGP7dZUyzxJ9ee4buM5Fg==} + rollup@4.46.2: + resolution: {integrity: sha512-WMmLFI+Boh6xbop+OAGo9cQ3OgX9MIg7xOQjn+pTCwOkk+FNDAeAemXkJ3HzDJrVXleLOFVa1ipuc1AmEx1Dwg==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -3995,8 +4025,8 @@ packages: resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} hasBin: true - semver@7.7.1: - resolution: {integrity: sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==} + semver@7.7.2: + resolution: {integrity: sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==} engines: {node: '>=10'} hasBin: true @@ -4020,8 +4050,8 @@ packages: resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} engines: {node: '>=8'} - shiki@3.4.0: - resolution: {integrity: sha512-Ni80XHcqhOEXv5mmDAvf5p6PAJqbUc/RzFeaOqk+zP5DLvTPS3j0ckvA+MI87qoxTQ5RGJDVTbdl/ENLSyyAnQ==} + shiki@3.9.2: + resolution: {integrity: sha512-t6NKl5e/zGTvw/IyftLcumolgOczhuroqwXngDeMqJ3h3EQiTY/7wmfgPlsmloD8oYfqkEDqxiaH37Pjm1zUhQ==} should-equal@2.0.0: resolution: {integrity: sha512-ZP36TMrK9euEuWQYBig9W55WPC7uo37qzAEmbjHz4gfyuXrEUgF8cUvQVO+w+d3OMfPvSRQJ22lSm8MQJ43LTA==} @@ -4081,6 +4111,10 @@ packages: state-local@1.0.7: resolution: {integrity: sha512-HTEHMNieakEnoe33shBYcZ7NX83ACUjCu8c40iOGEZsngj9zRnkqS9j1pqQPXwobB0ZcVTk27REb7COQ0UR59w==} + stop-iteration-iterator@1.1.0: + resolution: {integrity: sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==} + engines: {node: '>= 0.4'} + string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} @@ -4143,17 +4177,17 @@ packages: swagger-schema-official@2.0.0-bab6bed: resolution: {integrity: sha512-rCC0NWGKr/IJhtRuPq/t37qvZHI/mH4I4sxflVM+qgVe5Z2uOCivzWaVbuioJaB61kvm5UvB7b49E+oBY0M8jA==} - swagger-typescript-api@13.1.3: - resolution: {integrity: sha512-WRC3TuhD8j8y7PF/XOwHt7j9MPhACwEciw2hnRjt1n0TYF6FaUtQcB6eVMoAamiLQZIYslJyLSIxvMuhxqzpZA==} - engines: {node: '>=18.0.0'} + swagger-typescript-api@13.2.8: + resolution: {integrity: sha512-TgC8cB2GwrQctiUrLW9cVcPkiH5/zGbUCD3Y5zjeeFAtcCBGxLlBp9Df/UK+ux/oQ2J8x3C5a4TOz+tAZ8jOkQ==} + engines: {node: '>=20'} hasBin: true swagger2openapi@7.0.8: resolution: {integrity: 
sha512-upi/0ZGkYgEcLeGieoz8gT74oWHA0E7JivX7aN9mAf+Tc7BQoRBvnIGHoPDw+f9TXTW4s6kGYCZJtauP6OYp7g==} hasBin: true - synckit@0.11.4: - resolution: {integrity: sha512-Q/XQKRaJiLiFIBNN+mndW7S/RHxvwzuZS6ZwmRzUBqJBv/5QIKCEwkBC8GBf8EQJKYnaFs0wOZbKTXBPj8L9oQ==} + synckit@0.11.11: + resolution: {integrity: sha512-MeQTA1r0litLUf0Rp/iisCaL8761lKAZHaimlbGK4j0HysC4PLfqygQj9srcs0m2RdtDYnF8UuYyKpbjHYp7Jw==} engines: {node: ^14.18.0 || >=16.0.0} tailwind-merge@2.6.0: @@ -4190,11 +4224,11 @@ packages: tiny-invariant@1.3.3: resolution: {integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} - tinyexec@0.3.2: - resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} + tinyexec@1.0.1: + resolution: {integrity: sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==} - tinyglobby@0.2.13: - resolution: {integrity: sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==} + tinyglobby@0.2.14: + resolution: {integrity: sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==} engines: {node: '>=12.0.0'} to-regex-range@5.0.1: @@ -4228,8 +4262,8 @@ packages: tslib@2.8.1: resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} - tsx@4.19.4: - resolution: {integrity: sha512-gK5GVzDkJK1SI1zwHf32Mqxf2tSJkNx+eYcNly5+nHvWqXUJYUkWBQtKauoESz3ymezAI++ZwT855x5p5eop+Q==} + tsx@4.20.4: + resolution: {integrity: sha512-yyxBKfORQ7LuRt/BQKBXrpcq59ZvSW0XxwfjAt3w2/8PmdxaFzijtMhTawprSHhpzeM5BgU2hXHG3lklIERZXg==} engines: {node: '>=18.0.0'} hasBin: true @@ -4257,8 +4291,8 @@ packages: resolution: {integrity: sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==} engines: {node: '>= 0.4'} - typescript@5.8.3: - resolution: {integrity: sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==} + typescript@5.9.2: + resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} engines: {node: '>=14.17'} hasBin: true @@ -4266,8 +4300,8 @@ packages: resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} engines: {node: '>= 0.4'} - undici-types@6.19.8: - resolution: {integrity: sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==} + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} unist-util-is@6.0.0: resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==} @@ -4287,8 +4321,8 @@ packages: unplugin@1.0.1: resolution: {integrity: sha512-aqrHaVBWW1JVKBHmGo33T5TxeL0qWzfvjWokObHA9bYmN7eNDkwOxmLjhioHl9878qDFMAaT51XNroRyuz7WxA==} - unrs-resolver@1.7.2: - resolution: {integrity: sha512-BBKpaylOW8KbHsu378Zky/dGh4ckT/4NW/0SHRABdqRLcQJ2dAOjDo9g97p04sWflm0kqPqpUatxReNV/dqI5A==} + unrs-resolver@1.11.1: + resolution: {integrity: sha512-bSjt9pjaEBnNiGgc9rUiHGKv5l4/TGzDmYw3RhnkJGtLhbnnA/5qJj7x3dNDCRx/PJxu774LlH8lCOlB4hEfKg==} update-browserslist-db@1.1.3: resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==} @@ -4342,8 +4376,8 @@ packages: validate.io-number@1.0.3: resolution: {integrity: 
sha512-kRAyotcbNaSYoDnXvb4MHg/0a1egJdLwS6oJ38TJY7aw9n93Fl/3blIXdyYvPOp55CNxywooG/3BcrwNrBpcSg==} - vfile-message@4.0.2: - resolution: {integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==} + vfile-message@4.0.3: + resolution: {integrity: sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==} vfile@6.0.3: resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} @@ -4400,8 +4434,8 @@ packages: webidl-conversions@3.0.1: resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==} - webpack-sources@3.2.3: - resolution: {integrity: sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==} + webpack-sources@3.3.3: + resolution: {integrity: sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==} engines: {node: '>=10.13.0'} webpack-virtual-modules@0.5.0: @@ -4461,9 +4495,9 @@ packages: resolution: {integrity: sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==} engines: {node: '>= 6'} - yaml@2.7.1: - resolution: {integrity: sha512-10ULxpnOCQXxJvBgxsn9ptjq6uviG/htZKk9veJGhlqn3w/DxQ631zFF+nlQXLwmImeS5amR2dl2U8sg6U9jsQ==} - engines: {node: '>= 14'} + yaml@2.8.1: + resolution: {integrity: sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==} + engines: {node: '>= 14.6'} hasBin: true yargs-parser@21.1.1: @@ -4478,11 +4512,11 @@ packages: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} - zod@3.24.4: - resolution: {integrity: sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg==} + zod@3.25.76: + resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} - zustand@4.5.6: - resolution: {integrity: sha512-ibr/n1hBzLLj5Y+yUcU7dYw8p6WnIVzdJbnX+1YpaScvZVF2ziugqHs+LAmHw4lWO9c/zRj+K1ncgWDQuthEdQ==} + zustand@4.5.7: + resolution: {integrity: sha512-CHOUy7mu3lbD6o6LJLfllpjkzhHXSBlX8B9+qPddUsIfeF5S/UZ5q0kmCsnRqT1UHFQZchNFDDzMbQsuesHWlw==} engines: {node: '>=12.7.0'} peerDependencies: '@types/react': '>=16.8' @@ -4505,8 +4539,8 @@ snapshots: '@ampproject/remapping@2.3.0': dependencies: - '@jridgewell/gen-mapping': 0.3.8 - '@jridgewell/trace-mapping': 0.3.25 + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.30 '@babel/code-frame@7.27.1': dependencies: @@ -4514,57 +4548,59 @@ snapshots: js-tokens: 4.0.0 picocolors: 1.1.1 - '@babel/compat-data@7.27.2': {} + '@babel/compat-data@7.28.0': {} - '@babel/core@7.27.1': + '@babel/core@7.28.3': dependencies: '@ampproject/remapping': 2.3.0 '@babel/code-frame': 7.27.1 - '@babel/generator': 7.27.1 + '@babel/generator': 7.28.3 '@babel/helper-compilation-targets': 7.27.2 - '@babel/helper-module-transforms': 7.27.1(@babel/core@7.27.1) - '@babel/helpers': 7.27.1 - '@babel/parser': 7.27.2 + '@babel/helper-module-transforms': 7.28.3(@babel/core@7.28.3) + '@babel/helpers': 7.28.3 + '@babel/parser': 7.28.3 '@babel/template': 7.27.2 - '@babel/traverse': 7.27.1 - '@babel/types': 7.27.1 + '@babel/traverse': 7.28.3 + '@babel/types': 7.28.2 convert-source-map: 2.0.0 - debug: 4.4.0 + debug: 4.4.1 gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 transitivePeerDependencies: - supports-color - 
'@babel/generator@7.27.1': + '@babel/generator@7.28.3': dependencies: - '@babel/parser': 7.27.2 - '@babel/types': 7.27.1 - '@jridgewell/gen-mapping': 0.3.8 - '@jridgewell/trace-mapping': 0.3.25 + '@babel/parser': 7.28.3 + '@babel/types': 7.28.2 + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.30 jsesc: 3.1.0 '@babel/helper-compilation-targets@7.27.2': dependencies: - '@babel/compat-data': 7.27.2 + '@babel/compat-data': 7.28.0 '@babel/helper-validator-option': 7.27.1 - browserslist: 4.24.5 + browserslist: 4.25.2 lru-cache: 5.1.1 semver: 6.3.1 + '@babel/helper-globals@7.28.0': {} + '@babel/helper-module-imports@7.27.1': dependencies: - '@babel/traverse': 7.27.1 - '@babel/types': 7.27.1 + '@babel/traverse': 7.28.3 + '@babel/types': 7.28.2 transitivePeerDependencies: - supports-color - '@babel/helper-module-transforms@7.27.1(@babel/core@7.27.1)': + '@babel/helper-module-transforms@7.28.3(@babel/core@7.28.3)': dependencies: - '@babel/core': 7.27.1 + '@babel/core': 7.28.3 '@babel/helper-module-imports': 7.27.1 '@babel/helper-validator-identifier': 7.27.1 - '@babel/traverse': 7.27.1 + '@babel/traverse': 7.28.3 transitivePeerDependencies: - supports-color @@ -4576,145 +4612,148 @@ snapshots: '@babel/helper-validator-option@7.27.1': {} - '@babel/helpers@7.27.1': + '@babel/helpers@7.28.3': dependencies: '@babel/template': 7.27.2 - '@babel/types': 7.27.1 + '@babel/types': 7.28.2 - '@babel/parser@7.27.2': + '@babel/parser@7.28.3': dependencies: - '@babel/types': 7.27.1 + '@babel/types': 7.28.2 - '@babel/plugin-transform-react-jsx-self@7.27.1(@babel/core@7.27.1)': + '@babel/plugin-transform-react-jsx-self@7.27.1(@babel/core@7.28.3)': dependencies: - '@babel/core': 7.27.1 + '@babel/core': 7.28.3 '@babel/helper-plugin-utils': 7.27.1 - '@babel/plugin-transform-react-jsx-source@7.27.1(@babel/core@7.27.1)': + '@babel/plugin-transform-react-jsx-source@7.27.1(@babel/core@7.28.3)': dependencies: - '@babel/core': 7.27.1 + '@babel/core': 7.28.3 '@babel/helper-plugin-utils': 7.27.1 - '@babel/runtime@7.27.1': {} + '@babel/runtime@7.28.3': {} '@babel/template@7.27.2': dependencies: '@babel/code-frame': 7.27.1 - '@babel/parser': 7.27.2 - '@babel/types': 7.27.1 + '@babel/parser': 7.28.3 + '@babel/types': 7.28.2 - '@babel/traverse@7.27.1': + '@babel/traverse@7.28.3': dependencies: '@babel/code-frame': 7.27.1 - '@babel/generator': 7.27.1 - '@babel/parser': 7.27.2 + '@babel/generator': 7.28.3 + '@babel/helper-globals': 7.28.0 + '@babel/parser': 7.28.3 '@babel/template': 7.27.2 - '@babel/types': 7.27.1 - debug: 4.4.0 - globals: 11.12.0 + '@babel/types': 7.28.2 + debug: 4.4.1 transitivePeerDependencies: - supports-color - '@babel/types@7.27.1': + '@babel/types@7.28.2': dependencies: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.27.1 - '@biomejs/js-api@0.7.1(@biomejs/wasm-nodejs@1.9.4)': - optionalDependencies: - '@biomejs/wasm-nodejs': 1.9.4 - - '@biomejs/wasm-nodejs@1.9.4': {} - - '@emnapi/core@1.4.3': + '@biomejs/js-api@2.0.3(@biomejs/wasm-nodejs@2.1.4)': dependencies: - '@emnapi/wasi-threads': 1.0.2 + '@biomejs/wasm-nodejs': 2.1.4 + + '@biomejs/wasm-nodejs@2.1.4': {} + + '@emnapi/core@1.4.5': + dependencies: + '@emnapi/wasi-threads': 1.0.4 tslib: 2.8.1 optional: true - '@emnapi/runtime@1.4.3': + '@emnapi/runtime@1.4.5': dependencies: tslib: 2.8.1 optional: true - '@emnapi/wasi-threads@1.0.2': + '@emnapi/wasi-threads@1.0.4': dependencies: tslib: 2.8.1 optional: true - '@esbuild/aix-ppc64@0.25.4': + '@esbuild/aix-ppc64@0.25.9': optional: true - 
'@esbuild/android-arm64@0.25.4': + '@esbuild/android-arm64@0.25.9': optional: true - '@esbuild/android-arm@0.25.4': + '@esbuild/android-arm@0.25.9': optional: true - '@esbuild/android-x64@0.25.4': + '@esbuild/android-x64@0.25.9': optional: true - '@esbuild/darwin-arm64@0.25.4': + '@esbuild/darwin-arm64@0.25.9': optional: true - '@esbuild/darwin-x64@0.25.4': + '@esbuild/darwin-x64@0.25.9': optional: true - '@esbuild/freebsd-arm64@0.25.4': + '@esbuild/freebsd-arm64@0.25.9': optional: true - '@esbuild/freebsd-x64@0.25.4': + '@esbuild/freebsd-x64@0.25.9': optional: true - '@esbuild/linux-arm64@0.25.4': + '@esbuild/linux-arm64@0.25.9': optional: true - '@esbuild/linux-arm@0.25.4': + '@esbuild/linux-arm@0.25.9': optional: true - '@esbuild/linux-ia32@0.25.4': + '@esbuild/linux-ia32@0.25.9': optional: true - '@esbuild/linux-loong64@0.25.4': + '@esbuild/linux-loong64@0.25.9': optional: true - '@esbuild/linux-mips64el@0.25.4': + '@esbuild/linux-mips64el@0.25.9': optional: true - '@esbuild/linux-ppc64@0.25.4': + '@esbuild/linux-ppc64@0.25.9': optional: true - '@esbuild/linux-riscv64@0.25.4': + '@esbuild/linux-riscv64@0.25.9': optional: true - '@esbuild/linux-s390x@0.25.4': + '@esbuild/linux-s390x@0.25.9': optional: true - '@esbuild/linux-x64@0.25.4': + '@esbuild/linux-x64@0.25.9': optional: true - '@esbuild/netbsd-arm64@0.25.4': + '@esbuild/netbsd-arm64@0.25.9': optional: true - '@esbuild/netbsd-x64@0.25.4': + '@esbuild/netbsd-x64@0.25.9': optional: true - '@esbuild/openbsd-arm64@0.25.4': + '@esbuild/openbsd-arm64@0.25.9': optional: true - '@esbuild/openbsd-x64@0.25.4': + '@esbuild/openbsd-x64@0.25.9': optional: true - '@esbuild/sunos-x64@0.25.4': + '@esbuild/openharmony-arm64@0.25.9': optional: true - '@esbuild/win32-arm64@0.25.4': + '@esbuild/sunos-x64@0.25.9': optional: true - '@esbuild/win32-ia32@0.25.4': + '@esbuild/win32-arm64@0.25.9': optional: true - '@esbuild/win32-x64@0.25.4': + '@esbuild/win32-ia32@0.25.9': + optional: true + + '@esbuild/win32-x64@0.25.9': optional: true '@eslint-community/eslint-utils@4.7.0(eslint@8.57.1)': @@ -4727,7 +4766,7 @@ snapshots: '@eslint/eslintrc@2.1.4': dependencies: ajv: 6.12.6 - debug: 4.4.0 + debug: 4.4.1 espree: 9.6.1 globals: 13.24.0 ignore: 5.3.2 @@ -4742,35 +4781,35 @@ snapshots: '@exodus/schemasafe@1.3.0': {} - '@floating-ui/core@1.7.0': + '@floating-ui/core@1.7.3': dependencies: - '@floating-ui/utils': 0.2.9 + '@floating-ui/utils': 0.2.10 - '@floating-ui/dom@1.7.0': + '@floating-ui/dom@1.7.3': dependencies: - '@floating-ui/core': 1.7.0 - '@floating-ui/utils': 0.2.9 + '@floating-ui/core': 1.7.3 + '@floating-ui/utils': 0.2.10 - '@floating-ui/react-dom@2.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@floating-ui/react-dom@2.1.5(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@floating-ui/dom': 1.7.0 + '@floating-ui/dom': 1.7.3 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@floating-ui/utils@0.2.9': {} + '@floating-ui/utils@0.2.10': {} '@heroicons/react@2.2.0(react@18.3.1)': dependencies: react: 18.3.1 - '@hookform/resolvers@3.10.0(react-hook-form@7.56.2(react@18.3.1))': + '@hookform/resolvers@3.10.0(react-hook-form@7.62.0)': dependencies: - react-hook-form: 7.56.2(react@18.3.1) + react-hook-form: 7.62.0(react@18.3.1) '@humanwhocodes/config-array@0.13.0': dependencies: '@humanwhocodes/object-schema': 2.0.3 - debug: 4.4.0 + debug: 4.4.1 minimatch: 3.1.2 transitivePeerDependencies: - supports-color @@ -4788,44 +4827,41 @@ snapshots: wrap-ansi: 8.1.0 wrap-ansi-cjs: wrap-ansi@7.0.0 - '@jridgewell/gen-mapping@0.3.8': + 
'@jridgewell/gen-mapping@0.3.13': dependencies: - '@jridgewell/set-array': 1.2.1 - '@jridgewell/sourcemap-codec': 1.5.0 - '@jridgewell/trace-mapping': 0.3.25 + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.30 '@jridgewell/resolve-uri@3.1.2': {} - '@jridgewell/set-array@1.2.1': {} + '@jridgewell/sourcemap-codec@1.5.5': {} - '@jridgewell/sourcemap-codec@1.5.0': {} - - '@jridgewell/trace-mapping@0.3.25': + '@jridgewell/trace-mapping@0.3.30': dependencies: '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.5 - '@lukemorales/query-key-factory@1.3.4(@tanstack/query-core@5.75.5)(@tanstack/react-query@5.75.5(react@18.3.1))': + '@lukemorales/query-key-factory@1.3.4(@tanstack/query-core@5.85.3)(@tanstack/react-query@5.85.3)': dependencies: - '@tanstack/query-core': 5.75.5 - '@tanstack/react-query': 5.75.5(react@18.3.1) + '@tanstack/query-core': 5.85.3 + '@tanstack/react-query': 5.85.3(react@18.3.1) '@monaco-editor/loader@1.5.0': dependencies: state-local: 1.0.7 - '@monaco-editor/react@4.7.0(monaco-editor@0.47.0)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@monaco-editor/react@4.7.0(monaco-editor@0.52.2)(react-dom@18.3.1)(react@18.3.1)': dependencies: '@monaco-editor/loader': 1.5.0 - monaco-editor: 0.47.0 + monaco-editor: 0.52.2 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@napi-rs/wasm-runtime@0.2.9': + '@napi-rs/wasm-runtime@0.2.12': dependencies: - '@emnapi/core': 1.4.3 - '@emnapi/runtime': 1.4.3 - '@tybys/wasm-util': 0.9.0 + '@emnapi/core': 1.4.5 + '@emnapi/runtime': 1.4.5 + '@tybys/wasm-util': 0.10.0 optional: true '@nodelib/fs.scandir@2.1.5': @@ -4845,271 +4881,255 @@ snapshots: '@pkgjs/parseargs@0.11.0': optional: true - '@pkgr/core@0.2.4': {} + '@pkgr/core@0.2.9': {} '@radix-ui/number@1.1.1': {} '@radix-ui/primitive@1.0.0': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 - '@radix-ui/primitive@1.1.2': {} + '@radix-ui/primitive@1.1.3': {} - '@radix-ui/react-accordion@1.2.10(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-accordion@1.2.12(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-collapsible': 1.1.10(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-collection': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collapsible': 1.1.12(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + 
'@radix-ui/react-direction': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-alert-dialog@1.1.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-alert-dialog@1.1.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-dialog': 1.1.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.2(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-dialog': 1.1.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-arrow@1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-arrow@1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-avatar@1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-avatar@1.1.10(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-is-hydrated': 0.1.0(@types/react@18.3.21)(react@18.3.1) - 
'@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-is-hydrated': 0.1.0(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-checkbox@1.3.1(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-checkbox@1.3.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-collapsible@1.1.10(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-collapsible@1.1.12(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 
1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-collection@1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-collection@1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.2(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) '@radix-ui/react-compose-refs@1.0.0(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 react: 18.3.1 - '@radix-ui/react-compose-refs@1.1.2(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-compose-refs@1.1.2(@types/react@18.3.23)(react@18.3.1)': dependencies: + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 '@radix-ui/react-context@1.0.0(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 react: 18.3.1 - '@radix-ui/react-context@1.1.2(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-context@1.1.2(@types/react@18.3.23)(react@18.3.1)': dependencies: + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-dialog@1.0.0(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-dialog@1.0.0(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 '@radix-ui/primitive': 1.0.0 '@radix-ui/react-compose-refs': 1.0.0(react@18.3.1) '@radix-ui/react-context': 1.0.0(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.0.0(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-focus-guards': 1.0.0(react@18.3.1) - '@radix-ui/react-focus-scope': 
1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-focus-scope': 1.0.0(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-id': 1.0.0(react@18.3.1) - '@radix-ui/react-portal': 1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-portal': 1.0.0(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-presence': 1.0.0(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 1.0.0(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-slot': 1.0.0(react@18.3.1) '@radix-ui/react-use-controllable-state': 1.0.0(react@18.3.1) - aria-hidden: 1.2.4 + aria-hidden: 1.2.6 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.5.4(@types/react@18.3.21)(react@18.3.1) + react-remove-scroll: 2.5.4(@types/react@18.3.23)(react@18.3.1) transitivePeerDependencies: - '@types/react' - '@radix-ui/react-dialog@1.1.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-dialog@1.1.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-focus-guards': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-focus-scope': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-portal': 1.1.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) - aria-hidden: 1.2.4 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 
1.2.2(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) + aria-hidden: 1.2.6 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.6.3(@types/react@18.3.21)(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) + react-remove-scroll: 2.7.1(@types/react@18.3.23)(react@18.3.1) - '@radix-ui/react-direction@1.1.1(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-direction@1.1.1(@types/react@18.3.23)(react@18.3.1)': dependencies: + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-dismissable-layer@1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-dismissable-layer@1.0.0(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 '@radix-ui/primitive': 1.0.0 '@radix-ui/react-compose-refs': 1.0.0(react@18.3.1) - '@radix-ui/react-primitive': 1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 1.0.0(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-use-callback-ref': 1.0.0(react@18.3.1) '@radix-ui/react-use-escape-keydown': 1.0.0(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@radix-ui/react-dismissable-layer@1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-dismissable-layer@1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-escape-keydown': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-dropdown-menu@2.1.14(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-dropdown-menu@2.1.16(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-menu': 2.1.14(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) + 
'@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-menu': 2.1.16(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) '@radix-ui/react-focus-guards@1.0.0(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 react: 18.3.1 - '@radix-ui/react-focus-guards@1.1.2(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-focus-guards@1.1.3(@types/react@18.3.23)(react@18.3.1)': dependencies: + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-focus-scope@1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-focus-scope@1.0.0(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 '@radix-ui/react-compose-refs': 1.0.0(react@18.3.1) - '@radix-ui/react-primitive': 1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 1.0.0(react-dom@18.3.1)(react@18.3.1) '@radix-ui/react-use-callback-ref': 1.0.0(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@radix-ui/react-focus-scope@1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-focus-scope@1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-hover-card@1.1.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-hover-card@1.1.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-popper': 
1.2.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) '@radix-ui/react-icons@1.3.2(react@18.3.1)': dependencies: @@ -5117,453 +5137,424 @@ snapshots: '@radix-ui/react-id@1.0.0(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 '@radix-ui/react-use-layout-effect': 1.0.0(react@18.3.1) react: 18.3.1 - '@radix-ui/react-id@1.1.1(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-id@1.1.1(@types/react@18.3.23)(react@18.3.1)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-label@2.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-label@2.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-menu@2.1.14(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-menu@2.1.16(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-collection': 
1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-focus-guards': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-focus-scope': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-popper': 1.2.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-roving-focus': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) - aria-hidden: 1.2.4 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-direction': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) + aria-hidden: 1.2.6 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.6.3(@types/react@18.3.21)(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) + react-remove-scroll: 
2.7.1(@types/react@18.3.23)(react@18.3.1) - '@radix-ui/react-menubar@1.1.14(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-menubar@1.1.16(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-collection': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-menu': 2.1.14(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-roving-focus': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-direction': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-menu': 2.1.16(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-popover@1.1.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-popover@1.1.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-focus-guards': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-focus-scope': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-popper': 1.2.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 
1.1.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) - aria-hidden: 1.2.4 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) + aria-hidden: 1.2.6 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.6.3(@types/react@18.3.21)(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) + react-remove-scroll: 2.7.1(@types/react@18.3.23)(react@18.3.1) - '@radix-ui/react-popper@1.2.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-popper@1.2.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@floating-ui/react-dom': 2.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-arrow': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-rect': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@floating-ui/react-dom': 2.1.5(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-arrow': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 
1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-rect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.23)(react@18.3.1) '@radix-ui/rect': 1.1.1 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - - '@radix-ui/react-portal@1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@babel/runtime': 7.27.1 - '@radix-ui/react-primitive': 1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@radix-ui/react-portal@1.1.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-portal@1.0.0(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@babel/runtime': 7.28.3 + '@radix-ui/react-primitive': 1.0.0(react-dom@18.3.1)(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-presence@1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-portal@1.1.9(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + + '@radix-ui/react-presence@1.0.0(react-dom@18.3.1)(react@18.3.1)': + dependencies: + '@babel/runtime': 7.28.3 '@radix-ui/react-compose-refs': 1.0.0(react@18.3.1) '@radix-ui/react-use-layout-effect': 1.0.0(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@radix-ui/react-presence@1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-presence@1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-primitive@1.0.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-primitive@1.0.0(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 
'@radix-ui/react-slot': 1.0.0(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@radix-ui/react-primitive@2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-primitive@2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-slot': 1.2.2(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-radio-group@1.3.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-radio-group@1.3.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-roving-focus': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-direction': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-roving-focus@1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-roving-focus@1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-collection': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 
1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-direction': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-scroll-area@1.2.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-scroll-area@1.2.10(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: '@radix-ui/number': 1.1.1 - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-direction': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-select@2.2.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': 
+ '@radix-ui/react-select@2.2.6(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: '@radix-ui/number': 1.1.1 - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-collection': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-focus-guards': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-focus-scope': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-popper': 1.2.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-visually-hidden': 1.2.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - aria-hidden: 1.2.4 + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-direction': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-focus-guards': 1.1.3(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-focus-scope': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.23)(react@18.3.1) + 
'@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) + aria-hidden: 1.2.6 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-remove-scroll: 2.6.3(@types/react@18.3.21)(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) + react-remove-scroll: 2.7.1(@types/react@18.3.23)(react@18.3.1) - '@radix-ui/react-separator@1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-separator@1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) '@radix-ui/react-slot@1.0.0(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 '@radix-ui/react-compose-refs': 1.0.0(react@18.3.1) react: 18.3.1 - '@radix-ui/react-slot@1.2.2(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-slot@1.2.3(@types/react@18.3.23)(react@18.3.1)': dependencies: - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-switch@1.2.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-switch@1.2.6(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-previous': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-size': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - 
'@radix-ui/react-tabs@1.1.11(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-tabs@1.1.13(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-direction': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-roving-focus': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-direction': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-roving-focus': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-toast@1.2.13(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-toast@1.2.15(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-collection': 1.1.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-visually-hidden': 
1.2.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-collection': 1.1.7(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) - '@radix-ui/react-tooltip@1.2.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-tooltip@1.2.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/primitive': 1.1.2 - '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-context': 1.1.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-dismissable-layer': 1.1.9(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-id': 1.1.1(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-popper': 1.2.6(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-portal': 1.1.8(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-presence': 1.1.4(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-slot': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-visually-hidden': 1.2.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/primitive': 1.1.3 + '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-context': 1.1.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.11(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-id': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-popper': 1.2.8(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) 
+ '@radix-ui/react-portal': 1.1.9(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-presence': 1.1.5(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@radix-ui/react-slot': 1.2.3(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.2.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-visually-hidden': 1.2.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) '@radix-ui/react-use-callback-ref@1.0.0(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 react: 18.3.1 - '@radix-ui/react-use-callback-ref@1.1.1(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-use-callback-ref@1.1.1(@types/react@18.3.23)(react@18.3.1)': dependencies: + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 '@radix-ui/react-use-controllable-state@1.0.0(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 '@radix-ui/react-use-callback-ref': 1.0.0(react@18.3.1) react: 18.3.1 - '@radix-ui/react-use-controllable-state@1.2.2(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-use-controllable-state@1.2.2(@types/react@18.3.23)(react@18.3.1)': dependencies: - '@radix-ui/react-use-effect-event': 0.0.2(@types/react@18.3.21)(react@18.3.1) - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-use-effect-event': 0.0.2(@types/react@18.3.23)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-use-effect-event@0.0.2(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-use-effect-event@0.0.2(@types/react@18.3.23)(react@18.3.1)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 '@radix-ui/react-use-escape-keydown@1.0.0(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 '@radix-ui/react-use-callback-ref': 1.0.0(react@18.3.1) react: 18.3.1 - '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-use-escape-keydown@1.1.1(@types/react@18.3.23)(react@18.3.1)': dependencies: - '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-use-is-hydrated@0.1.0(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-use-is-hydrated@0.1.0(@types/react@18.3.23)(react@18.3.1)': dependencies: + '@types/react': 18.3.23 react: 18.3.1 use-sync-external-store: 1.5.0(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 '@radix-ui/react-use-layout-effect@1.0.0(react@18.3.1)': dependencies: - '@babel/runtime': 7.27.1 + '@babel/runtime': 7.28.3 react: 18.3.1 - 
'@radix-ui/react-use-layout-effect@1.1.1(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-use-layout-effect@1.1.1(@types/react@18.3.23)(react@18.3.1)': dependencies: + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-use-previous@1.1.1(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-use-previous@1.1.1(@types/react@18.3.23)(react@18.3.1)': dependencies: + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-use-rect@1.1.1(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-use-rect@1.1.1(@types/react@18.3.23)(react@18.3.1)': dependencies: '@radix-ui/rect': 1.1.1 + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-use-size@1.1.1(@types/react@18.3.21)(react@18.3.1)': + '@radix-ui/react-use-size@1.1.1(@types/react@18.3.23)(react@18.3.1)': dependencies: - '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.21)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.1(@types/react@18.3.23)(react@18.3.1) + '@types/react': 18.3.23 react: 18.3.1 - optionalDependencies: - '@types/react': 18.3.21 - '@radix-ui/react-visually-hidden@1.2.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@radix-ui/react-visually-hidden@1.2.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@radix-ui/react-primitive': 2.1.2(@types/react-dom@18.3.7(@types/react@18.3.21))(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 2.1.3(@types/react-dom@18.3.7)(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) '@radix-ui/rect@1.1.1': {} - '@reactflow/background@11.3.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@reactflow/background@11.3.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@reactflow/core': 11.11.4(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@reactflow/core': 11.11.4(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) classcat: 5.0.5 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - zustand: 4.5.6(@types/react@18.3.21)(react@18.3.1) + zustand: 4.5.7(@types/react@18.3.23)(react@18.3.1) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/controls@11.2.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@reactflow/controls@11.2.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@reactflow/core': 11.11.4(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@reactflow/core': 11.11.4(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) classcat: 5.0.5 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - zustand: 4.5.6(@types/react@18.3.21)(react@18.3.1) + zustand: 4.5.7(@types/react@18.3.23)(react@18.3.1) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/core@11.11.4(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@reactflow/core@11.11.4(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: '@types/d3': 7.4.3 '@types/d3-drag': 3.0.7 @@ -5575,14 +5566,14 @@ snapshots: d3-zoom: 3.0.0 react: 18.3.1 react-dom: 
18.3.1(react@18.3.1) - zustand: 4.5.6(@types/react@18.3.21)(react@18.3.1) + zustand: 4.5.7(@types/react@18.3.23)(react@18.3.1) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/minimap@11.7.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@reactflow/minimap@11.7.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@reactflow/core': 11.11.4(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@reactflow/core': 11.11.4(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) '@types/d3-selection': 3.0.11 '@types/d3-zoom': 3.0.8 classcat: 5.0.5 @@ -5590,48 +5581,48 @@ snapshots: d3-zoom: 3.0.0 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - zustand: 4.5.6(@types/react@18.3.21)(react@18.3.1) + zustand: 4.5.7(@types/react@18.3.23)(react@18.3.1) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/node-resizer@2.2.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@reactflow/node-resizer@2.2.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@reactflow/core': 11.11.4(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@reactflow/core': 11.11.4(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) classcat: 5.0.5 d3-drag: 3.0.0 d3-selection: 3.0.0 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - zustand: 4.5.6(@types/react@18.3.21)(react@18.3.1) + zustand: 4.5.7(@types/react@18.3.23)(react@18.3.1) transitivePeerDependencies: - '@types/react' - immer - '@reactflow/node-toolbar@1.3.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@reactflow/node-toolbar@1.3.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@reactflow/core': 11.11.4(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@reactflow/core': 11.11.4(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1) classcat: 5.0.5 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - zustand: 4.5.6(@types/react@18.3.21)(react@18.3.1) + zustand: 4.5.7(@types/react@18.3.23)(react@18.3.1) transitivePeerDependencies: - '@types/react' - immer '@remix-run/router@1.23.0': {} - '@rjsf/core@5.24.10(@rjsf/utils@5.24.10(react@18.3.1))(react@18.3.1)': + '@rjsf/core@5.24.12(@rjsf/utils@5.24.12)(react@18.3.1)': dependencies: - '@rjsf/utils': 5.24.10(react@18.3.1) + '@rjsf/utils': 5.24.12(react@18.3.1) lodash: 4.17.21 lodash-es: 4.17.21 - markdown-to-jsx: 7.7.6(react@18.3.1) + markdown-to-jsx: 7.7.13(react@18.3.1) nanoid: 3.3.11 prop-types: 15.8.1 react: 18.3.1 - '@rjsf/utils@5.24.10(react@18.3.1)': + '@rjsf/utils@5.24.12(react@18.3.1)': dependencies: json-schema-merge-allof: 0.8.1 jsonpointer: 5.0.1 @@ -5640,119 +5631,121 @@ snapshots: react: 18.3.1 react-is: 18.3.1 - '@rjsf/validator-ajv8@5.24.10(@rjsf/utils@5.24.10(react@18.3.1))': + '@rjsf/validator-ajv8@5.24.12(@rjsf/utils@5.24.12)': dependencies: - '@rjsf/utils': 5.24.10(react@18.3.1) + '@rjsf/utils': 5.24.12(react@18.3.1) ajv: 8.17.1 ajv-formats: 2.1.1(ajv@8.17.1) lodash: 4.17.21 lodash-es: 4.17.21 + '@rolldown/pluginutils@1.0.0-beta.27': {} + '@rollup/pluginutils@4.2.1': dependencies: estree-walker: 2.0.2 picomatch: 2.3.1 - '@rollup/rollup-android-arm-eabi@4.40.2': + '@rollup/rollup-android-arm-eabi@4.46.2': optional: true - '@rollup/rollup-android-arm64@4.40.2': + '@rollup/rollup-android-arm64@4.46.2': optional: true - '@rollup/rollup-darwin-arm64@4.40.2': + '@rollup/rollup-darwin-arm64@4.46.2': optional: true - '@rollup/rollup-darwin-x64@4.40.2': + 
'@rollup/rollup-darwin-x64@4.46.2': optional: true - '@rollup/rollup-freebsd-arm64@4.40.2': + '@rollup/rollup-freebsd-arm64@4.46.2': optional: true - '@rollup/rollup-freebsd-x64@4.40.2': + '@rollup/rollup-freebsd-x64@4.46.2': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.40.2': + '@rollup/rollup-linux-arm-gnueabihf@4.46.2': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.40.2': + '@rollup/rollup-linux-arm-musleabihf@4.46.2': optional: true - '@rollup/rollup-linux-arm64-gnu@4.40.2': + '@rollup/rollup-linux-arm64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-arm64-musl@4.40.2': + '@rollup/rollup-linux-arm64-musl@4.46.2': optional: true - '@rollup/rollup-linux-loongarch64-gnu@4.40.2': + '@rollup/rollup-linux-loongarch64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-powerpc64le-gnu@4.40.2': + '@rollup/rollup-linux-ppc64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.40.2': + '@rollup/rollup-linux-riscv64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-riscv64-musl@4.40.2': + '@rollup/rollup-linux-riscv64-musl@4.46.2': optional: true - '@rollup/rollup-linux-s390x-gnu@4.40.2': + '@rollup/rollup-linux-s390x-gnu@4.46.2': optional: true - '@rollup/rollup-linux-x64-gnu@4.40.2': + '@rollup/rollup-linux-x64-gnu@4.46.2': optional: true - '@rollup/rollup-linux-x64-musl@4.40.2': + '@rollup/rollup-linux-x64-musl@4.46.2': optional: true - '@rollup/rollup-win32-arm64-msvc@4.40.2': + '@rollup/rollup-win32-arm64-msvc@4.46.2': optional: true - '@rollup/rollup-win32-ia32-msvc@4.40.2': + '@rollup/rollup-win32-ia32-msvc@4.46.2': optional: true - '@rollup/rollup-win32-x64-msvc@4.40.2': + '@rollup/rollup-win32-x64-msvc@4.46.2': optional: true '@rtsao/scc@1.1.0': {} - '@sentry-internal/feedback@7.120.3': + '@sentry-internal/feedback@7.120.4': dependencies: - '@sentry/core': 7.120.3 - '@sentry/types': 7.120.3 - '@sentry/utils': 7.120.3 + '@sentry/core': 7.120.4 + '@sentry/types': 7.120.4 + '@sentry/utils': 7.120.4 - '@sentry-internal/replay-canvas@7.120.3': + '@sentry-internal/replay-canvas@7.120.4': dependencies: - '@sentry/core': 7.120.3 - '@sentry/replay': 7.120.3 - '@sentry/types': 7.120.3 - '@sentry/utils': 7.120.3 + '@sentry/core': 7.120.4 + '@sentry/replay': 7.120.4 + '@sentry/types': 7.120.4 + '@sentry/utils': 7.120.4 - '@sentry-internal/tracing@7.120.3': + '@sentry-internal/tracing@7.120.4': dependencies: - '@sentry/core': 7.120.3 - '@sentry/types': 7.120.3 - '@sentry/utils': 7.120.3 + '@sentry/core': 7.120.4 + '@sentry/types': 7.120.4 + '@sentry/utils': 7.120.4 - '@sentry/babel-plugin-component-annotate@2.23.0': {} + '@sentry/babel-plugin-component-annotate@2.23.1': {} - '@sentry/browser@7.120.3': + '@sentry/browser@7.120.4': dependencies: - '@sentry-internal/feedback': 7.120.3 - '@sentry-internal/replay-canvas': 7.120.3 - '@sentry-internal/tracing': 7.120.3 - '@sentry/core': 7.120.3 - '@sentry/integrations': 7.120.3 - '@sentry/replay': 7.120.3 - '@sentry/types': 7.120.3 - '@sentry/utils': 7.120.3 + '@sentry-internal/feedback': 7.120.4 + '@sentry-internal/replay-canvas': 7.120.4 + '@sentry-internal/tracing': 7.120.4 + '@sentry/core': 7.120.4 + '@sentry/integrations': 7.120.4 + '@sentry/replay': 7.120.4 + '@sentry/types': 7.120.4 + '@sentry/utils': 7.120.4 - '@sentry/bundler-plugin-core@2.23.0': + '@sentry/bundler-plugin-core@2.23.1': dependencies: - '@babel/core': 7.27.1 - '@sentry/babel-plugin-component-annotate': 2.23.0 + '@babel/core': 7.28.3 + '@sentry/babel-plugin-component-annotate': 2.23.1 '@sentry/cli': 2.39.1 - dotenv: 16.5.0 + dotenv: 16.6.1 
find-up: 5.0.0 glob: 9.3.5 magic-string: 0.30.8 @@ -5801,97 +5794,97 @@ snapshots: - encoding - supports-color - '@sentry/core@7.120.3': + '@sentry/core@7.120.4': dependencies: - '@sentry/types': 7.120.3 - '@sentry/utils': 7.120.3 + '@sentry/types': 7.120.4 + '@sentry/utils': 7.120.4 - '@sentry/integrations@7.120.3': + '@sentry/integrations@7.120.4': dependencies: - '@sentry/core': 7.120.3 - '@sentry/types': 7.120.3 - '@sentry/utils': 7.120.3 + '@sentry/core': 7.120.4 + '@sentry/types': 7.120.4 + '@sentry/utils': 7.120.4 localforage: 1.10.0 - '@sentry/react@7.120.3(react@18.3.1)': + '@sentry/react@7.120.4(react@18.3.1)': dependencies: - '@sentry/browser': 7.120.3 - '@sentry/core': 7.120.3 - '@sentry/types': 7.120.3 - '@sentry/utils': 7.120.3 + '@sentry/browser': 7.120.4 + '@sentry/core': 7.120.4 + '@sentry/types': 7.120.4 + '@sentry/utils': 7.120.4 hoist-non-react-statics: 3.3.2 react: 18.3.1 - '@sentry/replay@7.120.3': + '@sentry/replay@7.120.4': dependencies: - '@sentry-internal/tracing': 7.120.3 - '@sentry/core': 7.120.3 - '@sentry/types': 7.120.3 - '@sentry/utils': 7.120.3 + '@sentry-internal/tracing': 7.120.4 + '@sentry/core': 7.120.4 + '@sentry/types': 7.120.4 + '@sentry/utils': 7.120.4 - '@sentry/types@7.120.3': {} + '@sentry/types@7.120.4': {} - '@sentry/utils@7.120.3': + '@sentry/utils@7.120.4': dependencies: - '@sentry/types': 7.120.3 + '@sentry/types': 7.120.4 - '@sentry/vite-plugin@2.23.0': + '@sentry/vite-plugin@2.23.1': dependencies: - '@sentry/bundler-plugin-core': 2.23.0 + '@sentry/bundler-plugin-core': 2.23.1 unplugin: 1.0.1 transitivePeerDependencies: - encoding - supports-color - '@shikijs/core@3.4.0': + '@shikijs/core@3.9.2': dependencies: - '@shikijs/types': 3.4.0 + '@shikijs/types': 3.9.2 '@shikijs/vscode-textmate': 10.0.2 '@types/hast': 3.0.4 hast-util-to-html: 9.0.5 - '@shikijs/engine-javascript@3.4.0': + '@shikijs/engine-javascript@3.9.2': dependencies: - '@shikijs/types': 3.4.0 + '@shikijs/types': 3.9.2 '@shikijs/vscode-textmate': 10.0.2 oniguruma-to-es: 4.3.3 - '@shikijs/engine-oniguruma@3.4.0': + '@shikijs/engine-oniguruma@3.9.2': dependencies: - '@shikijs/types': 3.4.0 + '@shikijs/types': 3.9.2 '@shikijs/vscode-textmate': 10.0.2 - '@shikijs/langs@3.4.0': + '@shikijs/langs@3.9.2': dependencies: - '@shikijs/types': 3.4.0 + '@shikijs/types': 3.9.2 - '@shikijs/themes@3.4.0': + '@shikijs/themes@3.9.2': dependencies: - '@shikijs/types': 3.4.0 + '@shikijs/types': 3.9.2 - '@shikijs/types@3.4.0': + '@shikijs/types@3.9.2': dependencies: '@shikijs/vscode-textmate': 10.0.2 '@types/hast': 3.0.4 '@shikijs/vscode-textmate@10.0.2': {} - '@tanstack/query-core@5.75.5': {} + '@tanstack/query-core@5.85.3': {} - '@tanstack/query-devtools@5.74.7': {} + '@tanstack/query-devtools@5.84.0': {} - '@tanstack/react-query-devtools@5.75.5(@tanstack/react-query@5.75.5(react@18.3.1))(react@18.3.1)': + '@tanstack/react-query-devtools@5.85.3(@tanstack/react-query@5.85.3)(react@18.3.1)': dependencies: - '@tanstack/query-devtools': 5.74.7 - '@tanstack/react-query': 5.75.5(react@18.3.1) + '@tanstack/query-devtools': 5.84.0 + '@tanstack/react-query': 5.85.3(react@18.3.1) react: 18.3.1 - '@tanstack/react-query@5.75.5(react@18.3.1)': + '@tanstack/react-query@5.85.3(react@18.3.1)': dependencies: - '@tanstack/query-core': 5.75.5 + '@tanstack/query-core': 5.85.3 react: 18.3.1 - '@tanstack/react-table@8.21.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@tanstack/react-table@8.21.3(react-dom@18.3.1)(react@18.3.1)': dependencies: '@tanstack/table-core': 8.21.3 react: 18.3.1 @@ -5899,31 +5892,31 @@ 
snapshots: '@tanstack/table-core@8.21.3': {} - '@tybys/wasm-util@0.9.0': + '@tybys/wasm-util@0.10.0': dependencies: tslib: 2.8.1 optional: true '@types/babel__core@7.20.5': dependencies: - '@babel/parser': 7.27.2 - '@babel/types': 7.27.1 + '@babel/parser': 7.28.3 + '@babel/types': 7.28.2 '@types/babel__generator': 7.27.0 '@types/babel__template': 7.4.4 - '@types/babel__traverse': 7.20.7 + '@types/babel__traverse': 7.28.0 '@types/babel__generator@7.27.0': dependencies: - '@babel/types': 7.27.1 + '@babel/types': 7.28.2 '@types/babel__template@7.4.4': dependencies: - '@babel/parser': 7.27.2 - '@babel/types': 7.27.1 + '@babel/parser': 7.28.3 + '@babel/types': 7.28.2 - '@types/babel__traverse@7.20.7': + '@types/babel__traverse@7.28.0': dependencies: - '@babel/types': 7.27.1 + '@babel/types': 7.28.2 '@types/d3-array@3.0.3': {} @@ -5952,7 +5945,7 @@ snapshots: '@types/d3-delaunay@6.0.4': {} - '@types/d3-dispatch@3.0.6': {} + '@types/d3-dispatch@3.0.7': {} '@types/d3-drag@3.0.7': dependencies: @@ -6046,7 +6039,7 @@ snapshots: '@types/d3-color': 3.1.3 '@types/d3-contour': 3.0.6 '@types/d3-delaunay': 6.0.4 - '@types/d3-dispatch': 3.0.6 + '@types/d3-dispatch': 3.0.7 '@types/d3-drag': 3.0.7 '@types/d3-dsv': 3.0.7 '@types/d3-ease': 3.0.2 @@ -6070,18 +6063,18 @@ snapshots: '@types/d3-transition': 3.0.9 '@types/d3-zoom': 3.0.8 - '@types/dagre@0.7.52': {} + '@types/dagre@0.7.53': {} '@types/dompurify@3.2.0': dependencies: - dompurify: 3.2.5 + dompurify: 3.2.6 '@types/eslint@8.56.12': dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 '@types/json-schema': 7.0.15 - '@types/estree@1.0.7': {} + '@types/estree@1.0.8': {} '@types/geojson@7946.0.16': {} @@ -6103,27 +6096,27 @@ snapshots: dependencies: '@types/unist': 3.0.3 - '@types/node@20.17.43': + '@types/node@20.19.10': dependencies: - undici-types: 6.19.8 + undici-types: 6.21.0 '@types/prismjs@1.26.5': {} - '@types/prop-types@15.7.14': {} + '@types/prop-types@15.7.15': {} - '@types/qs@6.9.18': {} + '@types/qs@6.14.0': {} - '@types/react-dom@18.3.7(@types/react@18.3.21)': + '@types/react-dom@18.3.7(@types/react@18.3.23)': dependencies: - '@types/react': 18.3.21 + '@types/react': 18.3.23 '@types/react-syntax-highlighter@15.5.13': dependencies: - '@types/react': 18.3.21 + '@types/react': 18.3.23 - '@types/react@18.3.21': + '@types/react@18.3.23': dependencies: - '@types/prop-types': 15.7.14 + '@types/prop-types': 15.7.15 csstype: 3.1.3 '@types/semver@7.7.0': {} @@ -6137,36 +6130,34 @@ snapshots: '@types/unist@3.0.3': {} - '@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3)': + '@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.57.1)(typescript@5.9.2)': dependencies: '@eslint-community/regexpp': 4.12.1 - '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.8.3) + '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.2) '@typescript-eslint/scope-manager': 6.21.0 - '@typescript-eslint/type-utils': 6.21.0(eslint@8.57.1)(typescript@5.8.3) - '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.8.3) + '@typescript-eslint/type-utils': 6.21.0(eslint@8.57.1)(typescript@5.9.2) + '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.9.2) '@typescript-eslint/visitor-keys': 6.21.0 - debug: 4.4.0 + debug: 4.4.1 eslint: 8.57.1 graphemer: 1.4.0 ignore: 5.3.2 natural-compare: 1.4.0 - semver: 7.7.1 - ts-api-utils: 1.4.3(typescript@5.8.3) - optionalDependencies: - typescript: 5.8.3 + semver: 7.7.2 + 
ts-api-utils: 1.4.3(typescript@5.9.2) + typescript: 5.9.2 transitivePeerDependencies: - supports-color - '@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3)': + '@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.9.2)': dependencies: '@typescript-eslint/scope-manager': 6.21.0 '@typescript-eslint/types': 6.21.0 - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.8.3) + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.2) '@typescript-eslint/visitor-keys': 6.21.0 - debug: 4.4.0 + debug: 4.4.1 eslint: 8.57.1 - optionalDependencies: - typescript: 5.8.3 + typescript: 5.9.2 transitivePeerDependencies: - supports-color @@ -6175,45 +6166,43 @@ snapshots: '@typescript-eslint/types': 6.21.0 '@typescript-eslint/visitor-keys': 6.21.0 - '@typescript-eslint/type-utils@6.21.0(eslint@8.57.1)(typescript@5.8.3)': + '@typescript-eslint/type-utils@6.21.0(eslint@8.57.1)(typescript@5.9.2)': dependencies: - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.8.3) - '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.8.3) - debug: 4.4.0 + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.2) + '@typescript-eslint/utils': 6.21.0(eslint@8.57.1)(typescript@5.9.2) + debug: 4.4.1 eslint: 8.57.1 - ts-api-utils: 1.4.3(typescript@5.8.3) - optionalDependencies: - typescript: 5.8.3 + ts-api-utils: 1.4.3(typescript@5.9.2) + typescript: 5.9.2 transitivePeerDependencies: - supports-color '@typescript-eslint/types@6.21.0': {} - '@typescript-eslint/typescript-estree@6.21.0(typescript@5.8.3)': + '@typescript-eslint/typescript-estree@6.21.0(typescript@5.9.2)': dependencies: '@typescript-eslint/types': 6.21.0 '@typescript-eslint/visitor-keys': 6.21.0 - debug: 4.4.0 + debug: 4.4.1 globby: 11.1.0 is-glob: 4.0.3 minimatch: 9.0.3 - semver: 7.7.1 - ts-api-utils: 1.4.3(typescript@5.8.3) - optionalDependencies: - typescript: 5.8.3 + semver: 7.7.2 + ts-api-utils: 1.4.3(typescript@5.9.2) + typescript: 5.9.2 transitivePeerDependencies: - supports-color - '@typescript-eslint/utils@6.21.0(eslint@8.57.1)(typescript@5.8.3)': + '@typescript-eslint/utils@6.21.0(eslint@8.57.1)(typescript@5.9.2)': dependencies: '@eslint-community/eslint-utils': 4.7.0(eslint@8.57.1) '@types/json-schema': 7.0.15 '@types/semver': 7.7.0 '@typescript-eslint/scope-manager': 6.21.0 '@typescript-eslint/types': 6.21.0 - '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.8.3) + '@typescript-eslint/typescript-estree': 6.21.0(typescript@5.9.2) eslint: 8.57.1 - semver: 7.7.1 + semver: 7.7.2 transitivePeerDependencies: - supports-color - typescript @@ -6225,62 +6214,68 @@ snapshots: '@ungap/structured-clone@1.3.0': {} - '@unrs/resolver-binding-darwin-arm64@1.7.2': + '@unrs/resolver-binding-android-arm-eabi@1.11.1': optional: true - '@unrs/resolver-binding-darwin-x64@1.7.2': + '@unrs/resolver-binding-android-arm64@1.11.1': optional: true - '@unrs/resolver-binding-freebsd-x64@1.7.2': + '@unrs/resolver-binding-darwin-arm64@1.11.1': optional: true - '@unrs/resolver-binding-linux-arm-gnueabihf@1.7.2': + '@unrs/resolver-binding-darwin-x64@1.11.1': optional: true - '@unrs/resolver-binding-linux-arm-musleabihf@1.7.2': + '@unrs/resolver-binding-freebsd-x64@1.11.1': optional: true - '@unrs/resolver-binding-linux-arm64-gnu@1.7.2': + '@unrs/resolver-binding-linux-arm-gnueabihf@1.11.1': optional: true - '@unrs/resolver-binding-linux-arm64-musl@1.7.2': + '@unrs/resolver-binding-linux-arm-musleabihf@1.11.1': optional: true - '@unrs/resolver-binding-linux-ppc64-gnu@1.7.2': + 
'@unrs/resolver-binding-linux-arm64-gnu@1.11.1': optional: true - '@unrs/resolver-binding-linux-riscv64-gnu@1.7.2': + '@unrs/resolver-binding-linux-arm64-musl@1.11.1': optional: true - '@unrs/resolver-binding-linux-riscv64-musl@1.7.2': + '@unrs/resolver-binding-linux-ppc64-gnu@1.11.1': optional: true - '@unrs/resolver-binding-linux-s390x-gnu@1.7.2': + '@unrs/resolver-binding-linux-riscv64-gnu@1.11.1': optional: true - '@unrs/resolver-binding-linux-x64-gnu@1.7.2': + '@unrs/resolver-binding-linux-riscv64-musl@1.11.1': optional: true - '@unrs/resolver-binding-linux-x64-musl@1.7.2': + '@unrs/resolver-binding-linux-s390x-gnu@1.11.1': optional: true - '@unrs/resolver-binding-wasm32-wasi@1.7.2': + '@unrs/resolver-binding-linux-x64-gnu@1.11.1': + optional: true + + '@unrs/resolver-binding-linux-x64-musl@1.11.1': + optional: true + + '@unrs/resolver-binding-wasm32-wasi@1.11.1': dependencies: - '@napi-rs/wasm-runtime': 0.2.9 + '@napi-rs/wasm-runtime': 0.2.12 optional: true - '@unrs/resolver-binding-win32-arm64-msvc@1.7.2': + '@unrs/resolver-binding-win32-arm64-msvc@1.11.1': optional: true - '@unrs/resolver-binding-win32-ia32-msvc@1.7.2': + '@unrs/resolver-binding-win32-ia32-msvc@1.11.1': optional: true - '@unrs/resolver-binding-win32-x64-msvc@1.7.2': + '@unrs/resolver-binding-win32-x64-msvc@1.11.1': optional: true '@visx/axis@3.12.0(react@18.3.1)': dependencies: - '@types/react': 18.3.21 + '@types/react': 18.3.23 '@visx/group': 3.12.0(react@18.3.1) '@visx/point': 3.12.0 '@visx/scale': 3.12.0 @@ -6290,10 +6285,10 @@ snapshots: prop-types: 15.8.1 react: 18.3.1 - '@visx/bounds@3.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@visx/bounds@3.12.0(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@types/react': 18.3.21 - '@types/react-dom': 18.3.7(@types/react@18.3.21) + '@types/react': 18.3.23 + '@types/react-dom': 18.3.7(@types/react@18.3.23) prop-types: 15.8.1 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) @@ -6316,7 +6311,7 @@ snapshots: '@visx/drag@3.12.0(react@18.3.1)': dependencies: - '@types/react': 18.3.21 + '@types/react': 18.3.23 '@visx/event': 3.12.0 '@visx/point': 3.12.0 prop-types: 15.8.1 @@ -6324,18 +6319,18 @@ snapshots: '@visx/event@3.12.0': dependencies: - '@types/react': 18.3.21 + '@types/react': 18.3.23 '@visx/point': 3.12.0 '@visx/gradient@3.12.0(react@18.3.1)': dependencies: - '@types/react': 18.3.21 + '@types/react': 18.3.23 prop-types: 15.8.1 react: 18.3.1 '@visx/grid@3.12.0(react@18.3.1)': dependencies: - '@types/react': 18.3.21 + '@types/react': 18.3.23 '@visx/curve': 3.12.0 '@visx/group': 3.12.0(react@18.3.1) '@visx/point': 3.12.0 @@ -6347,7 +6342,7 @@ snapshots: '@visx/group@3.12.0(react@18.3.1)': dependencies: - '@types/react': 18.3.21 + '@types/react': 18.3.23 classnames: 2.5.1 prop-types: 15.8.1 react: 18.3.1 @@ -6359,7 +6354,7 @@ snapshots: '@visx/pattern@3.12.0(react@18.3.1)': dependencies: - '@types/react': 18.3.21 + '@types/react': 18.3.23 classnames: 2.5.1 prop-types: 15.8.1 react: 18.3.1 @@ -6369,7 +6364,7 @@ snapshots: '@visx/responsive@3.12.0(react@18.3.1)': dependencies: '@types/lodash': 4.17.20 - '@types/react': 18.3.21 + '@types/react': 18.3.23 lodash: 4.17.21 prop-types: 15.8.1 react: 18.3.1 @@ -6383,7 +6378,7 @@ snapshots: '@types/d3-path': 1.0.11 '@types/d3-shape': 1.3.12 '@types/lodash': 4.17.20 - '@types/react': 18.3.21 + '@types/react': 18.3.23 '@visx/curve': 3.12.0 '@visx/group': 3.12.0(react@18.3.1) '@visx/scale': 3.12.0 @@ -6397,22 +6392,22 @@ snapshots: '@visx/text@3.12.0(react@18.3.1)': dependencies: '@types/lodash': 4.17.20 - '@types/react': 
18.3.21 + '@types/react': 18.3.23 classnames: 2.5.1 lodash: 4.17.21 prop-types: 15.8.1 react: 18.3.1 reduce-css-calc: 1.3.0 - '@visx/tooltip@3.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@visx/tooltip@3.12.0(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@types/react': 18.3.21 - '@visx/bounds': 3.12.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@types/react': 18.3.23 + '@visx/bounds': 3.12.0(react-dom@18.3.1)(react@18.3.1) classnames: 2.5.1 prop-types: 15.8.1 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-use-measure: 2.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react-use-measure: 2.1.7(react-dom@18.3.1)(react@18.3.1) '@visx/vendor@3.12.0': dependencies: @@ -6436,31 +6431,32 @@ snapshots: d3-time-format: 4.1.0 internmap: 2.0.3 - '@vitejs/plugin-react@4.4.1(vite@6.3.5(@types/node@20.17.43)(jiti@2.4.2)(tsx@4.19.4)(yaml@2.7.1))': + '@vitejs/plugin-react@4.7.0(vite@6.3.5)': dependencies: - '@babel/core': 7.27.1 - '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.27.1) - '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.27.1) + '@babel/core': 7.28.3 + '@babel/plugin-transform-react-jsx-self': 7.27.1(@babel/core@7.28.3) + '@babel/plugin-transform-react-jsx-source': 7.27.1(@babel/core@7.28.3) + '@rolldown/pluginutils': 1.0.0-beta.27 '@types/babel__core': 7.20.5 react-refresh: 0.17.0 - vite: 6.3.5(@types/node@20.17.43)(jiti@2.4.2)(tsx@4.19.4)(yaml@2.7.1) + vite: 6.3.5(@types/node@20.19.10)(tsx@4.20.4) transitivePeerDependencies: - supports-color - acorn-jsx@5.3.2(acorn@8.14.1): + acorn-jsx@5.3.2(acorn@8.15.0): dependencies: - acorn: 8.14.1 + acorn: 8.15.0 - acorn@8.14.1: {} + acorn@8.15.0: {} agent-base@6.0.2: dependencies: - debug: 4.4.0 + debug: 4.4.1 transitivePeerDependencies: - supports-color ajv-formats@2.1.1(ajv@8.17.1): - optionalDependencies: + dependencies: ajv: 8.17.1 ajv@6.12.6: @@ -6502,7 +6498,7 @@ snapshots: argparse@2.0.1: {} - aria-hidden@1.2.4: + aria-hidden@1.2.6: dependencies: tslib: 2.8.1 @@ -6511,14 +6507,16 @@ snapshots: call-bound: 1.0.4 is-array-buffer: 3.0.5 - array-includes@3.1.8: + array-includes@3.1.9: dependencies: call-bind: 1.0.8 + call-bound: 1.0.4 define-properties: 1.2.1 - es-abstract: 1.23.9 + es-abstract: 1.24.0 es-object-atoms: 1.1.1 get-intrinsic: 1.3.0 is-string: 1.1.1 + math-intrinsics: 1.1.0 array-union@2.1.0: {} @@ -6526,7 +6524,7 @@ snapshots: dependencies: call-bind: 1.0.8 define-properties: 1.2.1 - es-abstract: 1.23.9 + es-abstract: 1.24.0 es-errors: 1.3.0 es-object-atoms: 1.1.1 es-shim-unscopables: 1.1.0 @@ -6536,7 +6534,7 @@ snapshots: call-bind: 1.0.8 call-bound: 1.0.4 define-properties: 1.2.1 - es-abstract: 1.23.9 + es-abstract: 1.24.0 es-errors: 1.3.0 es-object-atoms: 1.1.1 es-shim-unscopables: 1.1.0 @@ -6545,21 +6543,21 @@ snapshots: dependencies: call-bind: 1.0.8 define-properties: 1.2.1 - es-abstract: 1.23.9 + es-abstract: 1.24.0 es-shim-unscopables: 1.1.0 array.prototype.flatmap@1.3.3: dependencies: call-bind: 1.0.8 define-properties: 1.2.1 - es-abstract: 1.23.9 + es-abstract: 1.24.0 es-shim-unscopables: 1.1.0 array.prototype.tosorted@1.1.4: dependencies: call-bind: 1.0.8 define-properties: 1.2.1 - es-abstract: 1.23.9 + es-abstract: 1.24.0 es-errors: 1.3.0 es-shim-unscopables: 1.1.0 @@ -6568,7 +6566,7 @@ snapshots: array-buffer-byte-length: 1.0.2 call-bind: 1.0.8 define-properties: 1.2.1 - es-abstract: 1.23.9 + es-abstract: 1.24.0 es-errors: 1.3.0 get-intrinsic: 1.3.0 is-array-buffer: 3.0.5 @@ -6577,14 +6575,14 @@ snapshots: asynckit@0.4.0: {} - 
-  autoprefixer@10.4.21(postcss@8.5.3):
+  autoprefixer@10.4.21(postcss@8.5.6):
     dependencies:
-      browserslist: 4.24.5
-      caniuse-lite: 1.0.30001717
+      browserslist: 4.25.2
+      caniuse-lite: 1.0.30001735
       fraction.js: 4.3.7
       normalize-range: 0.1.2
       picocolors: 1.1.1
-      postcss: 8.5.3
+      postcss: 8.5.6
       postcss-value-parser: 4.2.0

   available-typed-arrays@1.0.7:
@@ -6593,7 +6591,7 @@ snapshots:

   axios@1.11.0:
     dependencies:
-      follow-redirects: 1.15.9
+      follow-redirects: 1.15.11
       form-data: 4.0.4
       proxy-from-env: 1.1.0
     transitivePeerDependencies:
@@ -6605,12 +6603,12 @@ snapshots:

   binary-extensions@2.3.0: {}

-  brace-expansion@1.1.11:
+  brace-expansion@1.1.12:
     dependencies:
       balanced-match: 1.0.2
       concat-map: 0.0.1

-  brace-expansion@2.0.1:
+  brace-expansion@2.0.2:
     dependencies:
       balanced-match: 1.0.2

@@ -6618,26 +6616,26 @@ snapshots:
     dependencies:
       fill-range: 7.1.1

-  browserslist@4.24.5:
+  browserslist@4.25.2:
     dependencies:
-      caniuse-lite: 1.0.30001717
-      electron-to-chromium: 1.5.150
+      caniuse-lite: 1.0.30001735
+      electron-to-chromium: 1.5.201
       node-releases: 2.0.19
-      update-browserslist-db: 1.1.3(browserslist@4.24.5)
+      update-browserslist-db: 1.1.3(browserslist@4.25.2)

-  c12@3.0.3:
+  c12@3.2.0:
     dependencies:
       chokidar: 4.0.3
       confbox: 0.2.2
       defu: 6.1.4
-      dotenv: 16.5.0
-      exsolve: 1.0.5
+      dotenv: 17.2.1
+      exsolve: 1.0.7
       giget: 2.0.0
-      jiti: 2.4.2
+      jiti: 2.5.1
       ohash: 2.0.11
       pathe: 2.0.3
       perfect-debounce: 1.0.0
-      pkg-types: 2.1.0
+      pkg-types: 2.2.0
       rc9: 2.1.2

   call-bind-apply-helpers@1.0.2:
@@ -6663,7 +6661,7 @@ snapshots:

   camelcase-css@2.0.1: {}

-  caniuse-lite@1.0.30001717: {}
+  caniuse-lite@1.0.30001735: {}

   ccount@2.0.1: {}

@@ -6718,9 +6716,9 @@ snapshots:

   clsx@2.1.1: {}

-  cmdk@0.2.1(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+  cmdk@0.2.1(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1):
     dependencies:
-      '@radix-ui/react-dialog': 1.0.0(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+      '@radix-ui/react-dialog': 1.0.0(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)
       react: 18.3.1
       react-dom: 18.3.1(react@18.3.1)
     transitivePeerDependencies:
@@ -6767,9 +6765,9 @@ snapshots:

   cron-parser@4.9.0:
     dependencies:
-      luxon: 3.6.1
+      luxon: 3.7.1

-  cronstrue@2.60.0: {}
+  cronstrue@2.61.0: {}

   cross-spawn@7.0.6:
     dependencies:
@@ -6894,7 +6892,7 @@ snapshots:
     dependencies:
       ms: 2.1.3

-  debug@4.4.0:
+  debug@4.4.1:
     dependencies:
       ms: 2.1.3

@@ -6950,14 +6948,16 @@ snapshots:

   dom-helpers@5.2.1:
     dependencies:
-      '@babel/runtime': 7.27.1
+      '@babel/runtime': 7.28.3
       csstype: 3.1.3

-  dompurify@3.2.5:
+  dompurify@3.2.6:
     optionalDependencies:
       '@types/trusted-types': 2.0.7

-  dotenv@16.5.0: {}
+  dotenv@16.6.1: {}
+
+  dotenv@17.2.1: {}

   dunder-proto@1.0.1:
     dependencies:
@@ -6967,7 +6967,7 @@ snapshots:

   eastasianwidth@0.2.0: {}

-  electron-to-chromium@1.5.150: {}
+  electron-to-chromium@1.5.201: {}

   emoji-regex@8.0.0: {}

@@ -6975,7 +6975,7 @@ snapshots:

   entities@2.2.0: {}

-  es-abstract@1.23.9:
+  es-abstract@1.24.0:
     dependencies:
       array-buffer-byte-length: 1.0.2
       arraybuffer.prototype.slice: 1.0.4
@@ -7004,7 +7004,9 @@ snapshots:
       is-array-buffer: 3.0.5
       is-callable: 1.2.7
       is-data-view: 1.0.2
+      is-negative-zero: 2.0.3
       is-regex: 1.2.1
+      is-set: 2.0.3
       is-shared-array-buffer: 1.0.4
       is-string: 1.1.1
       is-typed-array: 1.1.15
@@ -7019,6 +7021,7 @@ snapshots:
       safe-push-apply: 1.0.0
       safe-regex-test: 1.1.0
       set-proto: 1.0.0
+      stop-iteration-iterator: 1.1.0
       string.prototype.trim: 1.2.10
       string.prototype.trimend: 1.0.9
       string.prototype.trimstart: 1.0.8
@@ -7038,7 +7041,7 @@ snapshots:
       call-bind: 1.0.8
       call-bound: 1.0.4
       define-properties: 1.2.1
-      es-abstract: 1.23.9
+      es-abstract: 1.24.0
       es-errors: 1.3.0
       es-set-tostringtag: 2.1.0
       function-bind: 1.1.2
@@ -7075,56 +7078,57 @@ snapshots:

   es6-promise@3.3.1: {}

-  esbuild@0.25.4:
+  esbuild@0.25.9:
     optionalDependencies:
-      '@esbuild/aix-ppc64': 0.25.4
-      '@esbuild/android-arm': 0.25.4
-      '@esbuild/android-arm64': 0.25.4
-      '@esbuild/android-x64': 0.25.4
-      '@esbuild/darwin-arm64': 0.25.4
-      '@esbuild/darwin-x64': 0.25.4
-      '@esbuild/freebsd-arm64': 0.25.4
-      '@esbuild/freebsd-x64': 0.25.4
-      '@esbuild/linux-arm': 0.25.4
-      '@esbuild/linux-arm64': 0.25.4
-      '@esbuild/linux-ia32': 0.25.4
-      '@esbuild/linux-loong64': 0.25.4
-      '@esbuild/linux-mips64el': 0.25.4
-      '@esbuild/linux-ppc64': 0.25.4
-      '@esbuild/linux-riscv64': 0.25.4
-      '@esbuild/linux-s390x': 0.25.4
-      '@esbuild/linux-x64': 0.25.4
-      '@esbuild/netbsd-arm64': 0.25.4
-      '@esbuild/netbsd-x64': 0.25.4
-      '@esbuild/openbsd-arm64': 0.25.4
-      '@esbuild/openbsd-x64': 0.25.4
-      '@esbuild/sunos-x64': 0.25.4
-      '@esbuild/win32-arm64': 0.25.4
-      '@esbuild/win32-ia32': 0.25.4
-      '@esbuild/win32-x64': 0.25.4
+      '@esbuild/aix-ppc64': 0.25.9
+      '@esbuild/android-arm': 0.25.9
+      '@esbuild/android-arm64': 0.25.9
+      '@esbuild/android-x64': 0.25.9
+      '@esbuild/darwin-arm64': 0.25.9
+      '@esbuild/darwin-x64': 0.25.9
+      '@esbuild/freebsd-arm64': 0.25.9
+      '@esbuild/freebsd-x64': 0.25.9
+      '@esbuild/linux-arm': 0.25.9
+      '@esbuild/linux-arm64': 0.25.9
+      '@esbuild/linux-ia32': 0.25.9
+      '@esbuild/linux-loong64': 0.25.9
+      '@esbuild/linux-mips64el': 0.25.9
+      '@esbuild/linux-ppc64': 0.25.9
+      '@esbuild/linux-riscv64': 0.25.9
+      '@esbuild/linux-s390x': 0.25.9
+      '@esbuild/linux-x64': 0.25.9
+      '@esbuild/netbsd-arm64': 0.25.9
+      '@esbuild/netbsd-x64': 0.25.9
+      '@esbuild/openbsd-arm64': 0.25.9
+      '@esbuild/openbsd-x64': 0.25.9
+      '@esbuild/openharmony-arm64': 0.25.9
+      '@esbuild/sunos-x64': 0.25.9
+      '@esbuild/win32-arm64': 0.25.9
+      '@esbuild/win32-ia32': 0.25.9
+      '@esbuild/win32-x64': 0.25.9

   escalade@3.2.0: {}

   escape-string-regexp@4.0.0: {}

-  eslint-config-airbnb-base@15.0.0(eslint-plugin-import@2.31.0)(eslint@8.57.1):
+  eslint-config-airbnb-base@15.0.0(eslint-plugin-import@2.32.0)(eslint@8.57.1):
     dependencies:
       confusing-browser-globals: 1.0.11
       eslint: 8.57.1
-      eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
+      eslint-plugin-import: 2.32.0(@typescript-eslint/parser@6.21.0)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
       object.assign: 4.1.7
       object.entries: 1.1.9
       semver: 6.3.1

-  eslint-config-airbnb-typescript@17.1.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3))(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint-plugin-import@2.31.0)(eslint@8.57.1):
+  eslint-config-airbnb-typescript@17.1.0(@typescript-eslint/eslint-plugin@6.21.0)(@typescript-eslint/parser@6.21.0)(eslint-plugin-import@2.32.0)(eslint@8.57.1):
     dependencies:
-      '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3)
-      '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.8.3)
+      '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.57.1)(typescript@5.9.2)
+      '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.2)
       eslint: 8.57.1
-      eslint-config-airbnb-base: 15.0.0(eslint-plugin-import@2.31.0)(eslint@8.57.1)
-      eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
+      eslint-config-airbnb-base: 15.0.0(eslint-plugin-import@2.32.0)(eslint@8.57.1)
+      eslint-plugin-import: 2.32.0(@typescript-eslint/parser@6.21.0)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)

-  eslint-config-prettier@9.1.0(eslint@8.57.1):
+  eslint-config-prettier@9.1.2(eslint@8.57.1):
     dependencies:
       eslint: 8.57.1

@@ -7136,36 +7140,35 @@ snapshots:
     transitivePeerDependencies:
       - supports-color

-  eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.31.0)(eslint@8.57.1):
+  eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1):
     dependencies:
       '@nolyfill/is-core-module': 1.0.39
-      debug: 4.4.0
+      debug: 4.4.1
       eslint: 8.57.1
-      get-tsconfig: 4.10.0
+      eslint-plugin-import: 2.32.0(@typescript-eslint/parser@6.21.0)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
+      get-tsconfig: 4.10.1
       is-bun-module: 2.0.0
       stable-hash: 0.0.5
-      tinyglobby: 0.2.13
-      unrs-resolver: 1.7.2
-    optionalDependencies:
-      eslint-plugin-import: 2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
+      tinyglobby: 0.2.14
+      unrs-resolver: 1.11.1
     transitivePeerDependencies:
      - supports-color

-  eslint-module-utils@2.12.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
+  eslint-module-utils@2.12.1(@typescript-eslint/parser@6.21.0)(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
     dependencies:
+      '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.2)
       debug: 3.2.7
-    optionalDependencies:
-      '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.8.3)
       eslint: 8.57.1
       eslint-import-resolver-node: 0.3.9
-      eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.31.0)(eslint@8.57.1)
+      eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1)
     transitivePeerDependencies:
       - supports-color

-  eslint-plugin-import@2.31.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
+  eslint-plugin-import@2.32.0(@typescript-eslint/parser@6.21.0)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
     dependencies:
       '@rtsao/scc': 1.1.0
-      array-includes: 3.1.8
+      '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.9.2)
+      array-includes: 3.1.9
       array.prototype.findlastindex: 1.2.6
       array.prototype.flat: 1.3.3
       array.prototype.flatmap: 1.3.3
@@ -7173,7 +7176,7 @@ snapshots:
       doctrine: 2.1.0
       eslint: 8.57.1
       eslint-import-resolver-node: 0.3.9
-      eslint-module-utils: 2.12.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
+      eslint-module-utils: 2.12.1(@typescript-eslint/parser@6.21.0)(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
       hasown: 2.0.2
       is-core-module: 2.16.1
       is-glob: 4.0.3
@@ -7184,22 +7187,18 @@ snapshots:
       semver: 6.3.1
       string.prototype.trimend: 1.0.9
       tsconfig-paths: 3.15.0
-    optionalDependencies:
-      '@typescript-eslint/parser': 6.21.0(eslint@8.57.1)(typescript@5.8.3)
     transitivePeerDependencies:
       - eslint-import-resolver-typescript
       - eslint-import-resolver-webpack
       - supports-color
-  eslint-plugin-prettier@5.4.0(@types/eslint@8.56.12)(eslint-config-prettier@9.1.0(eslint@8.57.1))(eslint@8.57.1)(prettier@3.5.3):
+  eslint-plugin-prettier@5.5.4(eslint-config-prettier@9.1.2)(eslint@8.57.1)(prettier@3.6.2):
     dependencies:
       eslint: 8.57.1
-      prettier: 3.5.3
+      eslint-config-prettier: 9.1.2(eslint@8.57.1)
+      prettier: 3.6.2
       prettier-linter-helpers: 1.0.0
-      synckit: 0.11.4
-    optionalDependencies:
-      '@types/eslint': 8.56.12
-      eslint-config-prettier: 9.1.0(eslint@8.57.1)
+      synckit: 0.11.11

   eslint-plugin-react-hooks@4.6.2(eslint@8.57.1):
     dependencies:
@@ -7211,7 +7210,7 @@ snapshots:

   eslint-plugin-react@7.37.5(eslint@8.57.1):
     dependencies:
-      array-includes: 3.1.8
+      array-includes: 3.1.9
       array.prototype.findlast: 1.2.5
       array.prototype.flatmap: 1.3.3
       array.prototype.tosorted: 1.1.4
@@ -7231,12 +7230,11 @@ snapshots:
       string.prototype.matchall: 4.0.12
       string.prototype.repeat: 1.0.0

-  eslint-plugin-unused-imports@3.2.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1):
+  eslint-plugin-unused-imports@3.2.0(@typescript-eslint/eslint-plugin@6.21.0)(eslint@8.57.1):
     dependencies:
+      '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.57.1)(typescript@5.9.2)
       eslint: 8.57.1
       eslint-rule-composer: 0.3.0
-    optionalDependencies:
-      '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.1)(typescript@5.8.3))(eslint@8.57.1)(typescript@5.8.3)

   eslint-rule-composer@0.3.0: {}

@@ -7260,7 +7258,7 @@ snapshots:
       ajv: 6.12.6
       chalk: 4.1.2
       cross-spawn: 7.0.6
-      debug: 4.4.0
+      debug: 4.4.1
       doctrine: 3.0.0
       escape-string-regexp: 4.0.0
       eslint-scope: 7.2.2
@@ -7292,8 +7290,8 @@ snapshots:

   espree@9.6.1:
     dependencies:
-      acorn: 8.14.1
-      acorn-jsx: 5.3.2(acorn@8.14.1)
+      acorn: 8.15.0
+      acorn-jsx: 5.3.2(acorn@8.15.0)
       eslint-visitor-keys: 3.4.3

   esquery@1.6.0:
@@ -7314,7 +7312,7 @@ snapshots:

   eventemitter3@4.0.7: {}

-  exsolve@1.0.5: {}
+  exsolve@1.0.7: {}

   fast-deep-equal@3.1.3: {}

@@ -7348,9 +7346,9 @@ snapshots:
     dependencies:
       format: 0.2.2

-  fdir@6.4.4(picomatch@4.0.2):
-    optionalDependencies:
-      picomatch: 4.0.2
+  fdir@6.5.0(picomatch@4.0.3):
+    dependencies:
+      picomatch: 4.0.3

   file-entry-cache@6.0.1:
     dependencies:
@@ -7373,7 +7371,7 @@ snapshots:

   flatted@3.3.3: {}

-  follow-redirects@1.15.9: {}
+  follow-redirects@1.15.11: {}

   for-each@0.3.5:
     dependencies:
@@ -7444,7 +7442,7 @@ snapshots:
       es-errors: 1.3.0
       get-intrinsic: 1.3.0

-  get-tsconfig@4.10.0:
+  get-tsconfig@4.10.1:
     dependencies:
       resolve-pkg-maps: 1.0.0

@@ -7453,8 +7451,8 @@ snapshots:
       citty: 0.1.6
       consola: 3.4.2
       defu: 6.1.4
-      node-fetch-native: 1.6.6
-      nypm: 0.6.0
+      node-fetch-native: 1.6.7
+      nypm: 0.6.1
       pathe: 2.0.3

   glob-parent@5.1.2:
@@ -7490,8 +7488,6 @@ snapshots:
       minipass: 4.2.8
       path-scurry: 1.11.1

-  globals@11.12.0: {}
-
   globals@13.24.0:
     dependencies:
       type-fest: 0.20.2
@@ -7551,7 +7547,7 @@ snapshots:
       hast-util-whitespace: 3.0.0
       html-void-elements: 3.0.0
       mdast-util-to-hast: 13.2.0
-      property-information: 7.0.0
+      property-information: 7.1.0
       space-separated-tokens: 2.0.2
       stringify-entities: 4.0.4
       zwitch: 2.0.4
@@ -7583,7 +7579,7 @@ snapshots:

   https-proxy-agent@5.0.1:
     dependencies:
       agent-base: 6.0.2
-      debug: 4.4.0
+      debug: 4.4.1
     transitivePeerDependencies:
       - supports-color

@@ -7649,7 +7645,7 @@ snapshots:

   is-bun-module@2.0.0:
     dependencies:
-      semver: 7.7.1
+      semver: 7.7.2

   is-callable@1.2.7: {}

@@ -7693,6 +7689,8 @@ snapshots:

   is-map@2.0.3: {}

+  is-negative-zero@2.0.3: {}
+
   is-number-object@1.1.1:
     dependencies:
       call-bound: 1.0.4
@@ -7762,11 +7760,12 @@ snapshots:

   jiti@1.21.7: {}

-  jiti@2.4.2: {}
+  jiti@2.5.1: {}

-  jotai@2.12.4(@types/react@18.3.21)(react@18.3.1):
-    optionalDependencies:
-      '@types/react': 18.3.21
+  jotai@2.13.1(@babel/core@7.28.3)(@types/react@18.3.23)(react@18.3.1):
+    dependencies:
+      '@babel/core': 7.28.3
+      '@types/react': 18.3.23
       react: 18.3.1

   js-confetti@0.12.0: {}

@@ -7807,7 +7806,7 @@ snapshots:

   jsx-ast-utils@3.3.5:
     dependencies:
-      array-includes: 3.1.8
+      array-includes: 3.1.9
       array.prototype.flat: 1.3.3
       object.assign: 4.1.7
       object.values: 1.2.1
@@ -7862,13 +7861,13 @@ snapshots:
     dependencies:
       react: 18.3.1

-  luxon@3.6.1: {}
+  luxon@3.7.1: {}

   magic-string@0.30.8:
     dependencies:
-      '@jridgewell/sourcemap-codec': 1.5.0
+      '@jridgewell/sourcemap-codec': 1.5.5

-  markdown-to-jsx@7.7.6(react@18.3.1):
+  markdown-to-jsx@7.7.13(react@18.3.1):
     dependencies:
       react: 18.3.1

@@ -7920,19 +7919,19 @@ snapshots:

   minimatch@3.1.2:
     dependencies:
-      brace-expansion: 1.1.11
+      brace-expansion: 1.1.12

   minimatch@8.0.4:
     dependencies:
-      brace-expansion: 2.0.1
+      brace-expansion: 2.0.2

   minimatch@9.0.3:
     dependencies:
-      brace-expansion: 2.0.1
+      brace-expansion: 2.0.2

   minimatch@9.0.5:
     dependencies:
-      brace-expansion: 2.0.1
+      brace-expansion: 2.0.2

   minimist@1.2.8: {}

@@ -7940,9 +7939,9 @@ snapshots:

   minipass@7.1.2: {}

-  monaco-editor@0.47.0: {}
+  monaco-editor@0.52.2: {}

-  monaco-themes@0.4.5:
+  monaco-themes@0.4.6:
     dependencies:
       fast-plist: 0.1.3

@@ -7958,7 +7957,7 @@ snapshots:

   nanoid@5.1.5: {}

-  napi-postinstall@0.2.3: {}
+  napi-postinstall@0.3.3: {}

   natural-compare@1.4.0: {}

@@ -7966,7 +7965,7 @@ snapshots:
     dependencies:
       http2-client: 1.3.5

-  node-fetch-native@1.6.6: {}
+  node-fetch-native@1.6.7: {}

   node-fetch@2.7.0:
     dependencies:
@@ -7982,13 +7981,13 @@ snapshots:

   normalize-range@0.1.2: {}

-  nypm@0.6.0:
+  nypm@0.6.1:
     dependencies:
       citty: 0.1.6
       consola: 3.4.2
       pathe: 2.0.3
-      pkg-types: 2.1.0
-      tinyexec: 0.3.2
+      pkg-types: 2.2.0
+      tinyexec: 1.0.1

   oas-kit-common@1.0.8:
     dependencies:
@@ -8049,14 +8048,14 @@ snapshots:
     dependencies:
       call-bind: 1.0.8
       define-properties: 1.2.1
-      es-abstract: 1.23.9
+      es-abstract: 1.24.0
       es-object-atoms: 1.1.1

   object.groupby@1.0.3:
     dependencies:
       call-bind: 1.0.8
       define-properties: 1.2.1
-      es-abstract: 1.23.9
+      es-abstract: 1.24.0

   object.values@1.2.1:
     dependencies:
@@ -8140,42 +8139,41 @@ snapshots:

   picomatch@2.3.1: {}

-  picomatch@4.0.2: {}
+  picomatch@4.0.3: {}

   pify@2.3.0: {}

   pirates@4.0.7: {}

-  pkg-types@2.1.0:
+  pkg-types@2.2.0:
     dependencies:
       confbox: 0.2.2
-      exsolve: 1.0.5
+      exsolve: 1.0.7
       pathe: 2.0.3

   possible-typed-array-names@1.1.0: {}

-  postcss-import@15.1.0(postcss@8.5.3):
+  postcss-import@15.1.0(postcss@8.5.6):
     dependencies:
-      postcss: 8.5.3
+      postcss: 8.5.6
       postcss-value-parser: 4.2.0
       read-cache: 1.0.0
       resolve: 1.22.10

-  postcss-js@4.0.1(postcss@8.5.3):
+  postcss-js@4.0.1(postcss@8.5.6):
     dependencies:
       camelcase-css: 2.0.1
-      postcss: 8.5.3
+      postcss: 8.5.6

-  postcss-load-config@4.0.2(postcss@8.5.3):
+  postcss-load-config@4.0.2(postcss@8.5.6):
     dependencies:
       lilconfig: 3.1.3
-      yaml: 2.7.1
-    optionalDependencies:
-      postcss: 8.5.3
+      postcss: 8.5.6
+      yaml: 2.8.1

-  postcss-nested@6.2.0(postcss@8.5.3):
+  postcss-nested@6.2.0(postcss@8.5.6):
     dependencies:
-      postcss: 8.5.3
+      postcss: 8.5.6
       postcss-selector-parser: 6.1.2

   postcss-selector-parser@6.1.2:
@@ -8185,7 +8183,7 @@ snapshots:

   postcss-value-parser@4.2.0: {}

-  postcss@8.5.3:
+  postcss@8.5.6:
     dependencies:
       nanoid: 3.3.11
       picocolors: 1.1.1
@@ -8197,7 +8195,7 @@ snapshots:
     dependencies:
       fast-diff: 1.3.0
-  prettier@3.5.3: {}
+  prettier@3.6.2: {}

   prism-react-renderer@2.4.1(react@18.3.1):
     dependencies:
@@ -8219,7 +8217,7 @@ snapshots:
     dependencies:
       xtend: 4.0.2

-  property-information@7.0.0: {}
+  property-information@7.1.0: {}

   proxy-from-env@1.1.0: {}

@@ -8247,7 +8245,7 @@ snapshots:
       react: 18.3.1
       scheduler: 0.23.2

-  react-hook-form@7.56.2(react@18.3.1):
+  react-hook-form@7.62.0(react@18.3.1):
     dependencies:
       react: 18.3.1

@@ -8261,67 +8259,63 @@ snapshots:

   react-refresh@0.17.0: {}

-  react-remove-scroll-bar@2.3.8(@types/react@18.3.21)(react@18.3.1):
+  react-remove-scroll-bar@2.3.8(@types/react@18.3.23)(react@18.3.1):
     dependencies:
+      '@types/react': 18.3.23
       react: 18.3.1
-      react-style-singleton: 2.2.3(@types/react@18.3.21)(react@18.3.1)
+      react-style-singleton: 2.2.3(@types/react@18.3.23)(react@18.3.1)
       tslib: 2.8.1
-    optionalDependencies:
-      '@types/react': 18.3.21

-  react-remove-scroll@2.5.4(@types/react@18.3.21)(react@18.3.1):
+  react-remove-scroll@2.5.4(@types/react@18.3.23)(react@18.3.1):
     dependencies:
+      '@types/react': 18.3.23
       react: 18.3.1
-      react-remove-scroll-bar: 2.3.8(@types/react@18.3.21)(react@18.3.1)
-      react-style-singleton: 2.2.3(@types/react@18.3.21)(react@18.3.1)
+      react-remove-scroll-bar: 2.3.8(@types/react@18.3.23)(react@18.3.1)
+      react-style-singleton: 2.2.3(@types/react@18.3.23)(react@18.3.1)
       tslib: 2.8.1
-      use-callback-ref: 1.3.3(@types/react@18.3.21)(react@18.3.1)
-      use-sidecar: 1.1.3(@types/react@18.3.21)(react@18.3.1)
-    optionalDependencies:
-      '@types/react': 18.3.21
+      use-callback-ref: 1.3.3(@types/react@18.3.23)(react@18.3.1)
+      use-sidecar: 1.1.3(@types/react@18.3.23)(react@18.3.1)

-  react-remove-scroll@2.6.3(@types/react@18.3.21)(react@18.3.1):
+  react-remove-scroll@2.7.1(@types/react@18.3.23)(react@18.3.1):
     dependencies:
+      '@types/react': 18.3.23
       react: 18.3.1
-      react-remove-scroll-bar: 2.3.8(@types/react@18.3.21)(react@18.3.1)
-      react-style-singleton: 2.2.3(@types/react@18.3.21)(react@18.3.1)
+      react-remove-scroll-bar: 2.3.8(@types/react@18.3.23)(react@18.3.1)
+      react-style-singleton: 2.2.3(@types/react@18.3.23)(react@18.3.1)
       tslib: 2.8.1
-      use-callback-ref: 1.3.3(@types/react@18.3.21)(react@18.3.1)
-      use-sidecar: 1.1.3(@types/react@18.3.21)(react@18.3.1)
-    optionalDependencies:
-      '@types/react': 18.3.21
+      use-callback-ref: 1.3.3(@types/react@18.3.23)(react@18.3.1)
+      use-sidecar: 1.1.3(@types/react@18.3.23)(react@18.3.1)

-  react-router-dom@6.30.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+  react-router-dom@6.30.1(react-dom@18.3.1)(react@18.3.1):
     dependencies:
       '@remix-run/router': 1.23.0
       react: 18.3.1
       react-dom: 18.3.1(react@18.3.1)
-      react-router: 6.30.0(react@18.3.1)
+      react-router: 6.30.1(react@18.3.1)

-  react-router@6.30.0(react@18.3.1):
+  react-router@6.30.1(react@18.3.1):
     dependencies:
       '@remix-run/router': 1.23.0
       react: 18.3.1

-  react-smooth@4.0.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+  react-smooth@4.0.4(react-dom@18.3.1)(react@18.3.1):
     dependencies:
       fast-equals: 5.2.2
       prop-types: 15.8.1
       react: 18.3.1
       react-dom: 18.3.1(react@18.3.1)
-      react-transition-group: 4.4.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+      react-transition-group: 4.4.5(react-dom@18.3.1)(react@18.3.1)

-  react-style-singleton@2.2.3(@types/react@18.3.21)(react@18.3.1):
+  react-style-singleton@2.2.3(@types/react@18.3.23)(react@18.3.1):
     dependencies:
+      '@types/react': 18.3.23
       get-nonce: 1.0.1
       react: 18.3.1
       tslib: 2.8.1
-    optionalDependencies:
-      '@types/react': 18.3.21

   react-syntax-highlighter@15.6.1(react@18.3.1):
     dependencies:
-      '@babel/runtime': 7.27.1
+      '@babel/runtime': 7.28.3
       highlight.js: 10.7.3
       highlightjs-vue: 1.0.0
       lowlight: 1.20.0
@@ -8329,33 +8323,32 @@ snapshots:
       react: 18.3.1
       refractor: 3.6.0

-  react-transition-group@4.4.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+  react-transition-group@4.4.5(react-dom@18.3.1)(react@18.3.1):
     dependencies:
-      '@babel/runtime': 7.27.1
+      '@babel/runtime': 7.28.3
       dom-helpers: 5.2.1
       loose-envify: 1.4.0
       prop-types: 15.8.1
       react: 18.3.1
       react-dom: 18.3.1(react@18.3.1)

-  react-use-measure@2.1.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+  react-use-measure@2.1.7(react-dom@18.3.1)(react@18.3.1):
     dependencies:
       react: 18.3.1
-    optionalDependencies:
       react-dom: 18.3.1(react@18.3.1)

   react@18.3.1:
     dependencies:
       loose-envify: 1.4.0

-  reactflow@11.11.4(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+  reactflow@11.11.4(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1):
     dependencies:
-      '@reactflow/background': 11.3.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
-      '@reactflow/controls': 11.2.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
-      '@reactflow/core': 11.11.4(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
-      '@reactflow/minimap': 11.7.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
-      '@reactflow/node-resizer': 2.2.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
-      '@reactflow/node-toolbar': 1.3.14(@types/react@18.3.21)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+      '@reactflow/background': 11.3.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)
+      '@reactflow/controls': 11.2.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)
+      '@reactflow/core': 11.11.4(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)
+      '@reactflow/minimap': 11.7.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)
+      '@reactflow/node-resizer': 2.2.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)
+      '@reactflow/node-toolbar': 1.3.14(@types/react@18.3.23)(react-dom@18.3.1)(react@18.3.1)
       react: 18.3.1
       react-dom: 18.3.1(react@18.3.1)
     transitivePeerDependencies:
@@ -8376,7 +8369,7 @@ snapshots:
     dependencies:
       decimal.js-light: 2.5.1

-  recharts@2.15.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
+  recharts@2.15.4(react-dom@18.3.1)(react@18.3.1):
     dependencies:
       clsx: 2.1.1
       eventemitter3: 4.0.7
@@ -8384,7 +8377,7 @@ snapshots:
       react: 18.3.1
       react-dom: 18.3.1(react@18.3.1)
       react-is: 18.3.1
-      react-smooth: 4.0.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
+      react-smooth: 4.0.4(react-dom@18.3.1)(react@18.3.1)
       recharts-scale: 0.4.5
       tiny-invariant: 1.3.3
       victory-vendor: 36.9.2
@@ -8403,7 +8396,7 @@ snapshots:
     dependencies:
       call-bind: 1.0.8
       define-properties: 1.2.1
-      es-abstract: 1.23.9
+      es-abstract: 1.24.0
       es-errors: 1.3.0
       es-object-atoms: 1.1.1
       get-intrinsic: 1.3.0
@@ -8469,30 +8462,30 @@ snapshots:
     optionalDependencies:
       fsevents: 2.3.3

-  rollup@4.40.2:
+  rollup@4.46.2:
     dependencies:
-      '@types/estree': 1.0.7
+      '@types/estree': 1.0.8
     optionalDependencies:
-      '@rollup/rollup-android-arm-eabi': 4.40.2
-      '@rollup/rollup-android-arm64': 4.40.2
-      '@rollup/rollup-darwin-arm64': 4.40.2
-      '@rollup/rollup-darwin-x64': 4.40.2
-      '@rollup/rollup-freebsd-arm64': 4.40.2
-      '@rollup/rollup-freebsd-x64': 4.40.2
-      '@rollup/rollup-linux-arm-gnueabihf': 4.40.2
-      '@rollup/rollup-linux-arm-musleabihf': 4.40.2
-      '@rollup/rollup-linux-arm64-gnu': 4.40.2
-      '@rollup/rollup-linux-arm64-musl': 4.40.2
-      '@rollup/rollup-linux-loongarch64-gnu': 4.40.2
-      '@rollup/rollup-linux-powerpc64le-gnu': 4.40.2
-      '@rollup/rollup-linux-riscv64-gnu': 4.40.2
-      '@rollup/rollup-linux-riscv64-musl': 4.40.2
-      '@rollup/rollup-linux-s390x-gnu': 4.40.2
-      '@rollup/rollup-linux-x64-gnu': 4.40.2
-      '@rollup/rollup-linux-x64-musl': 4.40.2
-      '@rollup/rollup-win32-arm64-msvc': 4.40.2
-      '@rollup/rollup-win32-ia32-msvc': 4.40.2
-      '@rollup/rollup-win32-x64-msvc': 4.40.2
+      '@rollup/rollup-android-arm-eabi': 4.46.2
+      '@rollup/rollup-android-arm64': 4.46.2
+      '@rollup/rollup-darwin-arm64': 4.46.2
+      '@rollup/rollup-darwin-x64': 4.46.2
+      '@rollup/rollup-freebsd-arm64': 4.46.2
+      '@rollup/rollup-freebsd-x64': 4.46.2
+      '@rollup/rollup-linux-arm-gnueabihf': 4.46.2
+      '@rollup/rollup-linux-arm-musleabihf': 4.46.2
+      '@rollup/rollup-linux-arm64-gnu': 4.46.2
+      '@rollup/rollup-linux-arm64-musl': 4.46.2
+      '@rollup/rollup-linux-loongarch64-gnu': 4.46.2
+      '@rollup/rollup-linux-ppc64-gnu': 4.46.2
+      '@rollup/rollup-linux-riscv64-gnu': 4.46.2
+      '@rollup/rollup-linux-riscv64-musl': 4.46.2
+      '@rollup/rollup-linux-s390x-gnu': 4.46.2
+      '@rollup/rollup-linux-x64-gnu': 4.46.2
+      '@rollup/rollup-linux-x64-musl': 4.46.2
+      '@rollup/rollup-win32-arm64-msvc': 4.46.2
+      '@rollup/rollup-win32-ia32-msvc': 4.46.2
+      '@rollup/rollup-win32-x64-msvc': 4.46.2
       fsevents: 2.3.3

   run-parallel@1.2.0:
     dependencies:
@@ -8524,7 +8517,7 @@ snapshots:

   semver@6.3.1: {}

-  semver@7.7.1: {}
+  semver@7.7.2: {}

   set-function-length@1.2.2:
     dependencies:
@@ -8554,14 +8547,14 @@ snapshots:

   shebang-regex@3.0.0: {}

-  shiki@3.4.0:
+  shiki@3.9.2:
     dependencies:
-      '@shikijs/core': 3.4.0
-      '@shikijs/engine-javascript': 3.4.0
-      '@shikijs/engine-oniguruma': 3.4.0
-      '@shikijs/langs': 3.4.0
-      '@shikijs/themes': 3.4.0
-      '@shikijs/types': 3.4.0
+      '@shikijs/core': 3.9.2
+      '@shikijs/engine-javascript': 3.9.2
+      '@shikijs/engine-oniguruma': 3.9.2
+      '@shikijs/langs': 3.9.2
+      '@shikijs/themes': 3.9.2
+      '@shikijs/types': 3.9.2
       '@shikijs/vscode-textmate': 10.0.2
       '@types/hast': 3.0.4

@@ -8633,6 +8626,11 @@ snapshots:

   state-local@1.0.7: {}

+  stop-iteration-iterator@1.1.0:
+    dependencies:
+      es-errors: 1.3.0
+      internal-slot: 1.1.0
+
   string-width@4.2.3:
     dependencies:
       emoji-regex: 8.0.0
@@ -8650,7 +8648,7 @@ snapshots:
       call-bind: 1.0.8
       call-bound: 1.0.4
       define-properties: 1.2.1
-      es-abstract: 1.23.9
+      es-abstract: 1.24.0
       es-errors: 1.3.0
       es-object-atoms: 1.1.1
       get-intrinsic: 1.3.0
@@ -8664,7 +8662,7 @@ snapshots:

   string.prototype.repeat@1.0.0:
     dependencies:
       define-properties: 1.2.1
-      es-abstract: 1.23.9
+      es-abstract: 1.24.0

   string.prototype.trim@1.2.10:
     dependencies:
@@ -8672,7 +8670,7 @@ snapshots:
       call-bound: 1.0.4
       define-data-property: 1.1.4
       define-properties: 1.2.1
-      es-abstract: 1.23.9
+      es-abstract: 1.24.0
       es-object-atoms: 1.1.1
       has-property-descriptors: 1.0.2

@@ -8708,7 +8706,7 @@ snapshots:

   sucrase@3.35.0:
     dependencies:
-      '@jridgewell/gen-mapping': 0.3.8
+      '@jridgewell/gen-mapping': 0.3.13
       commander: 4.1.1
       glob: 10.4.5
       lines-and-columns: 1.2.4
@@ -8724,12 +8722,12 @@ snapshots:

   swagger-schema-official@2.0.0-bab6bed: {}

-  swagger-typescript-api@13.1.3:
+  swagger-typescript-api@13.2.8:
     dependencies:
-      '@biomejs/js-api': 0.7.1(@biomejs/wasm-nodejs@1.9.4)
-      '@biomejs/wasm-nodejs': 1.9.4
+      '@biomejs/js-api': 2.0.3(@biomejs/wasm-nodejs@2.1.4)
+      '@biomejs/wasm-nodejs': 2.1.4
       '@types/swagger-schema-official': 2.0.25
-      c12: 3.0.3
+      c12: 3.2.0
       citty: 0.1.6
       consola: 3.4.2
       eta: 2.2.0
@@ -8738,7 +8736,7 @@ snapshots:
       nanoid: 5.1.5
       swagger-schema-official: 2.0.0-bab6bed
       swagger2openapi: 7.0.8
-      typescript: 5.8.3
+      typescript: 5.9.2
     transitivePeerDependencies:
       - '@biomejs/wasm-bundler'
       - '@biomejs/wasm-web'
@@ -8761,10 +8759,9 @@ snapshots:
     transitivePeerDependencies:
       - encoding

-  synckit@0.11.4:
+  synckit@0.11.11:
     dependencies:
-      '@pkgr/core': 0.2.4
-      tslib: 2.8.1
+      '@pkgr/core': 0.2.9

   tailwind-merge@2.6.0: {}

@@ -8788,11 +8785,11 @@ snapshots:
       normalize-path: 3.0.0
       object-hash: 3.0.0
       picocolors: 1.1.1
-      postcss: 8.5.3
-      postcss-import: 15.1.0(postcss@8.5.3)
-      postcss-js: 4.0.1(postcss@8.5.3)
-      postcss-load-config: 4.0.2(postcss@8.5.3)
-      postcss-nested: 6.2.0(postcss@8.5.3)
+      postcss: 8.5.6
+      postcss-import: 15.1.0(postcss@8.5.6)
+      postcss-js: 4.0.1(postcss@8.5.6)
+      postcss-load-config: 4.0.2(postcss@8.5.6)
+      postcss-nested: 6.2.0(postcss@8.5.6)
       postcss-selector-parser: 6.1.2
       resolve: 1.22.10
       sucrase: 3.35.0
@@ -8818,12 +8815,12 @@ snapshots:

   tiny-invariant@1.3.3: {}

-  tinyexec@0.3.2: {}
+  tinyexec@1.0.1: {}

-  tinyglobby@0.2.13:
+  tinyglobby@0.2.14:
     dependencies:
-      fdir: 6.4.4(picomatch@4.0.2)
-      picomatch: 4.0.2
+      fdir: 6.5.0(picomatch@4.0.3)
+      picomatch: 4.0.3

   to-regex-range@5.0.1:
     dependencies:
@@ -8833,17 +8830,17 @@ snapshots:

   trim-lines@3.0.1: {}

-  ts-api-utils@1.4.3(typescript@5.8.3):
+  ts-api-utils@1.4.3(typescript@5.9.2):
     dependencies:
-      typescript: 5.8.3
+      typescript: 5.9.2

   ts-interface-checker@0.1.13: {}

-  ts-unused-exports@11.0.1(typescript@5.8.3):
+  ts-unused-exports@11.0.1(typescript@5.9.2):
     dependencies:
       chalk: 4.1.2
       tsconfig-paths: 3.15.0
-      typescript: 5.8.3
+      typescript: 5.9.2

   tsconfig-paths@3.15.0:
     dependencies:
@@ -8854,10 +8851,10 @@ snapshots:

   tslib@2.8.1: {}

-  tsx@4.19.4:
+  tsx@4.20.4:
     dependencies:
-      esbuild: 0.25.4
-      get-tsconfig: 4.10.0
+      esbuild: 0.25.9
+      get-tsconfig: 4.10.1
     optionalDependencies:
       fsevents: 2.3.3

@@ -8900,7 +8897,7 @@ snapshots:
       possible-typed-array-names: 1.1.0
       reflect.getprototypeof: 1.0.10

-  typescript@5.8.3: {}
+  typescript@5.9.2: {}

   unbox-primitive@1.1.0:
     dependencies:
@@ -8909,7 +8906,7 @@ snapshots:
       has-symbols: 1.1.0
       which-boxed-primitive: 1.1.1

-  undici-types@6.19.8: {}
+  undici-types@6.21.0: {}

   unist-util-is@6.0.0:
     dependencies:
@@ -8936,36 +8933,38 @@ snapshots:

   unplugin@1.0.1:
     dependencies:
-      acorn: 8.14.1
+      acorn: 8.15.0
       chokidar: 3.6.0
-      webpack-sources: 3.2.3
+      webpack-sources: 3.3.3
       webpack-virtual-modules: 0.5.0

-  unrs-resolver@1.7.2:
+  unrs-resolver@1.11.1:
     dependencies:
-      napi-postinstall: 0.2.3
+      napi-postinstall: 0.3.3
     optionalDependencies:
-      '@unrs/resolver-binding-darwin-arm64': 1.7.2
-      '@unrs/resolver-binding-darwin-x64': 1.7.2
-      '@unrs/resolver-binding-freebsd-x64': 1.7.2
-      '@unrs/resolver-binding-linux-arm-gnueabihf': 1.7.2
-      '@unrs/resolver-binding-linux-arm-musleabihf': 1.7.2
-      '@unrs/resolver-binding-linux-arm64-gnu': 1.7.2
-      '@unrs/resolver-binding-linux-arm64-musl': 1.7.2
-      '@unrs/resolver-binding-linux-ppc64-gnu': 1.7.2
-      '@unrs/resolver-binding-linux-riscv64-gnu': 1.7.2
-      '@unrs/resolver-binding-linux-riscv64-musl': 1.7.2
-      '@unrs/resolver-binding-linux-s390x-gnu': 1.7.2
-      '@unrs/resolver-binding-linux-x64-gnu': 1.7.2
-      '@unrs/resolver-binding-linux-x64-musl': 1.7.2
-      '@unrs/resolver-binding-wasm32-wasi': 1.7.2
-      '@unrs/resolver-binding-win32-arm64-msvc': 1.7.2
-      '@unrs/resolver-binding-win32-ia32-msvc': 1.7.2
-      '@unrs/resolver-binding-win32-x64-msvc': 1.7.2
+      '@unrs/resolver-binding-android-arm-eabi': 1.11.1
+      '@unrs/resolver-binding-android-arm64': 1.11.1
+      '@unrs/resolver-binding-darwin-arm64': 1.11.1
+      '@unrs/resolver-binding-darwin-x64': 1.11.1
+      '@unrs/resolver-binding-freebsd-x64': 1.11.1
+      '@unrs/resolver-binding-linux-arm-gnueabihf': 1.11.1
+      '@unrs/resolver-binding-linux-arm-musleabihf': 1.11.1
+      '@unrs/resolver-binding-linux-arm64-gnu': 1.11.1
+      '@unrs/resolver-binding-linux-arm64-musl': 1.11.1
+      '@unrs/resolver-binding-linux-ppc64-gnu': 1.11.1
+      '@unrs/resolver-binding-linux-riscv64-gnu': 1.11.1
+      '@unrs/resolver-binding-linux-riscv64-musl': 1.11.1
+      '@unrs/resolver-binding-linux-s390x-gnu': 1.11.1
+      '@unrs/resolver-binding-linux-x64-gnu': 1.11.1
+      '@unrs/resolver-binding-linux-x64-musl': 1.11.1
+      '@unrs/resolver-binding-wasm32-wasi': 1.11.1
+      '@unrs/resolver-binding-win32-arm64-msvc': 1.11.1
+      '@unrs/resolver-binding-win32-ia32-msvc': 1.11.1
+      '@unrs/resolver-binding-win32-x64-msvc': 1.11.1

-  update-browserslist-db@1.1.3(browserslist@4.24.5):
+  update-browserslist-db@1.1.3(browserslist@4.25.2):
     dependencies:
-      browserslist: 4.24.5
+      browserslist: 4.25.2
       escalade: 3.2.0
       picocolors: 1.1.1

@@ -8973,20 +8972,18 @@ snapshots:
     dependencies:
       punycode: 2.3.1

-  use-callback-ref@1.3.3(@types/react@18.3.21)(react@18.3.1):
+  use-callback-ref@1.3.3(@types/react@18.3.23)(react@18.3.1):
     dependencies:
+      '@types/react': 18.3.23
       react: 18.3.1
       tslib: 2.8.1
-    optionalDependencies:
-      '@types/react': 18.3.21

-  use-sidecar@1.1.3(@types/react@18.3.21)(react@18.3.1):
+  use-sidecar@1.1.3(@types/react@18.3.23)(react@18.3.1):
     dependencies:
+      '@types/react': 18.3.23
       detect-node-es: 1.1.0
       react: 18.3.1
       tslib: 2.8.1
-    optionalDependencies:
-      '@types/react': 18.3.21

   use-sync-external-store@1.5.0(react@18.3.1):
     dependencies:
@@ -9009,7 +9006,7 @@ snapshots:

   validate.io-number@1.0.3: {}

-  vfile-message@4.0.2:
+  vfile-message@4.0.3:
     dependencies:
       '@types/unist': 3.0.3
       unist-util-stringify-position: 4.0.0

@@ -9017,7 +9014,7 @@ snapshots:
   vfile@6.0.3:
     dependencies:
       '@types/unist': 3.0.3
-      vfile-message: 4.0.2
+      vfile-message: 4.0.3

   victory-vendor@36.9.2:
     dependencies:
@@ -9036,32 +9033,30 @@ snapshots:
       d3-time: 3.1.0
       d3-timer: 3.0.1

-  vite-plugin-eslint@1.8.1(eslint@8.57.1)(vite@6.3.5(@types/node@20.17.43)(jiti@2.4.2)(tsx@4.19.4)(yaml@2.7.1)):
+  vite-plugin-eslint@1.8.1(eslint@8.57.1)(vite@6.3.5):
     dependencies:
       '@rollup/pluginutils': 4.2.1
       '@types/eslint': 8.56.12
       eslint: 8.57.1
       rollup: 2.79.2
-      vite: 6.3.5(@types/node@20.17.43)(jiti@2.4.2)(tsx@4.19.4)(yaml@2.7.1)
+      vite: 6.3.5(@types/node@20.19.10)(tsx@4.20.4)

-  vite@6.3.5(@types/node@20.17.43)(jiti@2.4.2)(tsx@4.19.4)(yaml@2.7.1):
+  vite@6.3.5(@types/node@20.19.10)(tsx@4.20.4):
     dependencies:
-      esbuild: 0.25.4
-      fdir: 6.4.4(picomatch@4.0.2)
-      picomatch: 4.0.2
-      postcss: 8.5.3
-      rollup: 4.40.2
-      tinyglobby: 0.2.13
+      '@types/node': 20.19.10
+      esbuild: 0.25.9
+      fdir: 6.5.0(picomatch@4.0.3)
+      picomatch: 4.0.3
+      postcss: 8.5.6
+      rollup: 4.46.2
+      tinyglobby: 0.2.14
+      tsx: 4.20.4
     optionalDependencies:
-      '@types/node': 20.17.43
       fsevents: 2.3.3
-      jiti: 2.4.2
-      tsx: 4.19.4
-      yaml: 2.7.1

   webidl-conversions@3.0.1: {}

-  webpack-sources@3.2.3: {}
+  webpack-sources@3.3.3: {}

   webpack-virtual-modules@0.5.0: {}

@@ -9139,7 +9134,7 @@ snapshots:

   yaml@1.10.2: {}

-  yaml@2.7.1: {}
+  yaml@2.8.1: {}

   yargs-parser@21.1.1: {}

@@ -9155,13 +9150,12 @@ snapshots:

   yocto-queue@0.1.0: {}

-  zod@3.24.4: {}
+  zod@3.25.76: {}

-  zustand@4.5.6(@types/react@18.3.21)(react@18.3.1):
+  zustand@4.5.7(@types/react@18.3.23)(react@18.3.1):
     dependencies:
-      use-sync-external-store: 1.5.0(react@18.3.1)
-    optionalDependencies:
-      '@types/react': 18.3.21
+      '@types/react': 18.3.23
       react: 18.3.1
+      use-sync-external-store: 1.5.0(react@18.3.1)

   zwitch@2.0.4: {}
diff --git a/frontend/app/src/lib/api/generated/http-client.ts b/frontend/app/src/lib/api/generated/http-client.ts
index c354751f8..a587498bd 100644
--- a/frontend/app/src/lib/api/generated/http-client.ts
+++ b/frontend/app/src/lib/api/generated/http-client.ts
@@ -53,6 +53,7 @@ export interface ApiConfig
 export enum ContentType {
   Json = "application/json",
+  JsonApi = "application/vnd.api+json",
   FormData = "multipart/form-data",
   UrlEncoded = "application/x-www-form-urlencoded",
   Text = "text/plain",
diff --git a/frontend/app/src/next/lib/docs/generated/_meta.ts b/frontend/app/src/next/lib/docs/generated/_meta.ts
deleted file mode 100644
index ff15359e9..000000000
--- a/frontend/app/src/next/lib/docs/generated/_meta.ts
+++ /dev/null
@@ -1,58 +0,0 @@
-// Generated from frontend/docs/pages/_meta.js
-const meta = {
-  home: {
-    title: 'User Guide',
-    type: 'page',
-    theme: {
-      toc: false,
-    },
-  },
-  _setup: {
-    display: 'hidden',
-  },
-  compute: {
-    title: 'Managed Compute',
-    type: 'page',
-    href: '/home/compute',
-    index: 'Overview',
-    'getting-started': 'Getting Started',
-    cpu: 'CPU Machine Types',
-    gpu: 'GPU Machine Types',
-  },
-  self_hosting: {
-    title: 'Self Hosting',
-    type: 'page',
-    theme: {
-      toc: false,
-    },
-  },
-  blog: {
-    title: 'Blog',
-    type: 'page',
-  },
-  contributing: {
-    title: 'Contributing',
-    type: 'page',
-    display: 'hidden',
-    theme: {
-      toc: false,
-    },
-  },
-  sdks: {
-    title: 'SDK Reference',
-    type: 'menu',
-    items: {
-      python: {
-        title: 'Python',
-        href: '/sdks/python/client',
-        type: 'page',
-      },
-    },
-  },
-  v0: {
-    title: 'V0 (Old docs)',
-    type: 'page',
-    href: 'https://v0-docs.hatchet.run',
-  },
-};
-export default meta;
diff --git a/frontend/app/src/next/lib/docs/generated/blog/_meta.ts b/frontend/app/src/next/lib/docs/generated/blog/_meta.ts
deleted file mode 100644
index dd5d9ac14..000000000
--- a/frontend/app/src/next/lib/docs/generated/blog/_meta.ts
+++ /dev/null
@@ -1,49 +0,0 @@
-// Generated from frontend/docs/pages/blog/_meta.js
-const meta = {
-  background_tasks_fastapi_hatchet: {
-    title: 'Background Tasks: From FastAPI to Hatchet',
-    href: '/blog/background-tasks-fastapi-hatchet',
-  },
-  go_agents: {
-    title: 'Why Go is a good fit for agents',
-    href: '/blog/go-agents',
-  },
-  warning_event_loop_blocked: {
-    title: 'Warning! The Event Loop May Be Blocked',
-    href: '/blog/warning-event-loop-blocked',
-  },
-  fastest_postgres_inserts: {
-    title: 'The fastest Postgres inserts',
-    href: '/blog/fastest-postgres-inserts',
-  },
-  task_queue_modern_python: {
-    title: 'A task queue for modern Python applications',
-    href: '/blog/task-queue-modern-python',
-  },
-  postgres_events_table: {
-    title: 'Use Postgres for your events table',
-    href: '/blog/postgres-events-table',
-  },
-  migrating_off_prisma: {
-    title: 'Why we moved off Prisma',
-    href: '/blog/migrating-off-prisma',
-  },
-  problems_with_celery: {
-    title: 'The problems with Celery',
-    display: 'hidden',
-    href: '/blog/problems-with-celery',
-  },
-  multi_tenant_queues: {
-    title: 'An unfair advantage: multi-tenant queues in Postgres',
-    href: '/blog/multi-tenant-queues',
-  },
-  '--migration-guides': {
-    title: 'Migration Guides',
-    type: 'separator',
-  },
-  mergent_migration_guide: {
-    title: 'Migrating from Mergent',
-    href: '/blog/mergent-migration-guide',
-  },
-};
-export default meta;
diff --git a/frontend/app/src/next/lib/docs/generated/contributing/_meta.ts b/frontend/app/src/next/lib/docs/generated/contributing/_meta.ts
deleted file mode 100644
index c63de79c8..000000000
--- a/frontend/app/src/next/lib/docs/generated/contributing/_meta.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-// Generated from frontend/docs/pages/contributing/_meta.js
-const meta = {
-  index: {
-    title: 'Contributing',
-    href: '/contributing/',
-  },
-  github_app_setup: {
-    title: 'GitHub App Setup',
-    href: '/contributing/github-app-setup',
-  },
-  sdks: {
-    title: 'SDKs',
-    href: '/contributing/sdks',
-  },
-};
-export default meta;
diff --git a/frontend/app/src/next/lib/docs/generated/home/_meta.ts b/frontend/app/src/next/lib/docs/generated/home/_meta.ts
deleted file mode 100644
index c00a1d854..000000000
--- a/frontend/app/src/next/lib/docs/generated/home/_meta.ts
+++ /dev/null
@@ -1,265 +0,0 @@
-// Generated from frontend/docs/pages/home/_meta.js
-const meta = {
-  '--intro': {
-    title: 'Introduction',
-    type: 'separator',
-  },
-  index: {
-    title: 'Introduction',
-    href: '/home/',
-  },
-  hatchet_cloud_quickstart: {
-    title: 'Hatchet Cloud Quickstart',
-    href: '/home/hatchet-cloud-quickstart',
-  },
-  '--quickstart': {
-    title: 'Quickstart',
-    type: 'separator',
-  },
-  setup: {
-    title: 'Setup',
-    href: '/home/setup',
-  },
-  your_first_task: {
-    title: 'Tasks',
-    href: '/home/your-first-task',
-  },
-  workers: {
-    title: 'Workers',
-    href: '/home/workers',
-  },
-  running_your_task: {
-    title: 'Running Tasks',
-    href: '/home/running-your-task',
-  },
-  environments: {
-    title: 'Environments',
-    href: '/home/environments',
-  },
-  '--running-tasks': {
-    title: 'Ways of Running Tasks',
-    type: 'separator',
-  },
-  running_tasks: {
-    title: 'Introduction',
-    href: '/home/running-tasks',
-  },
-  run_with_results: {
-    title: 'Run and Wait Trigger',
-    href: '/home/run-with-results',
-  },
-  run_no_wait: {
-    title: 'Run Without Wait Trigger',
-    href: '/home/run-no-wait',
-  },
-  scheduled_runs: {
-    title: 'Scheduled Trigger',
-    href: '/home/scheduled-runs',
-  },
-  cron_runs: {
-    title: 'Cron Trigger',
-    href: '/home/cron-runs',
-  },
-  run_on_event: {
-    title: 'Event Trigger',
-    href: '/home/run-on-event',
-  },
-  bulk_run: {
-    title: 'Bulk Run Many',
-    href: '/home/bulk-run',
-  },
-  webhooks: {
-    title: 'Webhooks',
-    href: '/home/webhooks',
-  },
-  '--flow-control': {
-    title: 'Flow Control',
-    type: 'separator',
-  },
-  concurrency: {
-    title: 'Concurrency',
-    href: '/home/concurrency',
-  },
-  rate_limits: {
-    title: 'Rate Limits',
-    href: '/home/rate-limits',
-  },
-  priority: {
-    title: 'Priority',
-    href: '/home/priority',
-  },
-  '--advanced-workflows': {
-    title: 'Workflows',
-    type: 'separator',
-  },
-  orchestration: {
-    title: 'Task Orchestration',
-    href: '/home/orchestration',
-  },
-  dags: {
-    title: 'Directed Acyclic Graphs (DAGs)',
-    href: '/home/dags',
-  },
-  conditional_workflows: {
-    title: 'Conditional Workflows',
-    href: '/home/conditional-workflows',
-  },
-  on_failure_tasks: {
-    title: 'On Failure Tasks',
-    href: '/home/on-failure-tasks',
-  },
-  child_spawning: {
-    title: 'Child Spawning',
-    href: '/home/child-spawning',
-  },
-  additional_metadata: {
-    title: 'Additional Metadata',
-    href: '/home/additional-metadata',
-  },
-  '--durable-execution': {
-    title: 'Durable Execution',
-    type: 'separator',
-  },
-  durable_execution: {
-    title: 'Durable Execution',
-    href: '/home/durable-execution',
-  },
-  durable_events: {
-    title: 'Durable Events',
-    href: '/home/durable-events',
-  },
-  durable_sleep: {
-    title: 'Durable Sleep',
-    href: '/home/durable-sleep',
-  },
-  durable_best_practices: {
-    title: 'Best Practices',
-    href: '/home/durable-best-practices',
-  },
-  '--error-handling': {
-    title: 'Error Handling',
-    type: 'separator',
-  },
-  timeouts: {
-    title: 'Timeouts',
-    href: '/home/timeouts',
-  },
-  retry_policies: {
-    title: 'Retry Policies',
-    href: '/home/retry-policies',
-  },
-  bulk_retries_and_cancellations: {
-    title: 'Bulk Retries and Cancellations',
-    href: '/home/bulk-retries-and-cancellations',
-  },
-  '--assignment': {
-    title: 'Advanced Assignment',
-    type: 'separator',
-  },
-  sticky_assignment: {
-    title: 'Sticky Assignment',
-    href: '/home/sticky-assignment',
-  },
-  worker_affinity: {
-    title: 'Worker Affinity',
-    href: '/home/worker-affinity',
-  },
-  manual_slot_release: {
-    title: 'Manual Slot Release',
-    href: '/home/manual-slot-release',
-  },
-  '--observability': {
-    title: 'Observability',
-    type: 'separator',
-  },
-  logging: {
-    title: 'Logging',
-    href: '/home/logging',
-  },
-  opentelemetry: {
-    title: 'OpenTelemetry',
-    href: '/home/opentelemetry',
-  },
-  prometheus_metrics: {
-    title: 'Prometheus Metrics',
-    href: '/home/prometheus-metrics',
-  },
-  '--deploying-workers': {
-    title: 'Deploying Workers',
-    type: 'separator',
-  },
-  docker: {
-    title: 'Running with Docker',
-    href: '/home/docker',
-  },
-  compute: {
-    title: 'Managed Compute',
-    href: '/home/compute',
-  },
-  worker_healthchecks: {
-    title: 'Worker Health Checks',
-    href: '/home/worker-healthchecks',
-  },
-  '--advanced-tasks': {
-    title: 'Advanced Task Features',
-    type: 'separator',
-  },
-  cancellation: {
-    title: 'Cancellation',
-    href: '/home/cancellation',
-  },
-  streaming: {
-    title: 'Streaming',
-    href: '/home/streaming',
-  },
-  '--v1-migration-guides': {
-    title: 'V1 Migration Guides',
-    type: 'separator',
-  },
-  v1_sdk_improvements: {
-    title: 'SDK Improvements',
-    href: '/home/v1-sdk-improvements',
-  },
-  migration_guide_engine: {
-    title: 'Engine Migration Guide',
-    href: '/home/migration-guide-engine',
-  },
-  migration_guide_python: {
-    title: 'Python Migration Guide',
-    href: '/home/migration-guide-python',
-  },
-  migration_guide_typescript: {
-    title: 'Typescript Migration Guide',
-    href: '/home/migration-guide-typescript',
-  },
-  migration_guide_go: {
-    title: 'Go Migration Guide',
-    href: '/home/migration-guide-go',
-  },
-  '--python': {
-    title: 'Python Specifics',
-    type: 'separator',
-  },
-  asyncio: {
-    title: 'Asyncio',
-    href: '/home/asyncio',
-  },
-  pydantic: {
-    title: 'Pydantic',
-    href: '/home/pydantic',
-  },
-  lifespans: {
-    title: 'Lifespans',
-    href: '/home/lifespans',
-  },
-  dependency_injection: {
-    title: 'Dependency Injection',
-    href: '/home/dependency-injection',
-  },
-  blog: {
-    title: 'Blog',
-    type: 'page',
-    href: '/blog',
-  },
-};
-export default meta;
diff --git a/frontend/app/src/next/lib/docs/generated/home/compute/_meta.ts b/frontend/app/src/next/lib/docs/generated/home/compute/_meta.ts
deleted file mode 100644
index ddc7508c6..000000000
--- a/frontend/app/src/next/lib/docs/generated/home/compute/_meta.ts
+++ /dev/null
@@ -1,32 +0,0 @@
-// Generated from frontend/docs/pages/home/compute/_meta.js
-const meta = {
-  index: {
-    title: 'Overview',
-    href: '/home/compute/',
-  },
-  getting_started: {
-    title: 'Getting Started',
-    href: '/home/compute/getting-started',
-  },
-  cpu: {
-    title: 'CPU Machine Types',
-    href: '/home/compute/cpu',
-  },
-  gpu: {
-    title: 'GPU Machine Types',
-    href: '/home/compute/gpu',
-  },
-  git_ops: {
-    title: 'GitOps',
-    href: '/home/compute/git-ops',
-  },
-  auto_scaling: {
-    title: 'Auto Scaling',
-    href: '/home/compute/auto-scaling',
-  },
-  environment_variables: {
-    title: 'Environment Variables',
-    href: '/home/compute/environment-variables',
-  },
-};
-export default meta;
diff --git a/frontend/app/src/next/lib/docs/generated/index.ts b/frontend/app/src/next/lib/docs/generated/index.ts
deleted file mode 100644
index 5ee6aada2..000000000
--- a/frontend/app/src/next/lib/docs/generated/index.ts
+++ /dev/null
@@ -1,21 +0,0 @@
-// Generated index file for meta-data
-import root from './_meta';
-import blog from './blog/_meta';
-import contributing from './contributing/_meta';
-import home from './home/_meta';
-import homecompute from './home/compute/_meta';
-import sdks from './sdks/_meta';
-import sdkspython from './sdks/python/_meta';
-import sdkspythonfeature_clients from './sdks/python/feature-clients/_meta';
-import self_hosting from './self-hosting/_meta';
-export {
-  root,
-  blog,
-  contributing,
-  home,
-  homecompute,
-  sdks,
-  sdkspython,
-  sdkspythonfeature_clients,
-  self_hosting,
-};
diff --git a/frontend/app/src/next/lib/docs/generated/sdks/_meta.ts b/frontend/app/src/next/lib/docs/generated/sdks/_meta.ts
deleted file mode 100644
index f63f73776..000000000
--- a/frontend/app/src/next/lib/docs/generated/sdks/_meta.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-// Generated from frontend/docs/pages/sdks/_meta.js
-const meta = {
-  python: {
-    title: 'Python SDK',
-    type: 'page',
-    theme: {
-      toc: true,
-    },
-  },
-};
-export default meta;
diff --git a/frontend/app/src/next/lib/docs/generated/sdks/python/_meta.ts b/frontend/app/src/next/lib/docs/generated/sdks/python/_meta.ts
deleted file mode 100644
index 1eaf7b1d4..000000000
--- a/frontend/app/src/next/lib/docs/generated/sdks/python/_meta.ts
+++ /dev/null
@@ -1,32 +0,0 @@
-// Generated from frontend/docs/pages/sdks/python/_meta.js
-const meta = {
-  client: {
-    title: 'Client',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/client',
-  },
-  context: {
-    title: 'Context',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/context',
-  },
-  feature_clients: {
-    title: 'Feature Clients',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients',
-  },
-  runnables: {
-    title: 'Runnables',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/runnables',
-  },
-};
-export default meta;
diff --git a/frontend/app/src/next/lib/docs/generated/sdks/python/feature-clients/_meta.ts b/frontend/app/src/next/lib/docs/generated/sdks/python/feature-clients/_meta.ts
deleted file mode 100644
index c4ef370ab..000000000
--- a/frontend/app/src/next/lib/docs/generated/sdks/python/feature-clients/_meta.ts
+++ /dev/null
@@ -1,67 +0,0 @@
-// Generated from frontend/docs/pages/sdks/python/feature-clients/_meta.js
-const meta = {
-  cron: {
-    title: 'Cron',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients/cron',
-  },
-  filters: {
-    title: 'Filters',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients/filters',
-  },
-  logs: {
-    title: 'Logs',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients/logs',
-  },
-  metrics: {
-    title: 'Metrics',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients/metrics',
-  },
-  rate_limits: {
-    title: 'Rate Limits',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients/rate_limits',
-  },
-  runs: {
-    title: 'Runs',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients/runs',
-  },
-  scheduled: {
-    title: 'Scheduled',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients/scheduled',
-  },
-  workers: {
-    title: 'Workers',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients/workers',
-  },
-  workflows: {
-    title: 'Workflows',
-    theme: {
-      toc: true,
-    },
-    href: '/sdks/python/feature-clients/workflows',
-  },
-};
-export default meta;
diff --git a/frontend/app/src/next/lib/docs/generated/self-hosting/_meta.ts b/frontend/app/src/next/lib/docs/generated/self-hosting/_meta.ts
deleted file mode 100644
index 9d675bde6..000000000
--- a/frontend/app/src/next/lib/docs/generated/self-hosting/_meta.ts
+++ /dev/null
@@ -1,87 +0,0 @@
-// Generated from frontend/docs/pages/self-hosting/_meta.js
-const meta = {
-  index: {
-    title: 'Introduction',
-    href: '/self-hosting/',
-  },
-  '-- Docker': {
-    type: 'separator',
-    title: 'Docker',
-  },
-  hatchet_lite: {
-    title: 'Hatchet Lite',
-    href: '/self-hosting/hatchet-lite',
-  },
-  docker_compose: {
-    title: 'Docker Compose',
-    href: '/self-hosting/docker-compose',
-  },
-  '-- Kubernetes': {
-    type: 'separator',
-    title: 'Kubernetes',
-  },
-  kubernetes_quickstart: {
-    title: 'Quickstart',
-    href: '/self-hosting/kubernetes-quickstart',
-  },
-  kubernetes_glasskube: {
-    title: 'Installing with Glasskube',
-    href: '/self-hosting/kubernetes-glasskube',
-  },
-  networking: {
-    title: 'Networking',
-    href: '/self-hosting/networking',
-  },
-  kubernetes_helm_configuration: {
-    title: 'Configuring the Helm Chart',
-    href: '/self-hosting/kubernetes-helm-configuration',
-  },
-  kubernetes_external_database: {
-    title: 'Setting up an External Database',
-    href: '/self-hosting/kubernetes-external-database',
-  },
-  high_availability: {
-    title: 'High Availability',
-    href: '/self-hosting/high-availability',
-  },
-  '-- Managing Hatchet': {
-    type: 'separator',
-    title: 'Managing Hatchet',
-  },
-  configuration_options: {
-    title: 'Engine Configuration Options',
-    href: '/self-hosting/configuration-options',
-  },
-  prometheus_metrics: {
-    title: 'Prometheus Metrics',
-    theme: {
-      toc: true,
-    },
-    href: '/self-hosting/prometheus-metrics',
-  },
-  worker_configuration_options: {
-    title: 'Worker Configuration Options',
-    href: '/self-hosting/worker-configuration-options',
-  },
-  benchmarking: {
-    title: 'Benchmarking',
-    href: '/self-hosting/benchmarking',
-  },
-  data_retention: {
-    title: 'Data Retention',
-    href: '/self-hosting/data-retention',
-  },
-  improving_performance: {
-    title: 'Improving Performance',
-    href: '/self-hosting/improving-performance',
-  },
-  read_replicas: {
-    title: 'Read Replicas',
-    href: '/self-hosting/read-replicas',
-  },
-  sampling: {
-    title: 'Trace Sampling',
-    href: '/self-hosting/sampling',
-  },
-};
-export default meta;
diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/bulk-operations/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/bulk-operations/index.ts
deleted file mode 100644
index 65b980543..000000000
--- a/frontend/app/src/next/lib/docs/generated/snips/go/bulk-operations/index.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import main from './main';
-
-export { main };
diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/bulk-operations/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/bulk-operations/main.ts
deleted file mode 100644
index 66a01cc6e..000000000
--- a/frontend/app/src/next/lib/docs/generated/snips/go/bulk-operations/main.ts
+++ /dev/null
@@ -1,29 +0,0 @@
-import { Snippet } from '@/next/lib/docs/generated/snips/types';
-
-const snippet: Snippet = {
-  language: 'go',
-  content:
-    'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\t"time"\n\n\t"github.com/google/uuid"\n\t"github.com/oapi-codegen/runtime/types"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/rest"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n)\n\nfunc main() {\n\t// > Setup\n\n\thatchet, err := v1.NewHatchetClient()\n\tif err != nil {\n\t\tlog.Fatalf("failed to create hatchet client: %v", err)\n\t}\n\n\tctx := context.Background()\n\n\tworkflows, err := hatchet.Workflows().List(ctx, nil)\n\tif err != nil {\n\t\tlog.Fatalf("failed to list workflows: %v", err)\n\t}\n\n\tif workflows == nil || workflows.Rows == nil || len(*workflows.Rows) == 0 {\n\t\tlog.Fatalf("no workflows found")\n\t}\n\n\tselectedWorkflow := (*workflows.Rows)[0]\n\tselectedWorkflowUUID := uuid.MustParse(selectedWorkflow.Metadata.Id)\n\n\n\t// > List runs\n\tworkflowRuns, err := hatchet.Runs().List(ctx, rest.V1WorkflowRunListParams{\n\t\tWorkflowIds: &[]types.UUID{selectedWorkflowUUID},\n\t})\n\tif err != nil || workflowRuns == nil || workflowRuns.JSON200 == nil || workflowRuns.JSON200.Rows == nil {\n\t\tlog.Fatalf("failed to list workflow runs for workflow %s: %v", selectedWorkflow.Name, err)\n\t}\n\n\tvar runIds []types.UUID\n\n\tfor _, run := range workflowRuns.JSON200.Rows {\n\t\trunIds = append(runIds, uuid.MustParse(run.Metadata.Id))\n\t}\n\n\n\t// > Cancel by run ids\n\t_, err = hatchet.Runs().Cancel(ctx, rest.V1CancelTaskRequest{\n\t\tExternalIds: &runIds,\n\t})\n\tif err != nil {\n\t\tlog.Fatalf("failed to cancel runs by ids: %v", err)\n\t}\n\n\n\t// > Cancel by filters\n\ttNow := time.Now().UTC()\n\n\t_, err = hatchet.Runs().Cancel(ctx, rest.V1CancelTaskRequest{\n\t\tFilter: &rest.V1TaskFilter{\n\t\t\tSince: tNow.Add(-24 * time.Hour),\n\t\t\tUntil: &tNow,\n\t\t\tStatuses: &[]rest.V1TaskStatus{rest.V1TaskStatusRUNNING},\n\t\t\tWorkflowIds: &[]types.UUID{selectedWorkflowUUID},\n\t\t\tAdditionalMetadata: &[]string{`{"key": "value"}`},\n\t\t},\n\t})\n\tif err != nil {\n\t\tlog.Fatalf("failed to cancel runs by filters: %v", err)\n\t}\n\n\n\tfmt.Println("cancelled all runs for workflow", selectedWorkflow.Name)\n}\n',
-  source: 'out/go/bulk-operations/main.go',
-  blocks: {
-    setup: {
-      start: 18,
-      stop: 37,
-    },
-    list_runs: {
-      start: 40,
-      stop: 52,
-    },
-    cancel_by_run_ids: {
-      start: 55,
-      stop: 61,
-    },
-    cancel_by_filters: {
-      start: 64,
-      stop: 78,
-    },
-  },
-  highlights: {},
-};
-
-export default snippet;
diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/index.ts
deleted file mode 100644
index 8eaeda170..000000000
--- a/frontend/app/src/next/lib/docs/generated/snips/go/index.ts
+++ /dev/null
@@ -1,17 +0,0 @@
-import * as bulk_operations from './bulk-operations';
-import * as migration_guides from './migration-guides';
-import * as quickstart from './quickstart';
-import * as run from './run';
-import * as streaming from './streaming';
-import * as worker from './worker';
-import * as workflows from './workflows';
-import * as z_v0 from './z_v0';
-
-export { bulk_operations };
-export { migration_guides };
-export { quickstart };
-export { run };
-export { streaming };
-export { worker };
-export { workflows };
-export { z_v0 };
diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/hatchet-client.ts b/frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/hatchet-client.ts
deleted file mode 100644
index 3ef8aca10..000000000
--- a/frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/hatchet-client.ts
+++ /dev/null
@@ -1,12 +0,0 @@
-import { Snippet } from '@/next/lib/docs/generated/snips/types';
-
-const snippet: Snippet = {
-  language: 'go',
-  content:
-    'package migration_guides\n\nimport (\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n)\n\nfunc HatchetClient() (v1.HatchetClient, error) {\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn hatchet, nil\n}\n',
-  source: 'out/go/migration-guides/hatchet-client.go',
-  blocks: {},
-  highlights: {},
-};
-
-export default snippet;
diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/index.ts
deleted file mode 100644
index e3b7e4fb7..000000000
--- a/frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import hatchet_client from './hatchet-client';
-import mergent from './mergent';
-
-export { hatchet_client };
-export { mergent };
diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/mergent.ts b/frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/mergent.ts
deleted file mode 100644
index 7c0081847..000000000
--- a/frontend/app/src/next/lib/docs/generated/snips/go/migration-guides/mergent.ts
+++ /dev/null
@@ -1,41 +0,0 @@
-import { Snippet } from '@/next/lib/docs/generated/snips/types';
-
-const snippet: Snippet = {
-  language: 'go',
-  content:
-    'package migration_guides\n\nimport (\n\t"bytes"\n\t"context"\n\t"encoding/json"\n\t"fmt"\n\t"net/http"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\tv1worker "github.com/hatchet-dev/hatchet/pkg/v1/worker"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\n// ProcessImage simulates image processing\nfunc ProcessImage(imageURL string, filters []string) (map[string]interface{}, error) {\n\t// Do some image processing\n\treturn map[string]interface{}{\n\t\t"url": imageURL,\n\t\t"size": 100,\n\t\t"format": "png",\n\t}, nil\n}\n\n// > Before (Mergent)\ntype MergentRequest struct {\n\tImageURL string `json:"image_url"`\n\tFilters []string `json:"filters"`\n}\n\ntype MergentResponse struct {\n\tSuccess bool `json:"success"`\n\tProcessedURL string `json:"processed_url"`\n}\n\nfunc ProcessImageMergent(req MergentRequest) (*MergentResponse, error) {\n\tresult, err := ProcessImage(req.ImageURL, req.Filters)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &MergentResponse{\n\t\tSuccess: true,\n\t\tProcessedURL: result["url"].(string),\n\t}, nil\n}\n\n\n// > After (Hatchet)\ntype ImageProcessInput struct {\n\tImageURL string `json:"image_url"`\n\tFilters []string `json:"filters"`\n}\n\ntype ImageProcessOutput struct {\n\tProcessedURL string `json:"processed_url"`\n\tMetadata struct {\n\t\tSize int `json:"size"`\n\t\tFormat string `json:"format"`\n\t\tAppliedFilters []string `json:"applied_filters"`\n\t} `json:"metadata"`\n}\n\nfunc ImageProcessor(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ImageProcessInput, ImageProcessOutput] {\n\tprocessor := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "image-processor",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input ImageProcessInput) (*ImageProcessOutput, error) {\n\t\t\tresult, err := ProcessImage(input.ImageURL, input.Filters)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf("processing image: %w", err)\n\t\t\t}\n\n\t\t\tif result["url"] == "" {\n\t\t\t\treturn nil, fmt.Errorf("processing failed to generate URL")\n\t\t\t}\n\n\t\t\toutput := &ImageProcessOutput{\n\t\t\t\tProcessedURL: result["url"].(string),\n\t\t\t\tMetadata: struct {\n\t\t\t\t\tSize int `json:"size"`\n\t\t\t\t\tFormat string `json:"format"`\n\t\t\t\t\tAppliedFilters []string `json:"applied_filters"`\n\t\t\t\t}{\n\t\t\t\t\tSize: result["size"].(int),\n\t\t\t\t\tFormat: result["format"].(string),\n\t\t\t\t\tAppliedFilters: input.Filters,\n\t\t\t\t},\n\t\t\t}\n\n\t\t\treturn output, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\t// Example of running a task\n\t_ = func() error {\n\t\t// > Running a task\n\t\tresult, err := processor.Run(context.Background(), ImageProcessInput{\n\t\t\tImageURL: "https://example.com/image.png",\n\t\t\tFilters: []string{"blur"},\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Printf("Result: %+v\\n", result)\n\t\treturn nil\n\t}\n\n\t// Example of registering a task on a worker\n\t_ = func() error {\n\t\t// > Declaring a Worker\n\t\tw, err := hatchet.Worker(v1worker.WorkerOpts{\n\t\t\tName: "image-processor-worker",\n\t\t\tWorkflows: []workflow.WorkflowBase{\n\t\t\t\tprocessor,\n\t\t\t},\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = w.StartBlocking(context.Background())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n\n\treturn processor\n}\n\nfunc RunMergentTask() error {\n\n\treturn nil\n}\n\nfunc RunningTasks(hatchet v1.HatchetClient) error {\n\t// > Running a task (Mergent)\n\ttask := struct {\n\t\tRequest struct {\n\t\t\tURL string `json:"url"`\n\t\t\tBody string `json:"body"`\n\t\t\tHeaders map[string]string `json:"headers"`\n\t\t} `json:"request"`\n\t\tName string `json:"name"`\n\t\tQueue string `json:"queue"`\n\t}{\n\t\tRequest: struct {\n\t\t\tURL string `json:"url"`\n\t\t\tBody string `json:"body"`\n\t\t\tHeaders map[string]string `json:"headers"`\n\t\t}{\n\t\t\tURL: "https://example.com",\n\t\t\tHeaders: map[string]string{\n\t\t\t\t"Authorization": "fake-secret-token",\n\t\t\t\t"Content-Type": "application/json",\n\t\t\t},\n\t\t\tBody: "Hello, world!",\n\t\t},\n\t\tName: "4cf95241-fa19-47ef-8a67-71e483747649",\n\t\tQueue: "default",\n\t}\n\n\ttaskJSON, err := json.Marshal(task)\n\tif err != nil {\n\t\treturn fmt.Errorf("marshaling task: %w", err)\n\t}\n\n\treq, err := http.NewRequest(http.MethodPost, "https://api.mergent.co/v2/tasks", bytes.NewBuffer(taskJSON))\n\tif err != nil {\n\t\treturn fmt.Errorf("creating request: %w", err)\n\t}\n\n\treq.Header.Add("Authorization", "Bearer
")\n\treq.Header.Add("Content-Type", "application/json")\n\n\tclient := &http.Client{}\n\tres, err := client.Do(req)\n\tif err != nil {\n\t\treturn fmt.Errorf("sending request: %w", err)\n\t}\n\tdefer res.Body.Close()\n\n\tfmt.Printf("Mergent task created with status: %d\\n", res.StatusCode)\n\n\t// > Running a task (Hatchet)\n\tprocessor := ImageProcessor(hatchet)\n\n\tresult, err := processor.Run(context.Background(), ImageProcessInput{\n\t\tImageURL: "https://example.com/image.png",\n\t\tFilters: []string{"blur"},\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Printf("Result: %+v\\n", result)\n\n\t// > Scheduling tasks (Hatchet)\n\t// Schedule the task to run at a specific time\n\tscheduleRef, err := processor.Schedule(\n\t\tcontext.Background(),\n\t\ttime.Now().Add(time.Second*10),\n\t\tImageProcessInput{\n\t\t\tImageURL: "https://example.com/image.png",\n\t\t\tFilters: []string{"blur"},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// or schedule to run every hour\n\tcronRef, err := processor.Cron(\n\t\tcontext.Background(),\n\t\t"run-hourly",\n\t\t"0 * * * *",\n\t\tImageProcessInput{\n\t\t\tImageURL: "https://example.com/image.png",\n\t\t\tFilters: []string{"blur"},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf("Scheduled tasks with refs: %+v, %+v\\n", scheduleRef, cronRef)\n\treturn nil\n}\n', - source: 'out/go/migration-guides/mergent.go', - blocks: { - before_mergent: { - start: 30, - stop: 51, - }, - after_hatchet: { - start: 54, - stop: 99, - }, - running_a_task: { - start: 104, - stop: 112, - }, - declaring_a_worker: { - start: 118, - stop: 131, - }, - running_a_task_mergent: { - start: 144, - stop: 189, - }, - running_a_task_hatchet: { - start: 192, - stop: 201, - }, - scheduling_tasks_hatchet: { - start: 204, - stop: 226, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/index.ts deleted file mode 100644 index 4f46a5516..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import * as run from './run'; -import * as worker from './worker'; - -export { run }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/run/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/run/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/run/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/run/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/run/main.ts deleted file mode 100644 index 7fcf6885c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/run/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\n\thatchet_client "github.com/hatchet-dev/hatchet/pkg/examples/quickstart/hatchet_client"\n\tworkflows "github.com/hatchet-dev/hatchet/pkg/examples/quickstart/workflows"\n)\n\nfunc main() {\n\thatchet, err := hatchet_client.HatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsimple := workflows.FirstTask(hatchet)\n\n\tresult, err := 
simple.Run(context.Background(), workflows.SimpleInput{\n\t\tMessage: "Hello, World!",\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(\n\t\t"Finished running task, and got the transformed message! The transformed message is:",\n\t\tresult.ToLower.TransformedMessage,\n\t)\n}\n', - source: 'out/go/quickstart/cmd/run/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/worker/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/worker/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/worker/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/worker/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/worker/main.ts deleted file mode 100644 index 0ac762665..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/cmd/worker/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\thatchet_client "github.com/hatchet-dev/hatchet/pkg/examples/quickstart/hatchet_client"\n\tworkflows "github.com/hatchet-dev/hatchet/pkg/examples/quickstart/workflows"\n\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/worker"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n)\n\nfunc main() {\n\n\thatchet, err := hatchet_client.HatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := hatchet.Worker(\n\t\tworker.WorkerOpts{\n\t\t\tName: "first-worker",\n\t\t\tWorkflows: []workflow.WorkflowBase{\n\t\t\t\tworkflows.FirstTask(hatchet),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// we construct an interrupt context to handle Ctrl+C\n\t// you can pass in your own context.Context here to the worker\n\tinterruptCtx, cancel := cmdutils.NewInterruptContext()\n\n\tdefer cancel()\n\n\terr = worker.StartBlocking(interruptCtx)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n', - source: 'out/go/quickstart/cmd/worker/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/hatchet_client/hatchet_client.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/hatchet_client/hatchet_client.ts deleted file mode 100644 index 3b809d41d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/hatchet_client/hatchet_client.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package hatchet_client\n\nimport (\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/joho/godotenv"\n)\n\nfunc HatchetClient() (v1.HatchetClient, error) {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn v1.NewHatchetClient()\n}\n', - source: 'out/go/quickstart/hatchet_client/hatchet_client.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/hatchet_client/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/hatchet_client/index.ts deleted file mode 100644 index 42f7311f2..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/hatchet_client/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import hatchet_client from './hatchet_client'; - -export { hatchet_client }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/index.ts deleted file mode 100644 index 26684ed22..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import * as cmd from './cmd'; -import * as hatchet_client from './hatchet_client'; -import * as workflows from './workflows'; - -export { cmd }; -export { hatchet_client }; -export { workflows }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/workflows/first_task.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/workflows/first_task.ts deleted file mode 100644 index 3c3b614d1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/workflows/first_task.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package workflows\n\nimport (\n\t"fmt"\n\t"strings"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype SimpleInput struct {\n\tMessage string `json:"message"`\n}\n\ntype LowerOutput struct {\n\tTransformedMessage string `json:"transformed_message"`\n}\n\ntype SimpleResult struct {\n\tToLower LowerOutput\n}\n\nfunc FirstTask(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[SimpleInput, SimpleResult] {\n\tsimple := factory.NewWorkflow[SimpleInput, SimpleResult](\n\t\tcreate.WorkflowCreateOpts[SimpleInput]{\n\t\t\tName: "first-task",\n\t\t},\n\t\thatchet,\n\t)\n\n\tsimple.Task(\n\t\tcreate.WorkflowTask[SimpleInput, SimpleResult]{\n\t\t\tName: "first-task",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input SimpleInput) (any, error) {\n\t\t\tfmt.Println("first-task task called")\n\t\t\treturn &LowerOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn simple\n}\n', - source: 'out/go/quickstart/workflows/first_task.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/workflows/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/workflows/index.ts deleted file mode 100644 index 46bc6d9be..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/quickstart/workflows/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import first_task from './first_task'; - -export { first_task }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/run/all.ts b/frontend/app/src/next/lib/docs/generated/snips/go/run/all.ts deleted file mode 100644 index e63e0be08..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/run/all.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"math/rand"\n\t"os"\n\t"time"\n\n\t"github.com/google/uuid"\n\tv1_workflows 
"github.com/hatchet-dev/hatchet/examples/go/workflows"\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/rest"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/joho/godotenv"\n\t"github.com/oapi-codegen/runtime/types"\n)\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Get workflow name from command line arguments\n\tvar workflowName string\n\tif len(os.Args) > 1 {\n\t\tworkflowName = os.Args[1]\n\t\tfmt.Println("workflow name provided:", workflowName)\n\t} else {\n\t\tfmt.Println("No workflow name provided. Defaulting to \'simple\'")\n\t\tworkflowName = "simple"\n\t}\n\n\tctx := context.Background()\n\n\t// Define workflow runners map\n\trunnerMap := map[string]func() error{\n\t\t"simple": func() error {\n\t\t\tsimple := v1_workflows.Simple(hatchet)\n\t\t\tresult, err := simple.Run(ctx, v1_workflows.SimpleInput{\n\t\t\t\tMessage: "Hello, World!",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(result.TransformedMessage)\n\t\t\treturn nil\n\t\t},\n\t\t"child": func() error {\n\t\t\tparent := v1_workflows.Parent(hatchet)\n\n\t\t\tresult, err := parent.Run(ctx, v1_workflows.ParentInput{\n\t\t\t\tN: 50,\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println("Parent result:", result.Result)\n\t\t\treturn nil\n\t\t},\n\t\t"dag": func() error {\n\t\t\tdag := v1_workflows.DagWorkflow(hatchet)\n\t\t\tresult, err := dag.Run(ctx, v1_workflows.DagInput{\n\t\t\t\tMessage: "Hello, DAG!",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(result.Step1.Step)\n\t\t\tfmt.Println(result.Step2.Step)\n\t\t\treturn nil\n\t\t},\n\t\t"sleep": func() error {\n\t\t\tsleep := v1_workflows.DurableSleep(hatchet)\n\t\t\t_, err := sleep.Run(ctx, v1_workflows.DurableSleepInput{\n\t\t\t\tMessage: "Hello, Sleep!",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println("Sleep workflow completed")\n\t\t\treturn nil\n\t\t},\n\t\t"durable-event": func() error {\n\t\t\tdurableEventWorkflow := v1_workflows.DurableEvent(hatchet)\n\t\t\trun, err := durableEventWorkflow.RunNoWait(ctx, v1_workflows.DurableEventInput{\n\t\t\t\tMessage: "Hello, World!",\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t_, err = hatchet.Runs().Cancel(ctx, rest.V1CancelTaskRequest{\n\t\t\t\tExternalIds: &[]types.UUID{uuid.MustParse(run.WorkflowRunId())},\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\n\t\t\t_, err = run.Result()\n\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println("Received expected error:", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println("Cancellation workflow completed unexpectedly")\n\t\t\treturn nil\n\t\t},\n\t\t"timeout": func() error {\n\t\t\ttimeout := v1_workflows.Timeout(hatchet)\n\t\t\t_, err := timeout.Run(ctx, v1_workflows.TimeoutInput{})\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println("Received expected error:", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println("Timeout workflow completed unexpectedly")\n\t\t\treturn nil\n\t\t},\n\t\t"sticky": func() error {\n\t\t\tsticky := v1_workflows.Sticky(hatchet)\n\t\t\tresult, err := sticky.Run(ctx, v1_workflows.StickyInput{})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println("Value from child workflow:", result.Result)\n\t\t\treturn 
nil\n\t\t},\n\t\t"sticky-dag": func() error {\n\t\t\tstickyDag := v1_workflows.StickyDag(hatchet)\n\t\t\tresult, err := stickyDag.Run(ctx, v1_workflows.StickyInput{})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println("Value from task 1:", result.StickyTask1.Result)\n\t\t\tfmt.Println("Value from task 2:", result.StickyTask2.Result)\n\t\t\treturn nil\n\t\t},\n\t\t"retries": func() error {\n\t\t\tretries := v1_workflows.Retries(hatchet)\n\t\t\t_, err := retries.Run(ctx, v1_workflows.RetriesInput{})\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println("Received expected error:", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println("Retries workflow completed unexpectedly")\n\t\t\treturn nil\n\t\t},\n\t\t"retries-count": func() error {\n\t\t\tretriesCount := v1_workflows.RetriesWithCount(hatchet)\n\t\t\tresult, err := retriesCount.Run(ctx, v1_workflows.RetriesWithCountInput{})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println("Result message:", result.Message)\n\t\t\treturn nil\n\t\t},\n\t\t"with-backoff": func() error {\n\t\t\twithBackoff := v1_workflows.WithBackoff(hatchet)\n\t\t\t_, err := withBackoff.Run(ctx, v1_workflows.BackoffInput{})\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println("Received expected error:", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println("WithBackoff workflow completed unexpectedly")\n\t\t\treturn nil\n\t\t},\n\t\t"non-retryable": func() error {\n\t\t\tnonRetryable := v1_workflows.NonRetryableError(hatchet)\n\t\t\t_, err := nonRetryable.Run(ctx, v1_workflows.NonRetryableInput{})\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println("Received expected error:", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println("NonRetryable workflow completed unexpectedly")\n\t\t\treturn nil\n\t\t},\n\t\t"on-cron": func() error {\n\t\t\tcronTask := v1_workflows.OnCron(hatchet)\n\t\t\tresult, err := cronTask.Run(ctx, v1_workflows.OnCronInput{\n\t\t\t\tMessage: "Hello, Cron!",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println("Cron task result:", result.Job.TransformedMessage)\n\t\t\treturn nil\n\t\t},\n\t\t"priority": func() error {\n\n\t\t\tnRuns := 10\n\t\t\tpriorityWorkflow := v1_workflows.Priority(hatchet)\n\n\t\t\tfor i := 0; i < nRuns; i++ {\n\t\t\t\trandomPrio := int32(rand.Intn(3) + 1)\n\n\t\t\t\tfmt.Println("Random priority:", randomPrio)\n\n\t\t\t\tpriorityWorkflow.RunNoWait(ctx, v1_workflows.PriorityInput{\n\t\t\t\t\tUserId: "1234",\n\t\t\t\t}, client.WithRunMetadata(map[string]int32{"priority": randomPrio}), client.WithPriority(randomPrio))\n\t\t\t}\n\n\t\t\ttriggerAt := time.Now().Add(time.Second + 5)\n\n\t\t\tfor i := 0; i < nRuns; i++ {\n\t\t\t\trandomPrio := int32(rand.Intn(3) + 1)\n\n\t\t\t\tfmt.Println("Random priority:", randomPrio)\n\n\t\t\t\tpriorityWorkflow.Schedule(ctx, triggerAt, v1_workflows.PriorityInput{\n\t\t\t\t\tUserId: "1234",\n\t\t\t\t}, client.WithRunMetadata(map[string]int32{"priority": randomPrio}), client.WithPriority(randomPrio))\n\t\t\t}\n\n\t\t\treturn nil\n\t\t},\n\t}\n\n\t// Lookup workflow runner from map\n\trunner, ok := runnerMap[workflowName]\n\tif !ok {\n\t\tfmt.Println("Invalid workflow name provided. 
Usage: go run examples/v1/run/simple.go [workflow-name]")\n\t\tfmt.Println("Available workflows:", getAvailableWorkflows(runnerMap))\n\t\tos.Exit(1)\n\t}\n\n\t// Run the selected workflow\n\terr = runner()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n\n// Helper function to get available workflows as a formatted string\nfunc getAvailableWorkflows(runnerMap map[string]func() error) string {\n\tvar workflows string\n\tcount := 0\n\tfor name := range runnerMap {\n\t\tif count > 0 {\n\t\t\tworkflows += ", "\n\t\t}\n\t\tworkflows += fmt.Sprintf("\'%s\'", name)\n\t\tcount++\n\t}\n\treturn workflows\n}\n', - source: 'out/go/run/all.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/run/bulk.ts b/frontend/app/src/next/lib/docs/generated/snips/go/run/bulk.ts deleted file mode 100644 index bc091ce53..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/run/bulk.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\n\tv1_workflows "github.com/hatchet-dev/hatchet/examples/go/workflows"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/joho/godotenv"\n)\n\nfunc bulk() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tctx := context.Background()\n\t// > Bulk Run Tasks\n\tsimple := v1_workflows.Simple(hatchet)\n\tbulkRunIds, err := simple.RunBulkNoWait(ctx, []v1_workflows.SimpleInput{\n\t\t{\n\t\t\tMessage: "Hello, World!",\n\t\t},\n\t\t{\n\t\t\tMessage: "Hello, Moon!",\n\t\t},\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(bulkRunIds)\n}\n', - source: 'out/go/run/bulk.go', - blocks: { - bulk_run_tasks: { - start: 26, - stop: 40, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/run/cron.ts b/frontend/app/src/next/lib/docs/generated/snips/go/run/cron.ts deleted file mode 100644 index 58f2e363c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/run/cron.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\n\tv1_workflows "github.com/hatchet-dev/hatchet/examples/go/workflows"\n\t"github.com/hatchet-dev/hatchet/pkg/client/rest"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/joho/godotenv"\n)\n\nfunc cron() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// > Create\n\tsimple := v1_workflows.Simple(hatchet)\n\n\tctx := context.Background()\n\n\tresult, err := simple.Cron(\n\t\tctx,\n\t\t"daily-run",\n\t\t"0 0 * * *",\n\t\tv1_workflows.SimpleInput{\n\t\t\tMessage: "Hello, World!",\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// it may be useful to save the cron id for later\n\tfmt.Println(result.Metadata.Id)\n\n\t// > Delete\n\thatchet.Crons().Delete(ctx, result.Metadata.Id)\n\n\t// > List\n\tcrons, err := hatchet.Crons().List(ctx, rest.CronWorkflowListParams{\n\t\tAdditionalMetadata: &[]string{"user:daily-run"},\n\t})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Println(crons)\n}\n', - source: 'out/go/run/cron.go', - blocks: { - create: { - start: 25, 
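The `index.ts` barrels deleted throughout this patch mirror the example directory tree, so a snippet is addressable by its path. A hypothetical lookup, assuming the re-exports shown in this diff were still in place:

// Hypothetical usage of the generated barrels deleted in this patch:
// go/index.ts re-exports `run`, and run/index.ts re-exports `cron`,
// so the cron snippet resolves by path.
import * as go from '@/next/lib/docs/generated/snips/go';

const cronSnippet = go.run.cron; // Snippet for out/go/run/cron.go
const createRange = cronSnippet.blocks.create; // the `// > Create` section, lines 25-43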
- stop: 43, - }, - delete: { - start: 46, - stop: 46, - }, - list: { - start: 49, - stop: 51, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/run/event.ts b/frontend/app/src/next/lib/docs/generated/snips/go/run/event.ts deleted file mode 100644 index a1086dab4..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/run/event.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\n\t"github.com/google/uuid"\n\t"github.com/joho/godotenv"\n\n\tv1_workflows "github.com/hatchet-dev/hatchet/examples/go/workflows"\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/rest"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n)\n\nfunc event() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// > Pushing an Event\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t"simple-event:create",\n\t\tv1_workflows.SimpleInput{\n\t\t\tMessage: "Hello, World!",\n\t\t},\n\t)\n\n\t// > Create a filter\n\tpayload := map[string]interface{}{\n\t\t"main_character": "Anna",\n\t\t"supporting_character": "Stiva",\n\t\t"location": "Moscow",\n\t}\n\n\t_, err = hatchet.Filters().Create(\n\t\tcontext.Background(),\n\t\trest.V1CreateFilterRequest{\n\t\t\tWorkflowId: uuid.New(),\n\t\t\tExpression: "input.shouldSkip == false",\n\t\t\tScope: "foobarbaz",\n\t\t\tPayload: &payload,\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > Skip a run\n\tskipPayload := map[string]interface{}{\n\t\t"shouldSkip": true,\n\t}\n\tskipScope := "foobarbaz"\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t"simple-event:create",\n\t\tskipPayload,\n\t\tclient.WithFilterScope(&skipScope),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > Trigger a run\n\ttriggerPayload := map[string]interface{}{\n\t\t"shouldSkip": false,\n\t}\n\ttriggerScope := "foobarbaz"\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t"simple-event:create",\n\t\ttriggerPayload,\n\t\tclient.WithFilterScope(&triggerScope),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n', - source: 'out/go/run/event.go', - blocks: { - pushing_an_event: { - start: 27, - stop: 33, - }, - create_a_filter: { - start: 36, - stop: 50, - }, - skip_a_run: { - start: 57, - stop: 66, - }, - trigger_a_run: { - start: 73, - stop: 82, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/run/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/run/index.ts deleted file mode 100644 index 50679c525..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/run/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import all from './all'; -import bulk from './bulk'; -import cron from './cron'; -import event from './event'; -import priority from './priority'; -import simple from './simple'; - -export { all }; -export { bulk }; -export { cron }; -export { event }; -export { priority }; -export { simple }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/run/priority.ts b/frontend/app/src/next/lib/docs/generated/snips/go/run/priority.ts deleted file mode 100644 index d8e65985f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/run/priority.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { 
Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"time"\n\n\tv1_workflows "github.com/hatchet-dev/hatchet/examples/go/workflows"\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/joho/godotenv"\n)\n\nfunc priority() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tctx := context.Background()\n\n\tpriorityWorkflow := v1_workflows.Priority(hatchet)\n\n\t// > Running a Task with Priority\n\tpriority := int32(3)\n\n\trunId, err := priorityWorkflow.RunNoWait(ctx, v1_workflows.PriorityInput{\n\t\tUserId: "1234",\n\t}, client.WithPriority(priority))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(runId)\n\n\t// > Schedule and cron\n\tschedulePriority := int32(3)\n\trunAt := time.Now().Add(time.Minute)\n\n\tscheduledRunId, _ := priorityWorkflow.Schedule(ctx, runAt, v1_workflows.PriorityInput{\n\t\tUserId: "1234",\n\t}, client.WithPriority(schedulePriority))\n\n\tcronId, _ := priorityWorkflow.Cron(ctx, "my-cron", "* * * * *", v1_workflows.PriorityInput{\n\t\tUserId: "1234",\n\t}, client.WithPriority(schedulePriority))\n\n\tfmt.Println(scheduledRunId)\n\tfmt.Println(cronId)\n\n}\n', - source: 'out/go/run/priority.go', - blocks: { - running_a_task_with_priority: { - start: 31, - stop: 35, - }, - schedule_and_cron: { - start: 44, - stop: 53, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/run/simple.ts b/frontend/app/src/next/lib/docs/generated/snips/go/run/simple.ts deleted file mode 100644 index 42fcaf516..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/run/simple.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"sync"\n\n\tv1_workflows "github.com/hatchet-dev/hatchet/examples/go/workflows"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/joho/godotenv"\n)\n\nfunc simple() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tctx := context.Background()\n\t// > Running a Task\n\tsimple := v1_workflows.Simple(hatchet)\n\tresult, err := simple.Run(ctx, v1_workflows.SimpleInput{\n\t\tMessage: "Hello, World!",\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(result.TransformedMessage)\n\n\t// > Running Multiple Tasks\n\tvar results []string\n\tvar resultsMutex sync.Mutex\n\tvar errs []error\n\tvar errsMutex sync.Mutex\n\n\twg := sync.WaitGroup{}\n\twg.Add(2)\n\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tresult, err := simple.Run(ctx, v1_workflows.SimpleInput{\n\t\t\tMessage: "Hello, World!",\n\t\t})\n\n\t\tif err != nil {\n\t\t\terrsMutex.Lock()\n\t\t\terrs = append(errs, err)\n\t\t\terrsMutex.Unlock()\n\t\t\treturn\n\t\t}\n\n\t\tresultsMutex.Lock()\n\t\tresults = append(results, result.TransformedMessage)\n\t\tresultsMutex.Unlock()\n\t}()\n\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tresult, err := simple.Run(ctx, v1_workflows.SimpleInput{\n\t\t\tMessage: "Hello, Moon!",\n\t\t})\n\n\t\tif err != nil {\n\t\t\terrsMutex.Lock()\n\t\t\terrs = append(errs, 
err)\n\t\t\terrsMutex.Unlock()\n\t\t\treturn\n\t\t}\n\n\t\tresultsMutex.Lock()\n\t\tresults = append(results, result.TransformedMessage)\n\t\tresultsMutex.Unlock()\n\t}()\n\n\twg.Wait()\n\n\t// > Running a Task Without Waiting\n\tsimple = v1_workflows.Simple(hatchet)\n\trunRef, err := simple.RunNoWait(ctx, v1_workflows.SimpleInput{\n\t\tMessage: "Hello, World!",\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// The Run Ref Exposes an ID that can be used to wait for the task to complete\n\t// or check on the status of the task\n\trunId := runRef.RunId()\n\tfmt.Println(runId)\n\n\t// > Subscribing to results\n\t// finally, we can wait for the task to complete and get the result\n\tfinalResult, err := runRef.Result()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(finalResult)\n}\n', - source: 'out/go/run/simple.go', - blocks: { - running_a_task: { - start: 27, - stop: 36, - }, - running_multiple_tasks: { - start: 39, - stop: 83, - }, - running_a_task_without_waiting: { - start: 86, - stop: 98, - }, - subscribing_to_results: { - start: 101, - stop: 108, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/consumer/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/streaming/consumer/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/consumer/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/consumer/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/streaming/consumer/main.ts deleted file mode 100644 index 56bb57851..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/consumer/main.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\n\t"github.com/hatchet-dev/hatchet/examples/go/streaming/shared"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n)\n\n// > Consume\nfunc main() {\n\thatchet, err := v1.NewHatchetClient()\n\tif err != nil {\n\t\tlog.Fatalf("Failed to create Hatchet client: %v", err)\n\t}\n\n\tctx := context.Background()\n\n\tstreamingWorkflow := shared.StreamingWorkflow(hatchet)\n\n\tworkflowRun, err := streamingWorkflow.RunNoWait(ctx, shared.StreamTaskInput{})\n\tif err != nil {\n\t\tlog.Fatalf("Failed to run workflow: %v", err)\n\t}\n\n\tid := workflowRun.RunId()\n\tstream, err := hatchet.Runs().SubscribeToStream(ctx, id)\n\tif err != nil {\n\t\tlog.Fatalf("Failed to subscribe to stream: %v", err)\n\t}\n\n\tfor content := range stream {\n\t\tfmt.Print(content)\n\t}\n\n\tfmt.Println("\\nStreaming completed!")\n}\n\n', - source: 'out/go/streaming/consumer/main.go', - blocks: { - consume: { - start: 13, - stop: 40, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/streaming/index.ts deleted file mode 100644 index 94182226d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import * as consumer from './consumer'; -import * as server from './server'; -import * as shared from './shared'; -import * as worker from './worker'; - -export { consumer }; -export { server }; -export { shared }; -export { 
worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/server/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/streaming/server/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/server/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/server/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/streaming/server/main.ts deleted file mode 100644 index aed0686ee..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/server/main.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\t"net/http"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/examples/go/streaming/shared"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n)\n\n// > Server\nfunc main() {\n\thatchet, err := v1.NewHatchetClient()\n\tif err != nil {\n\t\tlog.Fatalf("Failed to create Hatchet client: %v", err)\n\t}\n\n\tstreamingWorkflow := shared.StreamingWorkflow(hatchet)\n\n\thttp.HandleFunc("/stream", func(w http.ResponseWriter, r *http.Request) {\n\t\tctx := context.Background()\n\n\t\tw.Header().Set("Content-Type", "text/plain")\n\t\tw.Header().Set("Cache-Control", "no-cache")\n\t\tw.Header().Set("Connection", "keep-alive")\n\n\t\tworkflowRun, err := streamingWorkflow.RunNoWait(ctx, shared.StreamTaskInput{})\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tstream, err := hatchet.Runs().SubscribeToStream(ctx, workflowRun.RunId())\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tflusher, _ := w.(http.Flusher)\n\t\tfor content := range stream {\n\t\t\tfmt.Fprint(w, content)\n\t\t\tif flusher != nil {\n\t\t\t\tflusher.Flush()\n\t\t\t}\n\t\t}\n\t})\n\n\tserver := &http.Server{\n\t\tAddr: ":8000",\n\t\tReadTimeout: 5 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t}\n\n\tif err := server.ListenAndServe(); err != nil {\n\t\tlog.Println("Failed to start server:", err)\n\t}\n}\n\n', - source: 'out/go/streaming/server/main.go', - blocks: { - server: { - start: 15, - stop: 61, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/shared/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/streaming/shared/index.ts deleted file mode 100644 index d7cc4ef29..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/shared/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import task from './task'; - -export { task }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/shared/task.ts b/frontend/app/src/next/lib/docs/generated/snips/go/streaming/shared/task.ts deleted file mode 100644 index 45dfc7c29..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/shared/task.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package shared\n\nimport (\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 
"github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype StreamTaskInput struct{}\n\ntype StreamTaskOutput struct {\n\tMessage string `json:"message"`\n}\n\n// > Streaming\nconst annaKarenina = `\nHappy families are all alike; every unhappy family is unhappy in its own way.\n\nEverything was in confusion in the Oblonskys\' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him.\n`\n\nfunc createChunks(content string, n int) []string {\n\tvar chunks []string\n\tfor i := 0; i < len(content); i += n {\n\t\tend := i + n\n\t\tif end > len(content) {\n\t\t\tend = len(content)\n\t\t}\n\t\tchunks = append(chunks, content[i:end])\n\t}\n\treturn chunks\n}\n\nfunc StreamTask(ctx worker.HatchetContext, input StreamTaskInput) (*StreamTaskOutput, error) {\n\ttime.Sleep(2 * time.Second)\n\n\tchunks := createChunks(annaKarenina, 10)\n\n\tfor _, chunk := range chunks {\n\t\tctx.PutStream(chunk)\n\t\ttime.Sleep(200 * time.Millisecond)\n\t}\n\n\treturn &StreamTaskOutput{\n\t\tMessage: "Streaming completed",\n\t}, nil\n}\n\n\nfunc StreamingWorkflow(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[StreamTaskInput, StreamTaskOutput] {\n\treturn factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "stream-example",\n\t\t},\n\t\tStreamTask,\n\t\thatchet,\n\t)\n}\n', - source: 'out/go/streaming/shared/task.go', - blocks: { - streaming: { - start: 20, - stop: 52, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/worker/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/streaming/worker/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/worker/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/worker/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/streaming/worker/main.ts deleted file mode 100644 index 279440a7b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/streaming/worker/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"log"\n\n\t"github.com/hatchet-dev/hatchet/examples/go/streaming/shared"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\tv1worker "github.com/hatchet-dev/hatchet/pkg/v1/worker"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n)\n\nfunc main() {\n\thatchet, err := v1.NewHatchetClient()\n\tif err != nil {\n\t\tlog.Fatalf("Failed to create Hatchet client: %v", err)\n\t}\n\n\tstreamingWorkflow := shared.StreamingWorkflow(hatchet)\n\n\tw, err := hatchet.Worker(v1worker.WorkerOpts{\n\t\tName: "streaming-worker",\n\t\tWorkflows: []workflow.WorkflowBase{\n\t\t\tstreamingWorkflow,\n\t\t},\n\t})\n\tif err != nil {\n\t\tlog.Fatalf("Failed to create worker: %v", err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.NewInterruptContext()\n\tdefer cancel()\n\n\tlog.Println("Starting streaming worker...")\n\n\tif err := w.StartBlocking(interruptCtx); err != nil {\n\t\tlog.Println("Worker 
failed:", err)\n\t}\n}\n', - source: 'out/go/streaming/worker/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/worker/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/worker/index.ts deleted file mode 100644 index 90029475e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/worker/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import start from './start'; - -export { start }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/worker/start.ts b/frontend/app/src/next/lib/docs/generated/snips/go/worker/start.ts deleted file mode 100644 index 16afb10a4..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/worker/start.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"os"\n\t"time"\n\n\tv1_workflows "github.com/hatchet-dev/hatchet/examples/go/workflows"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/worker"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/joho/godotenv"\n)\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Get workflow name from command line arguments\n\tvar workflowName string\n\tif len(os.Args) > 1 {\n\t\tworkflowName = os.Args[1]\n\t\tfmt.Println("workflow name provided:", workflowName)\n\t}\n\n\t// Define workflows map\n\tworkflowMap := map[string][]workflow.WorkflowBase{\n\t\t"dag": {v1_workflows.DagWorkflow(hatchet)},\n\t\t"on-failure": {v1_workflows.OnFailure(hatchet)},\n\t\t"simple": {v1_workflows.Simple(hatchet)},\n\t\t"sleep": {v1_workflows.DurableSleep(hatchet)},\n\t\t"child": {v1_workflows.Parent(hatchet), v1_workflows.Child(hatchet)},\n\t\t"cancellation": {v1_workflows.Cancellation(hatchet)},\n\t\t"timeout": {v1_workflows.Timeout(hatchet)},\n\t\t"sticky": {v1_workflows.Sticky(hatchet), v1_workflows.StickyDag(hatchet), v1_workflows.Child(hatchet)},\n\t\t"retries": {v1_workflows.Retries(hatchet), v1_workflows.RetriesWithCount(hatchet), v1_workflows.WithBackoff(hatchet)},\n\t\t"on-cron": {v1_workflows.OnCron(hatchet)},\n\t\t"non-retryable": {v1_workflows.NonRetryableError(hatchet)},\n\t\t"priority": {v1_workflows.Priority(hatchet)},\n\t}\n\n\t// Add an "all" option that registers all workflows\n\tallWorkflows := []workflow.WorkflowBase{}\n\tfor _, wfs := range workflowMap {\n\t\tallWorkflows = append(allWorkflows, wfs...)\n\t}\n\tworkflowMap["all"] = allWorkflows\n\n\t// Lookup workflow from map\n\tworkflow, ok := workflowMap[workflowName]\n\tif !ok {\n\t\tfmt.Println("Invalid workflow name provided. 
Usage: go run examples/v1/worker/start.go [workflow-name]")\n\t\tfmt.Println("Available workflows:", getAvailableWorkflows(workflowMap))\n\t\tos.Exit(1)\n\t}\n\n\tvar slots int\n\tif workflowName == "priority" {\n\t\tslots = 1\n\t} else {\n\t\tslots = 100\n\t}\n\n\tworker, err := hatchet.Worker(\n\t\tworker.WorkerOpts{\n\t\t\tName: fmt.Sprintf("%s-worker", workflowName),\n\t\t\tWorkflows: workflow,\n\t\t\tSlots: slots,\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.NewInterruptContext()\n\n\terr = worker.StartBlocking(interruptCtx)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tgo func() {\n\t\ttime.Sleep(10 * time.Second)\n\t\tcancel()\n\t}()\n}\n\n// Helper function to get available workflows as a formatted string\nfunc getAvailableWorkflows(workflowMap map[string][]workflow.WorkflowBase) string {\n\tvar workflows string\n\tcount := 0\n\tfor name := range workflowMap {\n\t\tif count > 0 {\n\t\t\tworkflows += ", "\n\t\t}\n\t\tworkflows += fmt.Sprintf("\'%s\'", name)\n\t\tcount++\n\t}\n\treturn workflows\n}\n', - source: 'out/go/worker/start.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/cancellations.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/cancellations.ts deleted file mode 100644 index b243a4c50..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/cancellations.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"errors"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype CancellationInput struct{}\ntype CancellationResult struct {\n\tCompleted bool\n}\n\nfunc Cancellation(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[CancellationInput, CancellationResult] {\n\n\t// > Cancelled task\n\t// Create a task that sleeps for 10 seconds and checks if it was cancelled\n\tcancellation := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "cancellation-task",\n\t\t}, func(ctx worker.HatchetContext, input CancellationInput) (*CancellationResult, error) {\n\t\t\t// Sleep for 10 seconds\n\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t// Check if the context was cancelled\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil, errors.New("Task was cancelled")\n\t\t\tdefault:\n\t\t\t\t// Continue execution\n\t\t\t}\n\n\t\t\treturn &CancellationResult{\n\t\t\t\tCompleted: true,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn cancellation\n}\n', - source: 'out/go/workflows/cancellations.go', - blocks: { - cancelled_task: { - start: 22, - stop: 43, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/child-workflows.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/child-workflows.ts deleted file mode 100644 index d861235db..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/child-workflows.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport 
(\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype ChildInput struct {\n\tN int `json:"n"`\n}\n\ntype ValueOutput struct {\n\tValue int `json:"value"`\n}\n\ntype ParentInput struct {\n\tN int `json:"n"`\n}\n\ntype SumOutput struct {\n\tResult int `json:"result"`\n}\n\nfunc Child(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ChildInput, ValueOutput] {\n\tchild := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "child",\n\t\t}, func(ctx worker.HatchetContext, input ChildInput) (*ValueOutput, error) {\n\t\t\treturn &ValueOutput{\n\t\t\t\tValue: input.N,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn child\n}\n\nfunc Parent(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ParentInput, SumOutput] {\n\n\tchild := Child(hatchet)\n\tparent := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "parent",\n\t\t}, func(ctx worker.HatchetContext, input ParentInput) (*SumOutput, error) {\n\n\t\t\tsum := 0\n\n\t\t\t// Launch child workflows in parallel\n\t\t\tresults := make([]*ValueOutput, 0, input.N)\n\t\t\tfor j := 0; j < input.N; j++ {\n\t\t\t\tresult, err := child.RunAsChild(ctx, ChildInput{N: j}, workflow.RunAsChildOpts{})\n\n\t\t\t\tif err != nil {\n\t\t\t\t\t// firstErr = err\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\n\t\t\t\tresults = append(results, result)\n\n\t\t\t}\n\n\t\t\t// Sum results from all children\n\t\t\tfor _, result := range results {\n\t\t\t\tsum += result.Value\n\t\t\t}\n\n\t\t\treturn &SumOutput{\n\t\t\t\tResult: sum,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn parent\n}\n', - source: 'out/go/workflows/child-workflows.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/complex-conditions.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/complex-conditions.ts deleted file mode 100644 index 2c2ccd855..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/complex-conditions.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"math/rand"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n\t"github.com/hatchet-dev/hatchet/pkg/worker/condition"\n)\n\n// StepOutput represents the output of most tasks in this workflow\ntype StepOutput struct {\n\tRandomNumber int `json:"randomNumber"`\n}\n\n// RandomSum represents the output of the sum task\ntype RandomSum struct {\n\tSum int `json:"sum"`\n}\n\n// TaskConditionWorkflowResult represents the aggregate output of all tasks\ntype TaskConditionWorkflowResult struct {\n\tStart StepOutput `json:"start"`\n\tWaitForSleep StepOutput `json:"waitForSleep"`\n\tWaitForEvent StepOutput `json:"waitForEvent"`\n\tSkipOnEvent StepOutput `json:"skipOnEvent"`\n\tLeftBranch StepOutput `json:"leftBranch"`\n\tRightBranch StepOutput `json:"rightBranch"`\n\tSum RandomSum `json:"sum"`\n}\n\n// taskOpts is a type alias for workflow task options\ntype taskOpts = create.WorkflowTask[struct{}, TaskConditionWorkflowResult]\n\nfunc 
TaskConditionWorkflow(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[struct{}, TaskConditionWorkflowResult] {\n\t// > Create a workflow\n\twf := factory.NewWorkflow[struct{}, TaskConditionWorkflowResult](\n\t\tcreate.WorkflowCreateOpts[struct{}]{\n\t\t\tName: "TaskConditionWorkflow",\n\t\t},\n\t\thatchet,\n\t)\n\n\t// > Add base task\n\tstart := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: "start",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add wait for sleep\n\twaitForSleep := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: "waitForSleep",\n\t\t\tParents: []create.NamedTask{start},\n\t\t\tWaitFor: condition.SleepCondition(time.Second * 10),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add skip on event\n\tskipOnEvent := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: "skipOnEvent",\n\t\t\tParents: []create.NamedTask{start},\n\t\t\tWaitFor: condition.SleepCondition(time.Second * 30),\n\t\t\tSkipIf: condition.UserEventCondition("skip_on_event:skip", "true"),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add branching\n\tleftBranch := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: "leftBranch",\n\t\t\tParents: []create.NamedTask{waitForSleep},\n\t\t\tSkipIf: condition.ParentCondition(waitForSleep, "output.randomNumber > 50"),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\trightBranch := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: "rightBranch",\n\t\t\tParents: []create.NamedTask{waitForSleep},\n\t\t\tSkipIf: condition.ParentCondition(waitForSleep, "output.randomNumber <= 50"),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add wait for event\n\twaitForEvent := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: "waitForEvent",\n\t\t\tParents: []create.NamedTask{start},\n\t\t\tWaitFor: condition.Or(\n\t\t\t\tcondition.SleepCondition(time.Minute),\n\t\t\t\tcondition.UserEventCondition("wait_for_event:start", "true"),\n\t\t\t),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add sum\n\twf.Task(\n\t\ttaskOpts{\n\t\t\tName: "sum",\n\t\t\tParents: []create.NamedTask{\n\t\t\t\tstart,\n\t\t\t\twaitForSleep,\n\t\t\t\twaitForEvent,\n\t\t\t\tskipOnEvent,\n\t\t\t\tleftBranch,\n\t\t\t\trightBranch,\n\t\t\t},\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\tvar startOutput StepOutput\n\t\t\tif err := ctx.ParentOutput(start, &startOutput); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tvar waitForSleepOutput StepOutput\n\t\t\tif err := ctx.ParentOutput(waitForSleep, &waitForSleepOutput); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tvar waitForEventOutput StepOutput\n\t\t\tctx.ParentOutput(waitForEvent, &waitForEventOutput)\n\n\t\t\t// Handle potentially skipped tasks\n\t\t\tvar skipOnEventOutput StepOutput\n\t\t\tvar four int\n\n\t\t\terr := ctx.ParentOutput(skipOnEvent, 
&skipOnEventOutput)\n\n\t\t\tif err != nil {\n\t\t\t\tfour = 0\n\t\t\t} else {\n\t\t\t\tfour = skipOnEventOutput.RandomNumber\n\t\t\t}\n\n\t\t\tvar leftBranchOutput StepOutput\n\t\t\tvar five int\n\n\t\t\terr = ctx.ParentOutput(leftBranch, leftBranchOutput)\n\t\t\tif err != nil {\n\t\t\t\tfive = 0\n\t\t\t} else {\n\t\t\t\tfive = leftBranchOutput.RandomNumber\n\t\t\t}\n\n\t\t\tvar rightBranchOutput StepOutput\n\t\t\tvar six int\n\n\t\t\terr = ctx.ParentOutput(rightBranch, rightBranchOutput)\n\t\t\tif err != nil {\n\t\t\t\tsix = 0\n\t\t\t} else {\n\t\t\t\tsix = rightBranchOutput.RandomNumber\n\t\t\t}\n\n\t\t\treturn &RandomSum{\n\t\t\t\tSum: startOutput.RandomNumber + waitForEventOutput.RandomNumber +\n\t\t\t\t\twaitForSleepOutput.RandomNumber + four + five + six,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn wf\n}\n', - source: 'out/go/workflows/complex-conditions.go', - blocks: { - create_a_workflow: { - start: 41, - stop: 46, - }, - add_base_task: { - start: 49, - stop: 58, - }, - add_wait_for_sleep: { - start: 61, - stop: 72, - }, - add_skip_on_event: { - start: 75, - stop: 87, - }, - add_branching: { - start: 90, - stop: 114, - }, - add_wait_for_event: { - start: 117, - stop: 131, - }, - add_sum: { - start: 134, - stop: 197, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/concurrency-rr.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/concurrency-rr.ts deleted file mode 100644 index 506b1569b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/concurrency-rr.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"math/rand"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype ConcurrencyInput struct {\n\tMessage string\n\tTier string\n\tAccount string\n}\n\ntype TransformedOutput struct {\n\tTransformedMessage string\n}\n\nfunc ConcurrencyRoundRobin(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ConcurrencyInput, TransformedOutput] {\n\t// > Concurrency Strategy With Key\n\tvar maxRuns int32 = 1\n\tstrategy := types.GroupRoundRobin\n\n\tconcurrency := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "simple-concurrency",\n\t\t\tConcurrency: []*types.Concurrency{\n\t\t\t\t{\n\t\t\t\t\tExpression: "input.GroupKey",\n\t\t\t\t\tMaxRuns: &maxRuns,\n\t\t\t\t\tLimitStrategy: &strategy,\n\t\t\t\t},\n\t\t\t},\n\t\t}, func(ctx worker.HatchetContext, input ConcurrencyInput) (*TransformedOutput, error) {\n\t\t\t// Random sleep between 200ms and 1000ms\n\t\t\ttime.Sleep(time.Duration(200+rand.Intn(800)) * time.Millisecond)\n\n\t\t\treturn &TransformedOutput{\n\t\t\t\tTransformedMessage: input.Message,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn concurrency\n}\n\nfunc MultipleConcurrencyKeys(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ConcurrencyInput, TransformedOutput] {\n\t// > Multiple Concurrency Keys\n\tstrategy := types.GroupRoundRobin\n\tvar maxRuns int32 = 20\n\n\tconcurrency := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "simple-concurrency",\n\t\t\tConcurrency: []*types.Concurrency{\n\t\t\t\t{\n\t\t\t\t\tExpression: 
"input.Tier",\n\t\t\t\t\tMaxRuns: &maxRuns,\n\t\t\t\t\tLimitStrategy: &strategy,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tExpression: "input.Account",\n\t\t\t\t\tMaxRuns: &maxRuns,\n\t\t\t\t\tLimitStrategy: &strategy,\n\t\t\t\t},\n\t\t\t},\n\t\t}, func(ctx worker.HatchetContext, input ConcurrencyInput) (*TransformedOutput, error) {\n\t\t\t// Random sleep between 200ms and 1000ms\n\t\t\ttime.Sleep(time.Duration(200+rand.Intn(800)) * time.Millisecond)\n\n\t\t\treturn &TransformedOutput{\n\t\t\t\tTransformedMessage: input.Message,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn concurrency\n}\n', - source: 'out/go/workflows/concurrency-rr.go', - blocks: { - concurrency_strategy_with_key: { - start: 27, - stop: 49, - }, - multiple_concurrency_keys: { - start: 56, - stop: 83, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/dag-with-conditions.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/dag-with-conditions.ts deleted file mode 100644 index 0e82baae1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/dag-with-conditions.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"fmt"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype DagWithConditionsInput struct {\n\tMessage string\n}\n\ntype DagWithConditionsResult struct {\n\tStep1 SimpleOutput\n\tStep2 SimpleOutput\n}\n\ntype conditionOpts = create.WorkflowTask[DagWithConditionsInput, DagWithConditionsResult]\n\nfunc DagWithConditionsWorkflow(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[DagWithConditionsInput, DagWithConditionsResult] {\n\n\tsimple := factory.NewWorkflow[DagWithConditionsInput, DagWithConditionsResult](\n\t\tcreate.WorkflowCreateOpts[DagWithConditionsInput]{\n\t\t\tName: "simple-dag",\n\t\t},\n\t\thatchet,\n\t)\n\n\tstep1 := simple.Task(\n\t\tconditionOpts{\n\t\t\tName: "Step1",\n\t\t}, func(ctx worker.HatchetContext, input DagWithConditionsInput) (interface{}, error) {\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\tsimple.Task(\n\t\tconditionOpts{\n\t\t\tName: "Step2",\n\t\t\tParents: []create.NamedTask{\n\t\t\t\tstep1,\n\t\t\t},\n\t\t}, func(ctx worker.HatchetContext, input DagWithConditionsInput) (interface{}, error) {\n\n\t\t\tvar step1Output SimpleOutput\n\t\t\terr := ctx.ParentOutput(step1, &step1Output)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tfmt.Println(step1Output.Step)\n\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 2,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn simple\n}\n', - source: 'out/go/workflows/dag-with-conditions.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/dag.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/dag.ts deleted file mode 100644 index 58373c12f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/dag.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport 
(\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype DagInput struct {\n\tMessage string\n}\n\ntype SimpleOutput struct {\n\tStep int\n}\n\ntype DagResult struct {\n\tStep1 SimpleOutput\n\tStep2 SimpleOutput\n}\n\nfunc DagWorkflow(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[DagInput, DagResult] {\n\t// > Declaring a Workflow\n\tsimple := factory.NewWorkflow[DagInput, DagResult](\n\t\tcreate.WorkflowCreateOpts[DagInput]{\n\t\t\tName: "simple-dag",\n\n\t\t},\n\t\thatchet,\n\t)\n\n\t// > Defining a Task\n\tsimple.Task(\n\t\tcreate.WorkflowTask[DagInput, DagResult]{\n\t\t\tName: "step",\n\t\t}, func(ctx worker.HatchetContext, input DagInput) (interface{}, error) {\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Adding a Task with a parent\n\tstep1 := simple.Task(\n\t\tcreate.WorkflowTask[DagInput, DagResult]{\n\t\t\tName: "step-1",\n\t\t}, func(ctx worker.HatchetContext, input DagInput) (interface{}, error) {\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\tsimple.Task(\n\t\tcreate.WorkflowTask[DagInput, DagResult]{\n\t\t\tName: "step-2",\n\t\t\tParents: []create.NamedTask{\n\t\t\t\tstep1,\n\t\t\t},\n\t\t}, func(ctx worker.HatchetContext, input DagInput) (interface{}, error) {\n\t\t\t// Get the output of the parent task\n\t\t\tvar step1Output SimpleOutput\n\t\t\terr := ctx.ParentOutput(step1, &step1Output)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 2,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn simple\n}\n', - source: 'out/go/workflows/dag.go', - blocks: { - declaring_a_workflow: { - start: 26, - stop: 32, - }, - defining_a_task: { - start: 35, - stop: 43, - }, - adding_a_task_with_a_parent: { - start: 46, - stop: 74, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/durable-event.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/durable-event.ts deleted file mode 100644 index 83c66bb5a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/durable-event.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype DurableEventInput struct {\n\tMessage string\n}\n\ntype EventData struct {\n\tMessage string\n}\n\ntype DurableEventOutput struct {\n\tData EventData\n}\n\nfunc DurableEvent(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[DurableEventInput, DurableEventOutput] {\n\t// > Durable Event\n\tdurableEventTask := factory.NewDurableTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "durable-event",\n\t\t},\n\t\tfunc(ctx worker.DurableHatchetContext, input DurableEventInput) (*DurableEventOutput, error) {\n\t\t\teventData, err := ctx.WaitForEvent("user:update", "")\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tv := EventData{}\n\t\t\terr = eventData.Unmarshal(&v)\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, 
err\n\t\t\t}\n\n\t\t\treturn &DurableEventOutput{\n\t\t\t\tData: v,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\tfactory.NewDurableTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "durable-event",\n\t\t},\n\t\tfunc(ctx worker.DurableHatchetContext, input DurableEventInput) (*DurableEventOutput, error) {\n\t\t\t// > Durable Event With Filter\n\t\t\teventData, err := ctx.WaitForEvent("user:update", "input.user_id == \'1234\'")\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tv := EventData{}\n\t\t\terr = eventData.Unmarshal(&v)\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\treturn &DurableEventOutput{\n\t\t\t\tData: v,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn durableEventTask\n}\n', - source: 'out/go/workflows/durable-event.go', - blocks: { - durable_event: { - start: 25, - stop: 48, - }, - durable_event_with_filter: { - start: 56, - stop: 56, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/durable-sleep.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/durable-sleep.ts deleted file mode 100644 index 6f075a3b2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/durable-sleep.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"strings"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype DurableSleepInput struct {\n\tMessage string\n}\n\ntype DurableSleepOutput struct {\n\tTransformedMessage string\n}\n\nfunc DurableSleep(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[DurableSleepInput, DurableSleepOutput] {\n\t// > Durable Sleep\n\tsimple := factory.NewDurableTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "durable-sleep",\n\t\t},\n\t\tfunc(ctx worker.DurableHatchetContext, input DurableSleepInput) (*DurableSleepOutput, error) {\n\t\t\t_, err := ctx.SleepFor(10 * time.Second)\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\treturn &DurableSleepOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn simple\n}\n', - source: 'out/go/workflows/durable-sleep.go', - blocks: { - durable_sleep: { - start: 24, - stop: 40, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/index.ts deleted file mode 100644 index e7bba8f28..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/index.ts +++ /dev/null @@ -1,37 +0,0 @@ -import cancellations from './cancellations'; -import child_workflows from './child-workflows'; -import complex_conditions from './complex-conditions'; -import concurrency_rr from './concurrency-rr'; -import dag_with_conditions from './dag-with-conditions'; -import dag from './dag'; -import durable_event from './durable-event'; -import durable_sleep from './durable-sleep'; -import non_retryable_error from './non-retryable-error'; -import on_cron from './on-cron'; -import on_event from './on-event'; -import on_failure from './on-failure'; -import priority from './priority'; -import ratelimit from 
'./ratelimit'; -import retries from './retries'; -import simple from './simple'; -import sticky from './sticky'; -import timeouts from './timeouts'; - -export { cancellations }; -export { child_workflows }; -export { complex_conditions }; -export { concurrency_rr }; -export { dag_with_conditions }; -export { dag }; -export { durable_event }; -export { durable_sleep }; -export { non_retryable_error }; -export { on_cron }; -export { on_event }; -export { on_failure }; -export { priority }; -export { ratelimit }; -export { retries }; -export { simple }; -export { sticky }; -export { timeouts }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/non-retryable-error.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/non-retryable-error.ts deleted file mode 100644 index fc5b1b59f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/non-retryable-error.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"errors"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype NonRetryableInput struct{}\ntype NonRetryableResult struct{}\n\n// NonRetryableError returns a workflow which throws a non-retryable error\nfunc NonRetryableError(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[NonRetryableInput, NonRetryableResult] {\n\t// > Non Retryable Error\n\tretries := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "non-retryable-task",\n\t\t\tRetries: 3,\n\t\t}, func(ctx worker.HatchetContext, input NonRetryableInput) (*NonRetryableResult, error) {\n\t\t\treturn nil, worker.NewNonRetryableError(errors.New("intentional failure"))\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn retries\n}\n', - source: 'out/go/workflows/non-retryable-error.go', - blocks: { - non_retryable_error: { - start: 19, - stop: 27, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-cron.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-cron.ts deleted file mode 100644 index 1c39a5b0b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-cron.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"strings"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype OnCronInput struct {\n\tMessage string `json:"Message"`\n}\n\ntype JobResult struct {\n\tTransformedMessage string `json:"TransformedMessage"`\n}\n\ntype OnCronOutput struct {\n\tJob JobResult `json:"job"`\n}\n\n// > Workflow Definition Cron Trigger\nfunc OnCron(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[OnCronInput, OnCronOutput] {\n\t// Create a standalone task that transforms a message\n\tcronTask := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "on-cron-task",\n\t\t\t// 👀 add a cron expression\n\t\t\tOnCron: []string{"0 0 * * *"}, // Run every day at 
midnight\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input OnCronInput) (*OnCronOutput, error) {\n\t\t\treturn &OnCronOutput{\n\t\t\t\tJob: JobResult{\n\t\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t\t},\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn cronTask\n}\n\n', - source: 'out/go/workflows/on-cron.go', - blocks: { - workflow_definition_cron_trigger: { - start: 26, - stop: 46, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-event.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-event.ts deleted file mode 100644 index 79b7a8def..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-event.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"fmt"\n\t"strings"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype EventInput struct {\n\tMessage string\n}\n\ntype LowerTaskOutput struct {\n\tTransformedMessage string\n}\n\ntype UpperTaskOutput struct {\n\tTransformedMessage string\n}\n\n// > Run workflow on event\nconst SimpleEvent = "simple-event:create"\n\nfunc Lower(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[EventInput, LowerTaskOutput] {\n\treturn factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "lower",\n\t\t\t// 👀 Declare the event that will trigger the workflow\n\t\t\tOnEvents: []string{SimpleEvent},\n\t\t}, func(ctx worker.HatchetContext, input EventInput) (*LowerTaskOutput, error) {\n\t\t\t// Transform the input message to lowercase\n\t\t\treturn &LowerTaskOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n}\n\n\n// > Accessing the filter payload\nfunc accessFilterPayload(ctx worker.HatchetContext, input EventInput) (*LowerTaskOutput, error) {\n\tfmt.Println(ctx.FilterPayload())\n\treturn &LowerTaskOutput{\n\t\tTransformedMessage: strings.ToLower(input.Message),\n\t}, nil\n}\n\n\n// > Declare with filter\nfunc LowerWithFilter(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[EventInput, LowerTaskOutput] {\n\treturn factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "lower",\n\t\t\t// 👀 Declare the event that will trigger the workflow\n\t\t\tOnEvents: []string{SimpleEvent},\n\t\t\tDefaultFilters: []types.DefaultFilter{{\n\t\t\t\tExpression: "true",\n\t\t\t\tScope: "example-scope",\n\t\t\t\tPayload: map[string]interface{}{\n\t\t\t\t\t"main_character": "Anna",\n\t\t\t\t\t"supporting_character": "Stiva",\n\t\t\t\t\t"location": "Moscow"},\n\t\t\t}},\n\t\t}, accessFilterPayload,\n\t\thatchet,\n\t)\n}\n\n\nfunc Upper(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[EventInput, UpperTaskOutput] {\n\treturn factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "upper",\n\t\t\tOnEvents: []string{SimpleEvent},\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input EventInput) (*UpperTaskOutput, error) {\n\t\t\treturn &UpperTaskOutput{\n\t\t\t\tTransformedMessage: strings.ToUpper(input.Message),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n}\n', - source: 'out/go/workflows/on-event.go', - blocks: { - run_workflow_on_event: { - start: 28, - 
stop: 45, - }, - accessing_the_filter_payload: { - start: 48, - stop: 54, - }, - declare_with_filter: { - start: 57, - stop: 75, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-failure.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-failure.ts deleted file mode 100644 index 521c707ce..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/on-failure.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"errors"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype AlwaysFailsOutput struct {\n\tTransformedMessage string\n}\n\ntype OnFailureOutput struct {\n\tFailureRan bool\n}\n\ntype OnFailureSuccessResult struct {\n\tAlwaysFails AlwaysFailsOutput\n}\n\nfunc OnFailure(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[any, OnFailureSuccessResult] {\n\n\tsimple := factory.NewWorkflow[any, OnFailureSuccessResult](\n\t\tcreate.WorkflowCreateOpts[any]{\n\t\t\tName: "on-failure",\n\t\t},\n\t\thatchet,\n\t)\n\n\tsimple.Task(\n\t\tcreate.WorkflowTask[any, OnFailureSuccessResult]{\n\t\t\tName: "AlwaysFails",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ any) (interface{}, error) {\n\t\t\treturn &AlwaysFailsOutput{\n\t\t\t\tTransformedMessage: "always fails",\n\t\t\t}, errors.New("always fails")\n\t\t},\n\t)\n\n\tsimple.OnFailure(\n\t\tcreate.WorkflowOnFailureTask[any, OnFailureSuccessResult]{},\n\t\tfunc(ctx worker.HatchetContext, _ any) (interface{}, error) {\n\t\t\treturn &OnFailureOutput{\n\t\t\t\tFailureRan: true,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn simple\n}\n', - source: 'out/go/workflows/on-failure.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/priority.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/priority.ts deleted file mode 100644 index 3f1c80a79..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/priority.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype PriorityInput struct {\n\tUserId string `json:"userId"`\n}\n\ntype PriorityOutput struct {\n\tTransformedMessage string `json:"TransformedMessage"`\n}\n\ntype Result struct {\n\tStep PriorityOutput\n}\n\nfunc Priority(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[PriorityInput, Result] {\n\t// Create a standalone task that transforms a message\n\n\t// > Default priority\n\tdefaultPriority := int32(1)\n\n\tworkflow := factory.NewWorkflow[PriorityInput, Result](\n\t\tcreate.WorkflowCreateOpts[PriorityInput]{\n\t\t\tName: "priority",\n\t\t\tDefaultPriority: &defaultPriority,\n\t\t},\n\t\thatchet,\n\t)\n\n\t// > Defining a Task\n\tworkflow.Task(\n\t\tcreate.WorkflowTask[PriorityInput, Result]{\n\t\t\tName: 
"step",\n\t\t}, func(ctx worker.HatchetContext, input PriorityInput) (interface{}, error) {\n\t\t\ttime.Sleep(time.Second * 5)\n\t\t\treturn &PriorityOutput{\n\t\t\t\tTransformedMessage: input.UserId,\n\t\t\t}, nil\n\t\t},\n\t)\n\treturn workflow\n}\n\n', - source: 'out/go/workflows/priority.go', - blocks: { - default_priority: { - start: 29, - stop: 37, - }, - defining_a_task: { - start: 40, - stop: 49, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/ratelimit.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/ratelimit.ts deleted file mode 100644 index 6fb02c7ad..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/ratelimit.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"strings"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/features"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype RateLimitInput struct {\n\tUserId string `json:"userId"`\n}\n\ntype RateLimitOutput struct {\n\tTransformedMessage string `json:"TransformedMessage"`\n}\n\nfunc upsertRateLimit(hatchet v1.HatchetClient) {\n\t// > Upsert Rate Limit\n\thatchet.RateLimits().Upsert(\n\t\tfeatures.CreateRatelimitOpts{\n\t\t\tKey: "api-service-rate-limit",\n\t\t\tLimit: 10,\n\t\t\tDuration: types.Second,\n\t\t},\n\t)\n}\n\n// > Static Rate Limit\nfunc StaticRateLimit(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[RateLimitInput, RateLimitOutput] {\n\t// Create a standalone task that transforms a message\n\n\t// define the parameters for the rate limit\n\trateLimitKey := "api-service-rate-limit"\n\tunits := 1\n\n\trateLimitTask := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "rate-limit-task",\n\t\t\t// 👀 add a static rate limit\n\t\t\tRateLimits: []*types.RateLimit{\n\t\t\t\t{\n\t\t\t\t\tKey: rateLimitKey,\n\t\t\t\t\tUnits: &units,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input RateLimitInput) (*RateLimitOutput, error) {\n\t\t\treturn &RateLimitOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.UserId),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn rateLimitTask\n}\n\n\n// > Dynamic Rate Limit\nfunc RateLimit(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[RateLimitInput, RateLimitOutput] {\n\t// Create a standalone task that transforms a message\n\n\t// define the parameters for the rate limit\n\texpression := "input.userId"\n\tunits := 1\n\tduration := types.Second\n\n\trateLimitTask := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "rate-limit-task",\n\t\t\t// 👀 add a dynamic rate limit\n\t\t\tRateLimits: []*types.RateLimit{\n\t\t\t\t{\n\t\t\t\t\tKeyExpr: &expression,\n\t\t\t\t\tUnits: &units,\n\t\t\t\t\tDuration: &duration,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input RateLimitInput) (*RateLimitOutput, error) {\n\t\t\treturn &RateLimitOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.UserId),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn rateLimitTask\n}\n\n', - source: 'out/go/workflows/ratelimit.go', - blocks: { - upsert_rate_limit: { - start: 25, - stop: 31, - }, - 
static_rate_limit: { - start: 35, - stop: 63, - }, - dynamic_rate_limit: { - start: 66, - stop: 96, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/retries.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/retries.ts deleted file mode 100644 index 79a6fd388..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/retries.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"errors"\n\t"fmt"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype RetriesInput struct{}\ntype RetriesResult struct{}\n\n// Simple retries example that always fails\nfunc Retries(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[RetriesInput, RetriesResult] {\n\t// > Simple Step Retries\n\tretries := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "retries-task",\n\t\t\tRetries: 3,\n\t\t}, func(ctx worker.HatchetContext, input RetriesInput) (*RetriesResult, error) {\n\t\t\treturn nil, errors.New("intentional failure")\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn retries\n}\n\ntype RetriesWithCountInput struct{}\ntype RetriesWithCountResult struct {\n\tMessage string `json:"message"`\n}\n\n// Retries example that succeeds after a certain number of retries\nfunc RetriesWithCount(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[RetriesWithCountInput, RetriesWithCountResult] {\n\t// > Retries with Count\n\tretriesWithCount := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "fail-twice-task",\n\t\t\tRetries: 3,\n\t\t}, func(ctx worker.HatchetContext, input RetriesWithCountInput) (*RetriesWithCountResult, error) {\n\t\t\t// Get the current retry count\n\t\t\tretryCount := ctx.RetryCount()\n\n\t\t\tfmt.Printf("Retry count: %d\\n", retryCount)\n\n\t\t\tif retryCount < 2 {\n\t\t\t\treturn nil, errors.New("intentional failure")\n\t\t\t}\n\n\t\t\treturn &RetriesWithCountResult{\n\t\t\t\tMessage: "success",\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn retriesWithCount\n}\n\ntype BackoffInput struct{}\ntype BackoffResult struct{}\n\n// Retries example with simple backoff (no configuration in this API version)\nfunc WithBackoff(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[BackoffInput, BackoffResult] {\n\t// > Retries with Backoff\n\twithBackoff := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "with-backoff-task",\n\t\t\t// 👀 Maximum number of retries\n\t\t\tRetries: 3,\n\t\t\t// 👀 Factor to increase the wait time between retries.\n\t\t\tRetryBackoffFactor: 2,\n\t\t\t// 👀 Maximum number of seconds to wait between retries\n\t\t\t// This sequence will be 2s, 4s, 8s, 10s, 10s, 10s... 
due to the maxSeconds limit\n\t\t\tRetryMaxBackoffSeconds: 10,\n\t\t}, func(ctx worker.HatchetContext, input BackoffInput) (*BackoffResult, error) {\n\t\t\treturn nil, errors.New("intentional failure")\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn withBackoff\n}\n', - source: 'out/go/workflows/retries.go', - blocks: { - simple_step_retries: { - start: 20, - stop: 28, - }, - retries_with_count: { - start: 41, - stop: 60, - }, - retries_with_backoff: { - start: 71, - stop: 85, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/simple.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/simple.ts deleted file mode 100644 index a7a25aa5f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/simple.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"context"\n\t"fmt"\n\t"strings"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\tv1worker "github.com/hatchet-dev/hatchet/pkg/v1/worker"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype SimpleInput struct {\n\tMessage string\n}\ntype SimpleResult struct {\n\tTransformedMessage string\n}\n\nfunc Simple(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[SimpleInput, SimpleResult] {\n\n\t// Create a simple standalone task using the task factory\n\t// Note the use of typed generics for both input and output\n\n\t// > Declaring a Task\n\tsimple := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "simple-task",\n\t\t}, func(ctx worker.HatchetContext, input SimpleInput) (*SimpleResult, error) {\n\t\t\t// Transform the input message to lowercase\n\t\t\treturn &SimpleResult{\n\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\t// Example of running a task\n\t_ = func() error {\n\t\t// > Running a Task\n\t\tresult, err := simple.Run(context.Background(), SimpleInput{Message: "Hello, World!"})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(result.TransformedMessage)\n\t\treturn nil\n\t}\n\n\t// Example of registering a task on a worker\n\t_ = func() error {\n\t\t// > Declaring a Worker\n\t\tw, err := hatchet.Worker(v1worker.WorkerOpts{\n\t\t\tName: "simple-worker",\n\t\t\tWorkflows: []workflow.WorkflowBase{\n\t\t\t\tsimple,\n\t\t\t},\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = w.StartBlocking(context.Background())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n\n\treturn simple\n}\n\nfunc ParentTask(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[SimpleInput, SimpleResult] {\n\n\t// > Spawning Tasks from within a Task\n\tsimple := Simple(hatchet)\n\n\tparent := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "parent-task",\n\t\t}, func(ctx worker.HatchetContext, input SimpleInput) (*SimpleResult, error) {\n\n\t\t\t// Run the child task\n\t\t\tchild, err := workflow.RunChildWorkflow(ctx, simple, SimpleInput{Message: input.Message})\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\t// Transform the input message to lowercase\n\t\t\treturn &SimpleResult{\n\t\t\t\tTransformedMessage: child.TransformedMessage,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn parent\n}\n', - source: 
'out/go/workflows/simple.go', - blocks: { - declaring_a_task: { - start: 29, - stop: 39, - }, - running_a_task: { - start: 44, - stop: 48, - }, - declaring_a_worker: { - start: 55, - stop: 67, - }, - spawning_tasks_from_within_a_task: { - start: 77, - stop: 96, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/sticky.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/sticky.ts deleted file mode 100644 index 2aa162f6e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/sticky.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package v1_workflows\n\nimport (\n\t"fmt"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype StickyInput struct{}\n\ntype StickyResult struct {\n\tResult string `json:"result"`\n}\n\ntype StickyDagResult struct {\n\tStickyTask1 StickyResult `json:"sticky-task-1"`\n\tStickyTask2 StickyResult `json:"sticky-task-2"`\n}\n\nfunc StickyDag(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[StickyInput, StickyDagResult] {\n\tstickyDag := factory.NewWorkflow[StickyInput, StickyDagResult](\n\t\tcreate.WorkflowCreateOpts[StickyInput]{\n\t\t\tName: "sticky-dag",\n\t\t},\n\t\thatchet,\n\t)\n\n\tstickyDag.Task(\n\t\tcreate.WorkflowTask[StickyInput, StickyDagResult]{\n\t\t\tName: "sticky-task",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input StickyInput) (interface{}, error) {\n\t\t\tworkerId := ctx.Worker().ID()\n\n\t\t\treturn &StickyResult{\n\t\t\t\tResult: workerId,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\tstickyDag.Task(\n\t\tcreate.WorkflowTask[StickyInput, StickyDagResult]{\n\t\t\tName: "sticky-task-2",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input StickyInput) (interface{}, error) {\n\t\t\tworkerId := ctx.Worker().ID()\n\n\t\t\treturn &StickyResult{\n\t\t\t\tResult: workerId,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn stickyDag\n}\n\nfunc Sticky(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[StickyInput, StickyResult] {\n\tsticky := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "sticky-task",\n\t\t\tRetries: 3,\n\t\t}, func(ctx worker.HatchetContext, input StickyInput) (*StickyResult, error) {\n\t\t\t// Run a child workflow on the same worker\n\t\t\tchildWorkflow := Child(hatchet)\n\t\t\tsticky := true\n\t\t\tchildResult, err := childWorkflow.RunAsChild(ctx, ChildInput{N: 1}, workflow.RunAsChildOpts{\n\t\t\t\tSticky: &sticky,\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\treturn &StickyResult{\n\t\t\t\tResult: fmt.Sprintf("child-result-%d", childResult.Value),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn sticky\n}\n', - source: 'out/go/workflows/sticky.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/timeouts.ts b/frontend/app/src/next/lib/docs/generated/snips/go/workflows/timeouts.ts deleted file mode 100644 index ff2d4a2b9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/workflows/timeouts.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package 
v1_workflows\n\nimport (\n\t"errors"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client/create"\n\tv1 "github.com/hatchet-dev/hatchet/pkg/v1"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/factory"\n\t"github.com/hatchet-dev/hatchet/pkg/v1/workflow"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype TimeoutInput struct{}\ntype TimeoutResult struct {\n\tCompleted bool\n}\n\nfunc Timeout(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[TimeoutInput, TimeoutResult] {\n\n\t// > Execution Timeout\n\t// Create a task with a timeout of 3 seconds that tries to sleep for 10 seconds\n\ttimeout := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "timeout-task",\n\t\t\tExecutionTimeout: 3 * time.Second, // Task will timeout after 3 seconds\n\t\t}, func(ctx worker.HatchetContext, input TimeoutInput) (*TimeoutResult, error) {\n\t\t\t// Sleep for 10 seconds\n\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t// Check if the context was cancelled due to timeout\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil, errors.New("TASK TIMED OUT")\n\t\t\tdefault:\n\t\t\t\t// Continue execution\n\t\t\t}\n\n\t\t\treturn &TimeoutResult{\n\t\t\t\tCompleted: true,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn timeout\n}\n\nfunc RefreshTimeout(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[TimeoutInput, TimeoutResult] {\n\n\t// > Refresh Timeout\n\ttimeout := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: "timeout-task",\n\t\t\tExecutionTimeout: 3 * time.Second, // Task will timeout after 3 seconds\n\t\t}, func(ctx worker.HatchetContext, input TimeoutInput) (*TimeoutResult, error) {\n\n\t\t\t// Refresh the timeout by 10 seconds (new timeout will be 13 seconds)\n\t\t\tctx.RefreshTimeout("10s")\n\n\t\t\t// Sleep for 10 seconds\n\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t// Check if the context was cancelled due to timeout\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil, errors.New("TASK TIMED OUT")\n\t\t\tdefault:\n\t\t\t\t// Continue execution\n\t\t\t}\n\n\t\t\treturn &TimeoutResult{\n\t\t\t\tCompleted: true,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn timeout\n}\n', - source: 'out/go/workflows/timeouts.go', - blocks: { - execution_timeout: { - start: 22, - stop: 44, - }, - refresh_timeout: { - start: 52, - stop: 77, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/index.ts deleted file mode 100644 index 3042aa1d2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main }; -export { run }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/main.ts deleted file mode 100644 index f80457dfd..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string 
`json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\tcleanup, err := run()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("cleanup() error = %v", err))\n\t}\n}\n', - source: 'out/go/z_v0/assignment-affinity/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/run.ts deleted file mode 100644 index 48f2045ef..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithLabels(map[string]interface{}{\n\t\t\t"model": "fancy-ai-model-v2",\n\t\t\t"memory": 1024,\n\t\t}),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events("user:create:affinity"),\n\t\t\tName: "affinity",\n\t\t\tDescription: "affinity",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tmodel := ctx.Worker().GetLabels()["model"]\n\n\t\t\t\t\tif model != "fancy-ai-model-v3" {\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t"model": nil,\n\t\t\t\t\t\t})\n\t\t\t\t\t\t// Do something to load the model\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t"model": "fancy-ai-model-v3",\n\t\t\t\t\t\t})\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).\n\t\t\t\t\tSetName("step-one").\n\t\t\t\t\tSetDesiredLabels(map[string]*types.DesiredWorkerLabel{\n\t\t\t\t\t\t"model": {\n\t\t\t\t\t\t\tValue: "fancy-ai-model-v3",\n\t\t\t\t\t\t\tWeight: 10,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t"memory": {\n\t\t\t\t\t\t\tValue: 512,\n\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\tComparator: types.ComparatorPtr(types.WorkerLabelComparator_GREATER_THAN),\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf("pushing event")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"user:create:affinity",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\treturn 
cleanup, nil\n}\n', - source: 'out/go/z_v0/assignment-affinity/run.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/index.ts deleted file mode 100644 index 3042aa1d2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main }; -export { run }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/main.ts deleted file mode 100644 index 9c6bfd497..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\tcleanup, err := run()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("cleanup() error = %v", err))\n\t}\n}\n', - source: 'out/go/z_v0/assignment-sticky/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/run.ts deleted file mode 100644 index 03845af88..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/run.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\t// > StickyWorker\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events("user:create:sticky"),\n\t\t\tName: "sticky",\n\t\t\tDescription: "sticky",\n\t\t\t// 👀 Specify a sticky strategy when declaring the workflow\n\t\t\tStickyStrategy: types.StickyStrategyPtr(types.StickyStrategy_HARD),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tsticky := true\n\n\t\t\t\t\t_, err = ctx.SpawnWorkflow("sticky-child", nil, &worker.SpawnWorkflowOpts{\n\t\t\t\t\t\tSticky: &sticky,\n\t\t\t\t\t})\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf("error spawning workflow: %w", err)\n\t\t\t\t\t}\n\n\t\t\t\t\treturn 
&stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-one"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-two").AddParents("step-one"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-three").AddParents("step-two"),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\t// > StickyChild\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: "sticky-child",\n\t\t\tDescription: "sticky",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf("pushing event")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"user:create:sticky",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/assignment-sticky/run.go', - blocks: { - stickyworker: { - start: 30, - stop: 68, - }, - stickychild: { - start: 75, - stop: 90, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_imports/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_imports/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_imports/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_imports/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_imports/main.ts deleted file mode 100644 index 926cabf6e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_imports/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t_, err = run()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n}\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil 
{\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\terr = testSvc.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events("user:create:bulk"),\n\t\t\tName: "bulk",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-one")\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Username is: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tvar events []client.EventWithAdditionalMetadata\n\n\t// 20000 times to test the bulk push\n\n\tfor i := 0; i < 20000; i++ {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234 " + fmt.Sprint(i),\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test " + fmt.Sprint(i),\n\t\t\t},\n\t\t}\n\t\tevents = append(events, client.EventWithAdditionalMetadata{\n\t\t\tEvent: testEvent,\n\t\t\tAdditionalMetadata: map[string]string{"hello": "world " + fmt.Sprint(i)},\n\t\t\tKey: "user:create:bulk",\n\t\t})\n\t}\n\n\tlog.Printf("pushing event user:create:bulk")\n\n\terr = c.Event().BulkPush(\n\t\tcontext.Background(),\n\t\tevents,\n\t)\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t}\n\n\treturn nil, nil\n\n}\n', - source: 'out/go/z_v0/bulk_imports/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/index.ts deleted file mode 100644 index 3042aa1d2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main }; -export { run }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/main.ts deleted file mode 100644 index 30bd433c9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"log"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tworkflowName := "simple-bulk-workflow"\n\tc, err := client.New()\n\n\tif 
err != nil {\n\t\tpanic(fmt.Errorf("error creating client: %w", err))\n\t}\n\n\t_, err = registerWorkflow(c, workflowName)\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error registering workflow: %w", err))\n\t}\n\n\tquantity := 999\n\n\toverallStart := time.Now()\n\titerations := 10\n\tfor i := 0; i < iterations; i++ {\n\t\tstartTime := time.Now()\n\n\t\tfmt.Printf("Running the %dth bulk workflow \\n", i)\n\n\t\terr = runBulk(workflowName, quantity)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tfmt.Printf("Time taken to queue %dth bulk workflow: %v\\n", i, time.Since(startTime))\n\t}\n\tfmt.Println("Overall time taken: ", time.Since(overallStart))\n\tfmt.Printf("That is %d workflows per second\\n", int(float64(quantity*iterations)/time.Since(overallStart).Seconds()))\n\tfmt.Println("Starting the worker")\n\n\t// err = runSingles(workflowName, quantity)\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error creating client: %w", err))\n\t}\n\n\t// I want to start the workflow worker here\n\n\tw, err := registerWorkflow(c, workflowName)\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error creating worker: %w", err))\n\t}\n\n\tcleanup, err := w.Start()\n\tfmt.Println("Starting the worker")\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error starting worker: %w", err))\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn "my-key", nil\n}\n\nfunc registerWorkflow(c client.Client, workflowName string) (w *worker.Worker, err error) {\n\n\tw, err = worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events("user:create:bulk-simple"),\n\t\t\tName: workflowName,\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(200).LimitStrategy(types.GroupRoundRobin),\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-one")\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Username is: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput("step-one", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-two")\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Above message is: " + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-two").AddParents("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\treturn w, nil\n}\n', - source: 'out/go/z_v0/bulk_workflows/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/run.ts deleted file mode 100644 index e8709b32d..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/bulk_workflows/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"log"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n)\n\nfunc runBulk(workflowName string, quantity int) error {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tlog.Printf("pushing %d workflows in bulk", quantity)\n\n\tvar workflows []*client.WorkflowRun\n\tfor i := 0; i < quantity; i++ {\n\t\tdata := map[string]interface{}{\n\t\t\t"username": fmt.Sprintf("echo-test-%d", i),\n\t\t\t"user_id": fmt.Sprintf("1234-%d", i),\n\t\t}\n\t\tworkflows = append(workflows, &client.WorkflowRun{\n\t\t\tName: workflowName,\n\t\t\tInput: data,\n\t\t\tOptions: []client.RunOptFunc{\n\t\t\t\t// setting a dedupe key so these shouldn\'t all run\n\t\t\t\tclient.WithRunMetadata(map[string]interface{}{\n\t\t\t\t\t// "dedupe": "dedupe1",\n\t\t\t\t}),\n\t\t\t},\n\t\t})\n\n\t}\n\n\touts, err := c.Admin().BulkRunWorkflow(workflows)\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t}\n\n\tfor _, out := range outs {\n\t\tlog.Printf("workflow run id: %v", out)\n\t}\n\n\treturn nil\n\n}\n\nfunc runSingles(workflowName string, quantity int) error {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tlog.Printf("pushing %d single workflows", quantity)\n\n\tvar workflows []*client.WorkflowRun\n\tfor i := 0; i < quantity; i++ {\n\t\tdata := map[string]interface{}{\n\t\t\t"username": fmt.Sprintf("echo-test-%d", i),\n\t\t\t"user_id": fmt.Sprintf("1234-%d", i),\n\t\t}\n\t\tworkflows = append(workflows, &client.WorkflowRun{\n\t\t\tName: workflowName,\n\t\t\tInput: data,\n\t\t\tOptions: []client.RunOptFunc{\n\t\t\t\tclient.WithRunMetadata(map[string]interface{}{\n\t\t\t\t\t// "dedupe": "dedupe1",\n\t\t\t\t}),\n\t\t\t},\n\t\t})\n\t}\n\n\tfor _, wf := range workflows {\n\n\t\tgo func() {\n\t\t\tout, err := c.Admin().RunWorkflow(wf.Name, wf.Input, wf.Options...)\n\t\t\tif err != nil {\n\t\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t\t}\n\n\t\t\tlog.Printf("workflow run id: %v", out)\n\t\t}()\n\n\t}\n\n\treturn nil\n}\n', - source: 'out/go/z_v0/bulk_workflows/run.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/index.ts deleted file mode 100644 index 3042aa1d2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main }; -export { run }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/main.ts deleted file mode 100644 index 5724e9f31..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID 
string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tch := cmdutils.InterruptChan()\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("cleanup() error = %v", err))\n\t}\n}\n', - source: 'out/go/z_v0/cancellation/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/run.ts deleted file mode 100644 index 97b9f7b84..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\t"time"\n\n\t"github.com/google/uuid"\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/rest"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events("user:create:cancellation"),\n\t\t\tName: "cancellation",\n\t\t\tDescription: "cancellation",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tselect {\n\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\tevents <- "done"\n\t\t\t\t\t\tlog.Printf("context cancelled")\n\t\t\t\t\t\treturn nil, nil\n\t\t\t\t\tcase <-time.After(30 * time.Second):\n\t\t\t\t\t\tlog.Printf("workflow never cancelled")\n\t\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\t\tMessage: "done",\n\t\t\t\t\t\t}, nil\n\t\t\t\t\t}\n\t\t\t\t}).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf("pushing event")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"user:create:cancellation",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\n\t\tworkflowName := "cancellation"\n\n\t\tworkflows, err := c.API().WorkflowListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowListParams{\n\t\t\tName: &workflowName,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error listing workflows: %w", err))\n\t\t}\n\n\t\tif workflows.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf("no workflows found"))\n\t\t}\n\n\t\trows := *workflows.JSON200.Rows\n\n\t\tif len(rows) == 0 {\n\t\t\tpanic(fmt.Errorf("no workflows found"))\n\t\t}\n\n\t\tworkflowId := uuid.MustParse(rows[0].Metadata.Id)\n\n\t\tworkflowRuns, err := 
c.API().WorkflowRunListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowRunListParams{\n\t\t\tWorkflowId: &workflowId,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error listing workflow runs: %w", err))\n\t\t}\n\n\t\tif workflowRuns.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf("no workflow runs found"))\n\t\t}\n\n\t\tworkflowRunsRows := *workflowRuns.JSON200.Rows\n\n\t\t_, err = c.API().WorkflowRunCancelWithResponse(context.Background(), uuid.MustParse(c.TenantId()), rest.WorkflowRunsCancelRequest{\n\t\t\tWorkflowRunIds: []uuid.UUID{uuid.MustParse(workflowRunsRows[0].Metadata.Id)},\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error cancelling workflow run: %w", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/cancellation/run.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/main.ts deleted file mode 100644 index 6133266e3..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/compute"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\tpool := "test-pool"\n\tbasicCompute := compute.Compute{\n\t\tPool: &pool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 1,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindSharedCPU,\n\t\tRegions: []compute.Region{compute.Region("ewr")},\n\t}\n\n\tperformancePool := "performance-pool"\n\tperformanceCompute := compute.Compute{\n\t\tPool: &performancePool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 2,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindPerformanceCPU,\n\t\tRegions: []compute.Region{compute.Region("ewr")},\n\t}\n\n\terr = 
w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events("user:create:simple"),\n\t\t\tName: "simple",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tConcurrency: worker.Expression("input.user_id"),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-one")\n\t\t\t\t\tevents <- "step-one"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Username is: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one").SetCompute(&basicCompute),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput("step-one", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-two")\n\t\t\t\t\tevents <- "step-two"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Above message is: " + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-two").AddParents("step-one").SetCompute(&performanceCompute),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf("pushing event user:create:simple")\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"user:create:simple",\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t"hello": "world",\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/compute/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/concurrency/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/concurrency/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/concurrency/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/concurrency/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/concurrency/main.ts deleted file mode 100644 index 422789887..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/concurrency/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"log"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil 
{\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events("user:create:concurrency"),\n\t\t\tName: "simple-concurrency",\n\t\t\tDescription: "This runs to test concurrency.",\n\t\t\tConcurrency: worker.Expression("\'concurrency\'").MaxRuns(1).LimitStrategy(types.GroupRoundRobin),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\t// we sleep to simulate a long running task\n\t\t\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t\t\tif ctx.Err() != nil {\n\t\t\t\t\t\treturn nil, ctx.Err()\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-one")\n\t\t\t\t\tevents <- "step-one"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Username is: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput("step-one", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tif ctx.Err() != nil {\n\t\t\t\t\t\treturn nil, ctx.Err()\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-two")\n\t\t\t\t\tevents <- "step-two"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Above message is: " + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-two").AddParents("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\ttestEvent := userCreateEvent{\n\t\tUsername: "echo-test",\n\t\tUserID: "1234",\n\t\tData: map[string]string{\n\t\t\t"test": "test",\n\t\t},\n\t}\n\tgo func() {\n\t\t// do this 10 times to test concurrency\n\t\tfor i := 0; i < 10; i++ {\n\t\t\twfrID, err := c.Admin().RunWorkflow("simple-concurrency", testEvent)\n\t\t\tif err != nil {\n\t\t\t\tpanic(fmt.Errorf("error running workflow: %w", err))\n\t\t\t}\n\n\t\t\tlog.Println("Starting workflow run id: ", wfrID)\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/concurrency/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron-programmatic/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron-programmatic/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron-programmatic/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron-programmatic/main.ts
b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron-programmatic/main.ts deleted file mode 100644 index 920212d99..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron-programmatic/main.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\n// > Create\n// ... normal workflow definition\ntype printOutput struct{}\n\nfunc print(ctx context.Context) (result *printOutput, err error) {\n\tfmt.Println("called print:print")\n\n\treturn &printOutput{}, nil\n}\n\n// ,\nfunc main() {\n\t// ... initialize client, worker and workflow\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: "cron-workflow",\n\t\t\tDescription: "Demonstrates a simple cron workflow",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(print),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ,\n\n\tgo func() {\n\t\t// 👀 define the cron expression to run every minute\n\t\tcron, err := c.Cron().Create(\n\t\t\tcontext.Background(),\n\t\t\t"cron-workflow",\n\t\t\t&client.CronOpts{\n\t\t\t\tName: "every-minute",\n\t\t\t\tExpression: "* * * * *",\n\t\t\t\tInput: map[string]interface{}{\n\t\t\t\t\t"message": "Hello, world!",\n\t\t\t\t},\n\t\t\t\tAdditionalMetadata: map[string]string{},\n\t\t\t},\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tfmt.Println(*cron.Name, cron.Cron)\n\t}()\n\n\t// ... 
wait for interrupt signal\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n\n\t// ,\n}\n\n\nfunc ListCrons() {\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > List\n\tcrons, err := c.Cron().List(context.Background())\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor _, cron := range *crons.Rows {\n\t\tfmt.Println(cron.Cron, *cron.Name)\n\t}\n}\n\nfunc DeleteCron(id string) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > Delete\n\t// 👀 id is the cron\'s metadata id, can get it via cron.Metadata.Id\n\terr = c.Cron().Delete(context.Background(), id)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n}\n', - source: 'out/go/z_v0/cron-programmatic/main.go', - blocks: { - create: { - start: 15, - stop: 106, - }, - list: { - start: 117, - stop: 117, - }, - delete: { - start: 136, - stop: 137, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron/main.ts deleted file mode 100644 index 37a0985ee..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cron/main.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\n// > Workflow Definition Cron Trigger\n// ... normal workflow definition\ntype printOutput struct{}\n\nfunc print(ctx context.Context) (result *printOutput, err error) {\n\tfmt.Println("called print:print")\n\n\treturn &printOutput{}, nil\n}\n\n// ,\nfunc main() {\n\t// ... initialize client and worker\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ,\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\t// 👀 define the cron expression to run every minute\n\t\t\tOn: worker.Cron("* * * * *"),\n\t\t\tName: "cron-workflow",\n\t\t\tDescription: "Demonstrates a simple cron workflow",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(print),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ... 
start worker\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n\n\t// ,\n}\n\n', - source: 'out/go/z_v0/cron/main.go', - blocks: { - workflow_definition_cron_trigger: { - start: 15, - stop: 84, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/main.ts deleted file mode 100644 index 59dcb588c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\terr = testSvc.On(\n\t\tworker.Events("user:create:simple"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "post-user-update",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(1 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Step 1 got username: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(2 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Step 2 got username: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-two"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-one", step1Out)\n\n\t\t\t\t\tstep2Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-two", 
step2Out)\n\n\t\t\t\t\ttime.Sleep(3 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Username was: " + input.Username + ", Step 3: has parents 1 and 2: " + step1Out.Message + ", " + step2Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-three").AddParents("step-one", "step-two"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-one", step1Out)\n\n\t\t\t\t\tstep3Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-three", step3Out)\n\n\t\t\t\t\ttime.Sleep(4 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Step 4: has parents 1 and 3: " + step1Out.Message + ", " + step3Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-four").AddParents("step-one", "step-three"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep4Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-four", step4Out)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Step 5: has parent 4: " + step4Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-five").AddParents("step-four"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: "echo-test",\n\t\tUserID: "1234",\n\t\tData: map[string]string{\n\t\t\t"test": "test",\n\t\t},\n\t}\n\n\tlog.Printf("pushing event user:create:simple")\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t"user:create:simple",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error pushing event: %w", err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn cleanup()\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', - source: 'out/go/z_v0/dag/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/index.ts deleted file mode 100644 index c9ede1896..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import * as requeue from './requeue'; -import * as schedule_timeout from './schedule-timeout'; -import * as timeout from './timeout'; -import * as yaml from './yaml'; - -export { requeue }; -export { schedule_timeout }; -export { timeout }; -export { yaml }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/.hatchet/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/.hatchet/index.ts deleted file mode 100644 index 25c26c6e8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/.hatchet/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import job_requeue_workflow from './job-requeue-workflow'; - -export { job_requeue_workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.ts deleted file mode 100644 index 439354d89..000000000 ---
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - 'name: "test-step-requeue"\nversion: v0.2.0\ntriggers:\n events:\n - example:event\njobs:\n requeue-job:\n steps:\n - id: requeue\n action: requeue:requeue\n timeout: 10s\n', - source: 'out/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.yaml', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/index.ts deleted file mode 100644 index 28879e4c2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import * as hatchet from './.hatchet'; - -export { main }; -export { hatchet }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/main.ts deleted file mode 100644 index 442e791f1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype sampleEvent struct{}\n\ntype requeueInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction("requeue:requeue", func(ctx context.Context, input *requeueInput) (result any, err error) {\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t"example:event",\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// wait to register the worker for 10 seconds, to let the requeuer kick in\n\ttime.Sleep(10 * time.Second)\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', - source: 'out/go/z_v0/deprecated/requeue/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/index.ts deleted file mode 100644 index 617676721..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import schedule_timeout_workflow from './schedule-timeout-workflow'; - -export { schedule_timeout_workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.ts deleted file mode 100644 index eaae1c0a3..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - 'name: "test-schedule-timeout"\nversion: v0.1.0\ntriggers:\n events:\n - user:create\njobs:\n timeout-job:\n steps:\n - id: timeout\n action: timeout:timeout\n', - source: - 'out/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.yaml', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/index.ts deleted file mode 100644 index 28879e4c2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import * as hatchet from './.hatchet'; - -export { main }; -export { hatchet }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts deleted file mode 100644 index 92751531c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/joho/godotenv"\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t"user:create",\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttime.Sleep(35 * time.Second)\n\n\tfmt.Println("step should have timed out")\n}\n', - source: 'out/go/z_v0/deprecated/schedule-timeout/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/index.ts deleted file mode 100644 index f9bc992ad..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import job_timeout_workflow from './job-timeout-workflow'; -import step_timeout_workflow from './step-timeout-workflow'; - -export { job_timeout_workflow }; -export { step_timeout_workflow }; diff --git 
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.ts deleted file mode 100644 index d5f8076f6..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - 'name: "test-job-timeout"\nversion: v0.1.0\ntriggers:\n events:\n - user:create\njobs:\n timeout-job:\n timeout: 3s\n steps:\n - id: timeout\n action: timeout:timeout\n timeout: 10s\n', - source: 'out/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.yaml', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.ts deleted file mode 100644 index 26802aff7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - 'name: "test-step-timeout"\nversion: v0.1.0\ntriggers:\n events:\n - user:create\njobs:\n timeout-job:\n steps:\n - id: timeout\n action: timeout:timeout\n timeout: 5s\n # This step should not be reached\n - id: later-step\n action: timeout:timeout\n timeout: 5s\n', - source: 'out/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.yaml', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/index.ts deleted file mode 100644 index 28879e4c2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import * as hatchet from './.hatchet'; - -export { main }; -export { hatchet }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/main.ts deleted file mode 100644 index 1ea9f839a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction("timeout:timeout", func(ctx context.Context, input *timeoutInput) (result any, err error) {\n\t\t// wait for context 
done signal\n\t\ttimeStart := time.Now().UTC()\n\t\t<-ctx.Done()\n\t\tfmt.Println("context cancelled in ", time.Since(timeStart).Seconds(), " seconds")\n\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error starting worker: %w", err))\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t"user:create",\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', - source: 'out/go/z_v0/deprecated/timeout/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/.hatchet/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/.hatchet/index.ts deleted file mode 100644 index d62894b81..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/.hatchet/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import sample_workflow from './sample-workflow'; - -export { sample_workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.ts deleted file mode 100644 index 7b30c8e68..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - 'name: "post-user-sign-up"\nversion: v0.2.0\ntriggers:\n events:\n - user:create\njobs:\n print-user:\n steps:\n - id: echo1\n action: echo:echo\n timeout: 60s\n with:\n message: "Username is {{ .input.username }}"\n - id: echo2\n action: echo:echo\n timeout: 60s\n with:\n message: "Above message is: {{ .steps.echo1.message }}"\n - id: echo3\n action: echo:echo\n timeout: 60s\n with:\n message: "Above message is: {{ .steps.echo2.message }}"\n - id: testObject\n action: echo:object\n timeout: 60s\n with:\n object: "{{ .steps.echo3.json }}"\n', - source: 'out/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.yaml', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/README.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/README.ts deleted file mode 100644 index 105472637..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/README.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - "## YAML Workflow Example\n\nThis example shows how you can create a YAML file in your repository to define the structure of a workflow. This example runs the [sample-workflow.yaml](./.hatchet/sample-workflow.yaml).\n\n## Explanation\n\nThis folder contains a demo example of a workflow that simply echoes the input message as an output. 
The workflow file showcases the following features:\n\n- Running a simple job with a set of dependent steps\n- Variable references within step arguments -- each subsequent step in a workflow can call `.steps.<step-id>.<variable>` to access output arguments\n\n## How to run\n\nNavigate to this directory and run the following steps:\n\n1. Make sure you have a Hatchet server running (see the instructions [here](../../README.md)). After running `task seed`, grab the tenant ID which is output to the console.\n2. Set your environment variables -- if you're using the bundled Hatchet server, this will look like:\n\n```sh\ncat > .env <<EOF\nHATCHET_CLIENT_TENANT_ID=<tenant-id>\nHATCHET_CLIENT_TLS_ROOT_CA_FILE=../../hack/dev/certs/ca.cert\nHATCHET_CLIENT_TLS_CERT_FILE=../../hack/dev/certs/client-worker.pem\nHATCHET_CLIENT_TLS_KEY_FILE=../../hack/dev/certs/client-worker.key\nHATCHET_CLIENT_TLS_SERVER_NAME=cluster\nEOF\n```\n\n3. Run the following within this directory:\n\n```sh\ngo run main.go\n```\n", - source: 'out/go/z_v0/deprecated/yaml/README.md', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/index.ts deleted file mode 100644 index 8ca06044a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import readme from './README'; -import main from './main'; -import * as hatchet from './.hatchet'; - -export { readme }; -export { main }; -export { hatchet }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/main.ts deleted file mode 100644 index 10fa54291..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserId string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype actionInput struct {\n\tMessage string `json:"message"`\n}\n\ntype actionOut struct {\n\tMessage string `json:"message"`\n}\n\nfunc echo(ctx context.Context, input *actionInput) (result *actionOut, err error) {\n\treturn &actionOut{\n\t\tMessage: input.Message,\n\t}, nil\n}\n\nfunc object(ctx context.Context, input *userCreateEvent) error {\n\treturn nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\techoSvc := worker.NewService("echo")\n\n\terr = echoSvc.RegisterAction(echo)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = echoSvc.RegisterAction(object)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error starting worker: %w", err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: "echo-test",\n\t\tUserId:
"1234",\n\t\tData: map[string]string{\n\t\t\t"test": "test",\n\t\t},\n\t}\n\n\ttime.Sleep(1 * time.Second)\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t"user:create",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up worker: %w", err))\n\t}\n}\n', - source: 'out/go/z_v0/deprecated/yaml/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/main.ts deleted file mode 100644 index 86ad59a9f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"os"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/errors/sentry"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserId string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc StepOne(ctx context.Context) (result *stepOneOutput, err error) {\n\treturn nil, fmt.Errorf("this is an error")\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsentryAlerter, err := sentry.NewSentryAlerter(&sentry.SentryAlerterOpts{\n\t\tDSN: os.Getenv("SENTRY_DSN"),\n\t\tEnvironment: os.Getenv("SENTRY_ENVIRONMENT"),\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t\tworker.WithErrorAlerter(sentryAlerter),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.On(worker.Event("user:create"), &worker.WorkflowJob{\n\t\tName: "failing-workflow",\n\t\tDescription: "This is a failing workflow.",\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\t{\n\t\t\t\tFunction: StepOne,\n\t\t\t},\n\t\t},\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction("echo:echo", func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t"message": input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction("echo:object", func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error starting worker: %w", err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: "echo-test",\n\t\tUserId: "1234",\n\t\tData: map[string]string{\n\t\t\t"test": 
"test",\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t"user:create",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n}\n', - source: 'out/go/z_v0/errors-test/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/index.ts deleted file mode 100644 index 1ec6b8b90..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/index.ts +++ /dev/null @@ -1,61 +0,0 @@ -import * as assignment_affinity from './assignment-affinity'; -import * as assignment_sticky from './assignment-sticky'; -import * as bulk_imports from './bulk_imports'; -import * as bulk_workflows from './bulk_workflows'; -import * as cancellation from './cancellation'; -import * as compute from './compute'; -import * as concurrency from './concurrency'; -import * as cron from './cron'; -import * as cron_programmatic from './cron-programmatic'; -import * as dag from './dag'; -import * as deprecated from './deprecated'; -import * as errors_test from './errors-test'; -import * as limit_concurrency from './limit-concurrency'; -import * as logging from './logging'; -import * as manual_trigger from './manual-trigger'; -import * as middleware from './middleware'; -import * as namespaced from './namespaced'; -import * as no_tls from './no-tls'; -import * as on_failure from './on-failure'; -import * as procedural from './procedural'; -import * as rate_limit from './rate-limit'; -import * as register_action from './register-action'; -import * as retries from './retries'; -import * as retries_with_backoff from './retries-with-backoff'; -import * as scheduled from './scheduled'; -import * as simple from './simple'; -import * as stream_event from './stream-event'; -import * as stream_event_by_meta from './stream-event-by-meta'; -import * as timeout from './timeout'; -import * as webhook from './webhook'; - -export { assignment_affinity }; -export { assignment_sticky }; -export { bulk_imports }; -export { bulk_workflows }; -export { cancellation }; -export { compute }; -export { concurrency }; -export { cron }; -export { cron_programmatic }; -export { dag }; -export { deprecated }; -export { errors_test }; -export { limit_concurrency }; -export { logging }; -export { manual_trigger }; -export { middleware }; -export { namespaced }; -export { no_tls }; -export { on_failure }; -export { procedural }; -export { rate_limit }; -export { register_action }; -export { retries }; -export { retries_with_backoff }; -export { scheduled }; -export { simple }; -export { stream_event }; -export { stream_event_by_meta }; -export { timeout }; -export { webhook }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts deleted 
file mode 100644 index 8625ba930..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype concurrencyLimitEvent struct {\n\tIndex int `json:"index"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn "user-create", nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\terr = testSvc.On(\n\t\tworker.Events("concurrency-test-event"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "concurrency-limit",\n\t\t\tDescription: "This limits concurrency to 1 run at a time.",\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(1),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\t<-ctx.Done()\n\t\t\t\t\tfmt.Println("context done, returning")\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println("interrupted")\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t\t\t}\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfirstEvent := concurrencyLimitEvent{\n\t\t\tIndex: 0,\n\t\t}\n\n\t\t// push an event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"concurrency-test-event",\n\t\t\tfirstEvent,\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println("interrupted")\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): // timeout\n\t\t}\n\n\t\t// push a second event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"concurrency-test-event",\n\t\t\tconcurrencyLimitEvent{\n\t\t\t\tIndex: 1,\n\t\t\t},\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', - source: 'out/go/z_v0/limit-concurrency/cancel-in-progress/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git 
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/main.ts deleted file mode 100644 index 2c8844367..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"sync"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype concurrencyLimitEvent struct {\n\tConcurrencyKey string `json:"concurrency_key"`\n\tUserId int `json:"user_id"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n\tConcurrencyWhenFinished int `json:"concurrency_when_finished"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tctx, cancel := cmdutils.NewInterruptContext()\n\tdefer cancel()\n\n\tif err := run(ctx); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn "concurrency", nil\n}\n\nvar done = make(chan struct{})\nvar errChan = make(chan error)\n\nvar workflowCount int\nvar countMux sync.Mutex\n\nfunc run(ctx context.Context) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\t// runningCount := 0\n\n\tcountMux := sync.Mutex{}\n\n\tvar countMap = make(map[string]int)\n\tmaxConcurrent := 2\n\n\terr = w.RegisterWorkflow(\n\n\t\t&worker.WorkflowJob{\n\t\t\tName: "concurrency-limit-round-robin-existing-workflows",\n\t\t\tDescription: "This limits concurrency to maxConcurrent runs at a time.",\n\t\t\tOn: worker.Events("test:concurrency-limit-round-robin-existing-workflows"),\n\t\t\tConcurrency: worker.Expression("input.concurrency_key").MaxRuns(int32(maxConcurrent)).LimitStrategy(types.GroupRoundRobin),\n\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &concurrencyLimitEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf("error getting input: %w", err)\n\t\t\t\t\t}\n\t\t\t\t\tconcurrencyKey := input.ConcurrencyKey\n\t\t\t\t\tcountMux.Lock()\n\n\t\t\t\t\tif countMap[concurrencyKey]+1 > maxConcurrent {\n\t\t\t\t\t\tcountMux.Unlock()\n\t\t\t\t\t\te := fmt.Errorf("concurrency limit exceeded for %d we have %d workers running", input.UserId, countMap[concurrencyKey])\n\t\t\t\t\t\terrChan <- e\n\t\t\t\t\t\treturn nil, 
e\n\t\t\t\t\t}\n\t\t\t\t\tcountMap[concurrencyKey]++\n\n\t\t\t\t\tcountMux.Unlock()\n\n\t\t\t\t\tfmt.Println("received event", input.UserId)\n\n\t\t\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t\t\tfmt.Println("processed event", input.UserId)\n\n\t\t\t\t\tcountMux.Lock()\n\t\t\t\t\tcountMap[concurrencyKey]--\n\t\t\t\t\tcountMux.Unlock()\n\n\t\t\t\t\tdone <- struct{}{}\n\n\t\t\t\t\treturn &stepOneOutput{}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\tvar workflowRuns []*client.WorkflowRun\n\n\t\tfor i := 0; i < 1; i++ {\n\t\t\tworkflowCount++\n\t\t\tevent := concurrencyLimitEvent{\n\t\t\t\tConcurrencyKey: "key",\n\t\t\t\tUserId: i,\n\t\t\t}\n\t\t\tworkflowRuns = append(workflowRuns, &client.WorkflowRun{\n\t\t\t\tName: "concurrency-limit-round-robin-existing-workflows",\n\t\t\t\tInput: event,\n\t\t\t})\n\n\t\t}\n\n\t\t// create a second one with a different key\n\n\t\t// so the bug we are testing here is that total concurrency for any one group should be 2\n\t\t// but if we have more than one group we end up with 4 running when only 2 + 1 are eligible to run\n\n\t\tfor i := 0; i < 3; i++ {\n\t\t\tworkflowCount++\n\n\t\t\tevent := concurrencyLimitEvent{\n\t\t\t\tConcurrencyKey: "secondKey",\n\t\t\t\tUserId: i,\n\t\t\t}\n\t\t\tworkflowRuns = append(workflowRuns, &client.WorkflowRun{\n\t\t\t\tName: "concurrency-limit-round-robin-existing-workflows",\n\t\t\t\tInput: event,\n\t\t\t})\n\n\t\t}\n\n\t\t_, err := c.Admin().BulkRunWorkflow(workflowRuns)\n\t\tif err != nil {\n\t\t\tfmt.Println("error running workflow", err)\n\t\t}\n\n\t\tfmt.Println("ran workflows")\n\n\t}()\n\n\ttime.Sleep(2 * time.Second)\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf("error starting worker: %w", err)\n\t}\n\tdefer cleanup()\n\n\tfor {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\treturn nil\n\t\tcase <-time.After(20 * time.Second):\n\t\t\treturn fmt.Errorf("timeout")\n\t\tcase err := <-errChan:\n\t\t\treturn err\n\t\tcase <-done:\n\t\t\tcountMux.Lock()\n\t\t\tworkflowCount--\n\t\t\tcountMux.Unlock()\n\t\t\tif workflowCount == 0 {\n\t\t\t\ttime.Sleep(1 * time.Second)\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t}\n\t}\n}\n', - source: 'out/go/z_v0/limit-concurrency/group-round-robin-advanced/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts deleted file mode 100644 index 987217bbc..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport 
(\n\t"context"\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype concurrencyLimitEvent struct {\n\tUserId int `json:"user_id"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\tinput := &concurrencyLimitEvent{}\n\terr := ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn "", fmt.Errorf("error getting input: %w", err)\n\t}\n\n\treturn fmt.Sprintf("%d", input.UserId), nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\terr = testSvc.On(\n\t\tworker.Events("concurrency-test-event-rr"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "concurrency-limit-round-robin",\n\t\t\tDescription: "This limits concurrency to 2 runs at a time.",\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(2).LimitStrategy(types.GroupRoundRobin),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &concurrencyLimitEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf("error getting input: %w", err)\n\t\t\t\t\t}\n\n\t\t\t\t\tfmt.Println("received event", input.UserId)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\tfmt.Println("processed event", input.UserId)\n\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println("interrupted")\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfor i := 0; i < 20; i++ {\n\t\t\tvar event concurrencyLimitEvent\n\n\t\t\tif i < 10 {\n\t\t\t\tevent = concurrencyLimitEvent{0}\n\t\t\t} else {\n\t\t\t\tevent = concurrencyLimitEvent{1}\n\t\t\t}\n\n\t\t\tc.Event().Push(context.Background(), "concurrency-test-event-rr", event)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println("interrupted")\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): //timeout\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\treturn fmt.Errorf("error cleaning up: %w", err)\n\t\t\t}\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', - source: 'out/go/z_v0/limit-concurrency/group-round-robin/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git 
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/index.ts deleted file mode 100644 index 3d31439c8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import * as cancel_in_progress from './cancel-in-progress'; -import * as group_round_robin from './group-round-robin'; -import * as group_round_robin_advanced from './group-round-robin-advanced'; - -export { cancel_in_progress }; -export { group_round_robin }; -export { group_round_robin_advanced }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/main.ts deleted file mode 100644 index 836e447a0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events("user:log:simple"),\n\t\t\tName: "simple",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tConcurrency: worker.Expression("input.user_id"),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-one")\n\t\t\t\t\tevents <- "step-one"\n\n\t\t\t\t\tfor i := 0; i < 1000; i++ {\n\t\t\t\t\t\tctx.Log(fmt.Sprintf("step-one: %d", i))\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Username is: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", 
err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf("pushing event user:create:simple")\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"user:log:simple",\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t"hello": "world",\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/logging/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/index.ts deleted file mode 100644 index 897a74a46..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import * as trigger from './trigger'; -import * as worker from './worker'; - -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/trigger/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/trigger/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/trigger/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/trigger/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/trigger/main.ts deleted file mode 100644 index 729545a95..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/trigger/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating client: %w", err)\n\t}\n\n\ttime.Sleep(1 * time.Second)\n\n\t// trigger workflow\n\tworkflow, err := c.Admin().RunWorkflow(\n\t\t"post-user-update",\n\t\t&userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t},\n\t\tclient.WithRunMetadata(map[string]interface{}{\n\t\t\t"hello": "world",\n\t\t}),\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error running workflow: %w", err)\n\t}\n\n\tfmt.Println("workflow run id:", workflow.WorkflowRunId())\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\terr = c.Subscribe().On(interruptCtx, workflow.WorkflowRunId(), func(event client.WorkflowEvent) error 
{\n\t\tfmt.Println(event.EventPayload)\n\n\t\treturn nil\n\t})\n\n\treturn err\n}\n', - source: 'out/go/z_v0/manual-trigger/trigger/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/worker/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/worker/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/worker/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/worker/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/worker/main.ts deleted file mode 100644 index 5c7357f99..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/manual-trigger/worker/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\terr = testSvc.On(\n\t\tworker.Events("user:create:simple"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "post-user-update",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(1 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Step 1 got username: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(2 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Step 2 got username: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-two"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-one", step1Out)\n\n\t\t\t\t\tstep2Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-two", step2Out)\n\n\t\t\t\t\ttime.Sleep(3 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Step 3: has parents 1 and 2:" + step1Out.Message + ", " + step2Out.Message,\n\t\t\t\t\t}, 
nil\n\t\t\t\t}).SetName("step-three").AddParents("step-one", "step-two"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-one", step1Out)\n\n\t\t\t\t\tstep3Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-three", step3Out)\n\n\t\t\t\t\ttime.Sleep(4 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Step 4: has parents 1 and 3" + step1Out.Message + ", " + step3Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-four").AddParents("step-one", "step-three"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep4Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput("step-four", step4Out)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: "Step 5: has parent 4" + step4Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-five").AddParents("step-four"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\treturn fmt.Errorf("error cleaning up: %w", err)\n\t}\n\n\treturn nil\n}\n', - source: 'out/go/z_v0/manual-trigger/worker/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/index.ts deleted file mode 100644 index 3042aa1d2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main }; -export { run }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/main.ts deleted file mode 100644 index ce6384183..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tch := cmdutils.InterruptChan()\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("cleanup() error = %v", err))\n\t}\n}\n', - source: 'out/go/z_v0/middleware/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/run.ts deleted file mode 100644 index 1d2a3e3ee..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 
'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf("1st-middleware")\n\t\tevents <- "1st-middleware"\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), "testkey", "testvalue"))\n\t\treturn next(ctx)\n\t})\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf("2nd-middleware")\n\t\tevents <- "2nd-middleware"\n\n\t\t// time the function duration\n\t\tstart := time.Now()\n\t\terr := next(ctx)\n\t\tduration := time.Since(start)\n\t\tfmt.Printf("step function took %s\\n", duration)\n\t\treturn err\n\t})\n\n\ttestSvc := w.NewService("test")\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tevents <- "svc-middleware"\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), "svckey", "svcvalue"))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.On(\n\t\tworker.Events("user:create:middleware"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "middleware",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-one")\n\t\t\t\t\tevents <- "step-one"\n\n\t\t\t\t\ttestVal := ctx.Value("testkey").(string)\n\t\t\t\t\tevents <- testVal\n\t\t\t\t\tsvcVal := ctx.Value("svckey").(string)\n\t\t\t\t\tevents <- svcVal\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Username is: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput("step-one", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-two")\n\t\t\t\t\tevents <- "step-two"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Above message is: " + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-two").AddParents("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf("pushing event user:create:middleware")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"user:create:middleware",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/middleware/run.go', - blocks: {}, - highlights: {}, -}; - -export 
default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/main.ts deleted file mode 100644 index f4f5d9c99..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn "user-create", nil\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New(\n\t\tclient.WithNamespace("sample"),\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\terr = testSvc.On(\n\t\tworker.Events("user:create:simple"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "simple",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-one")\n\t\t\t\t\tevents <- "step-one"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Username is: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput("step-one", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-two")\n\t\t\t\t\tevents <- "step-two"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Above message is: " + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-two").AddParents("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := 
userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf("pushing event user:create:simple")\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"user:create:simple",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/namespaced/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/no-tls/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/no-tls/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/no-tls/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/no-tls/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/no-tls/main.ts deleted file mode 100644 index 63b01be28..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/no-tls/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype stepOutput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(fmt.Sprintf("error creating client: %v", err))\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf("error creating worker: %v", err))\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\terr = testSvc.On(\n\t\tworker.Events("simple"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "simple-workflow",\n\t\t\tDescription: "Simple one-step workflow.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tfmt.Println("executed step 1")\n\n\t\t\t\t\treturn &stepOutput{}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf("error registering workflow: %v", err))\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Sprintf("error starting worker: %v", err))\n\t}\n\n\t<-interruptCtx.Done()\n\tif err := cleanup(); err != nil {\n\t\tpanic(err)\n\t}\n}\n', - source: 'out/go/z_v0/no-tls/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/on-failure/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/on-failure/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/on-failure/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/on-failure/main.ts 
b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/on-failure/main.ts deleted file mode 100644 index 433b0bef4..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/on-failure/main.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\n// > OnFailure Step\n// This workflow will fail because the step will throw an error\n// we define an onFailure step to handle this case\n\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t// 👀 this step will always raise an exception\n\treturn nil, fmt.Errorf("test on failure")\n}\n\nfunc OnFailure(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t// run cleanup code or notifications here\n\n\t// 👀 you can access the error from the failed step(s) like this\n\tfmt.Println(ctx.StepRunErrors())\n\n\treturn &stepOneOutput{\n\t\tMessage: "Failure!",\n\t}, nil\n}\n\nfunc main() {\n\t// ...\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// 👀 we define an onFailure step to handle this case\n\terr = w.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "on-failure-workflow",\n\t\t\tDescription: "This runs at a scheduled time.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName("step-one"),\n\t\t\t},\n\t\t\tOnFailure: &worker.WorkflowJob{\n\t\t\t\tName: "scheduled-workflow-failure",\n\t\t\t\tDescription: "This runs when the scheduled workflow fails.",\n\t\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\t\tworker.Fn(OnFailure).SetName("on-failure"),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t)\n\n\t// ...\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n\t// ,\n}\n\n', - source: 'out/go/z_v0/on-failure/main.go', - blocks: { - onfailure_step: { - start: 19, - stop: 108, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/procedural/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/procedural/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/procedural/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/procedural/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/procedural/main.ts deleted file mode 100644 index ba62114c8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/procedural/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { 
Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"sync"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\t"golang.org/x/sync/errgroup"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\nconst NUM_CHILDREN = 50\n\ntype proceduralChildInput struct {\n\tIndex int `json:"index"`\n}\n\ntype proceduralChildOutput struct {\n\tIndex int `json:"index"`\n}\n\ntype proceduralParentOutput struct {\n\tChildSum int `json:"child_sum"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 5*NUM_CHILDREN)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\terr = testSvc.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "procedural-parent-workflow",\n\t\t\tDescription: "This is a test of procedural workflows.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(\n\t\t\t\t\tfunc(ctx worker.HatchetContext) (result *proceduralParentOutput, err error) {\n\t\t\t\t\t\tchildWorkflows := make([]*client.Workflow, NUM_CHILDREN)\n\n\t\t\t\t\t\tfor i := 0; i < NUM_CHILDREN; i++ {\n\t\t\t\t\t\t\tchildInput := proceduralChildInput{\n\t\t\t\t\t\t\t\tIndex: i,\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tchildWorkflow, err := ctx.SpawnWorkflow("procedural-child-workflow", childInput, &worker.SpawnWorkflowOpts{\n\t\t\t\t\t\t\t\tAdditionalMetadata: &map[string]string{\n\t\t\t\t\t\t\t\t\t"childKey": "childValue",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t})\n\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tchildWorkflows[i] = childWorkflow\n\n\t\t\t\t\t\t\tevents <- fmt.Sprintf("child-%d-started", i)\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\teg := errgroup.Group{}\n\n\t\t\t\t\t\teg.SetLimit(NUM_CHILDREN)\n\n\t\t\t\t\t\tchildOutputs := make([]int, 0)\n\t\t\t\t\t\tchildOutputsMu := sync.Mutex{}\n\n\t\t\t\t\t\tfor i, childWorkflow := range childWorkflows {\n\t\t\t\t\t\t\teg.Go(func(i int, childWorkflow *client.Workflow) func() error {\n\t\t\t\t\t\t\t\treturn func() error {\n\t\t\t\t\t\t\t\t\tchildResult, err := childWorkflow.Result()\n\n\t\t\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tchildOutput := proceduralChildOutput{}\n\n\t\t\t\t\t\t\t\t\terr = childResult.StepOutput("step-one", &childOutput)\n\n\t\t\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tchildOutputsMu.Lock()\n\t\t\t\t\t\t\t\t\tchildOutputs = append(childOutputs, childOutput.Index)\n\t\t\t\t\t\t\t\t\tchildOutputsMu.Unlock()\n\n\t\t\t\t\t\t\t\t\tevents <- fmt.Sprintf("child-%d-completed", childOutput.Index)\n\n\t\t\t\t\t\t\t\t\treturn nil\n\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}(i, childWorkflow))\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tfinishedCh := make(chan struct{})\n\n\t\t\t\t\t\tgo func() 
{\n\t\t\t\t\t\t\tdefer close(finishedCh)\n\t\t\t\t\t\t\terr = eg.Wait()\n\t\t\t\t\t\t}()\n\n\t\t\t\t\t\ttimer := time.NewTimer(60 * time.Second)\n\n\t\t\t\t\t\tselect {\n\t\t\t\t\t\tcase <-finishedCh:\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\tcase <-timer.C:\n\t\t\t\t\t\t\tincomplete := make([]int, 0)\n\t\t\t\t\t\t\t// print non-complete children\n\t\t\t\t\t\t\tfor i := range childWorkflows {\n\t\t\t\t\t\t\t\tcompleted := false\n\t\t\t\t\t\t\t\tfor _, childOutput := range childOutputs {\n\t\t\t\t\t\t\t\t\tif childOutput == i {\n\t\t\t\t\t\t\t\t\t\tcompleted = true\n\t\t\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tif !completed {\n\t\t\t\t\t\t\t\t\tincomplete = append(incomplete, i)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\treturn nil, fmt.Errorf("timed out waiting for the following child workflows to complete: %v", incomplete)\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tsum := 0\n\n\t\t\t\t\t\tfor _, childOutput := range childOutputs {\n\t\t\t\t\t\t\tsum += childOutput\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn &proceduralParentOutput{\n\t\t\t\t\t\t\tChildSum: sum,\n\t\t\t\t\t\t}, nil\n\t\t\t\t\t},\n\t\t\t\t).SetTimeout("10m"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\terr = testSvc.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "procedural-child-workflow",\n\t\t\tDescription: "This is a test of procedural workflows.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(\n\t\t\t\t\tfunc(ctx worker.HatchetContext) (result *proceduralChildOutput, err error) {\n\t\t\t\t\t\tinput := proceduralChildInput{}\n\n\t\t\t\t\t\terr = ctx.WorkflowInput(&input)\n\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn &proceduralChildOutput{\n\t\t\t\t\t\t\tIndex: input.Index,\n\t\t\t\t\t\t}, nil\n\t\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\ttime.Sleep(1 * time.Second)\n\n\t\t_, err := c.Admin().RunWorkflow("procedural-parent-workflow", nil)\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error running workflow: %w", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/procedural/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/rate-limit/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/rate-limit/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/rate-limit/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/rate-limit/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/rate-limit/main.ts deleted file mode 100644 index 3fe95b4eb..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/rate-limit/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport 
(\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/client/types"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype rateLimitInput struct {\n\tIndex int `json:"index"`\n\tUserId string `json:"user_id"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &rateLimitInput{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tctx.StreamEvent([]byte(fmt.Sprintf("This is a stream event %d", input.Index)))\n\n\treturn &stepOneOutput{\n\t\tMessage: fmt.Sprintf("This ran at %s", time.Now().String()),\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = c.Admin().PutRateLimit("api1", &types.RateLimitOpts{\n\t\tMax: 12,\n\t\tDuration: types.Minute,\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tunitExpr := "int(input.index) + 1"\n\tkeyExpr := "input.user_id"\n\tlimitValueExpr := "3"\n\n\terr = w.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "rate-limit-workflow",\n\t\t\tDescription: "This illustrates rate limiting.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName("step-one").SetRateLimit(\n\t\t\t\t\tworker.RateLimit{\n\t\t\t\t\t\tKey: "per-user-rate-limit",\n\t\t\t\t\t\tKeyExpr: &keyExpr,\n\t\t\t\t\t\tUnitsExpr: &unitExpr,\n\t\t\t\t\t\tLimitValueExpr: &limitValueExpr,\n\t\t\t\t\t},\n\t\t\t\t),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor i := 0; i < 12; i++ {\n\t\tfor j := 0; j < 3; j++ {\n\t\t\t_, err = c.Admin().RunWorkflow("rate-limit-workflow", &rateLimitInput{\n\t\t\t\tIndex: j,\n\t\t\t\tUserId: fmt.Sprintf("user-%d", i),\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t}\n\t}\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n}\n', - source: 'out/go/z_v0/rate-limit/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/main.ts deleted file mode 100644 index 814c37c38..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport 
(\n\t"context"\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserId string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc StepOne(ctx context.Context, input *userCreateEvent) (result *stepOneOutput, err error) {\n\t// could get from context\n\t// testVal := ctx.Value("testkey").(string)\n\t// svcVal := ctx.Value("svckey").(string)\n\n\treturn &stepOneOutput{\n\t\tMessage: "Username is: " + input.Username,\n\t}, nil\n}\n\nfunc StepTwo(ctx context.Context, input *stepOneOutput) (result *stepOneOutput, err error) {\n\treturn &stepOneOutput{\n\t\tMessage: "Above message is: " + input.Message,\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), "testkey", "testvalue"))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.RegisterAction(StepOne, worker.WithActionName("step-one"))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.RegisterAction(StepTwo, worker.WithActionName("step-two"))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.On(\n\t\tworker.Events("user:create", "user:update"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "post-user-update",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\t// example of calling a registered action from the worker (includes service name)\n\t\t\t\tw.Call("test:step-one"),\n\t\t\t\t// example of calling a registered action from a service\n\t\t\t\ttestSvc.Call("step-two"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction("echo:echo", func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t"message": input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction("echo:object", func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: "echo-test",\n\t\tUserId: "1234",\n\t\tData: map[string]string{\n\t\t\t"test": "test",\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t"user:create",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interrupt:\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t\t\t}\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', - source: 'out/go/z_v0/register-action/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git 
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries-with-backoff/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries-with-backoff/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries-with-backoff/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries-with-backoff/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries-with-backoff/main.ts deleted file mode 100644 index 3ed000368..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries-with-backoff/main.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\n// > Backoff\n\n// ... normal function definition\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tif ctx.RetryCount() < 3 {\n\t\treturn nil, fmt.Errorf("failure")\n\t}\n\n\treturn &stepOneOutput{\n\t\tMessage: "Success!",\n\t}, nil\n}\n\n// ,\n\nfunc main() {\n\t// ...\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ,\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tName: "retry-with-backoff-workflow",\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tDescription: "Demonstrates retry with exponential backoff.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName("with-backoff").\n\t\t\t\t\tSetRetries(10).\n\t\t\t\t\t// 👀 Backoff configuration\n\t\t\t\t\t// 👀 Maximum number of seconds to wait between retries\n\t\t\t\t\tSetRetryBackoffFactor(2.0).\n\t\t\t\t\t// 👀 Factor to increase the wait time between retries.\n\t\t\t\t\t// This sequence will be 2s, 4s, 8s, 16s, 32s, 60s... 
due to the maxSeconds limit\n\t\t\t\t\tSetRetryMaxBackoffSeconds(60),\n\t\t\t},\n\t\t},\n\t)\n\n\t// ...\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n\n\t<-interruptCtx.Done()\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n\n\t// ,\n}\n\n', - source: 'out/go/z_v0/retries-with-backoff/main.go', - blocks: { - backoff: { - start: 18, - stop: 98, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/main.ts deleted file mode 100644 index 313486dda..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn "user-create", nil\n}\n\ntype retryWorkflow struct {\n\tretries int\n}\n\nfunc (r *retryWorkflow) StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &userCreateEvent{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif r.retries < 2 {\n\t\tr.retries++\n\t\treturn nil, fmt.Errorf("error")\n\t}\n\n\tlog.Printf("finished step-one")\n\treturn &stepOneOutput{\n\t\tMessage: "Username is: " + input.Username,\n\t}, nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\ttestSvc := w.NewService("test")\n\n\twk := &retryWorkflow{}\n\n\terr = testSvc.On(\n\t\tworker.Events("user:create:simple"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "simple",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(wk.StepOne).SetName("step-one").SetRetries(4),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn 
fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: "echo-test",\n\t\tUserID: "1234",\n\t\tData: map[string]string{\n\t\t\t"test": "test",\n\t\t},\n\t}\n\n\tlog.Printf("pushing event user:create:simple")\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t"user:create:simple",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf("error pushing event: %w", err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\treturn fmt.Errorf("error cleaning up worker: %w", err)\n\t}\n\n\treturn nil\n}\n', - source: 'out/go/z_v0/retries/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/scheduled/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/scheduled/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/scheduled/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/scheduled/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/scheduled/main.ts deleted file mode 100644 index 66f701fe3..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/scheduled/main.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\n// > Create\n// ... normal workflow definition\ntype printOutput struct{}\n\nfunc print(ctx context.Context) (result *printOutput, err error) {\n\tfmt.Println("called print:print")\n\n\treturn &printOutput{}, nil\n}\n\n// ,\nfunc main() {\n\t// ... initialize client, worker and workflow\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: "schedule-workflow",\n\t\t\tDescription: "Demonstrates a simple scheduled workflow",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(print),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ,\n\n\tgo func() {\n\t\t// 👀 define the scheduled workflow to run in a minute\n\t\tschedule, err := c.Schedule().Create(\n\t\t\tcontext.Background(),\n\t\t\t"schedule-workflow",\n\t\t\t&client.ScheduleOpts{\n\t\t\t\t// 👀 define the time to run the scheduled workflow, in UTC\n\t\t\t\tTriggerAt: time.Now().UTC().Add(time.Minute),\n\t\t\t\tInput: map[string]interface{}{\n\t\t\t\t\t"message": "Hello, world!",\n\t\t\t\t},\n\t\t\t\tAdditionalMetadata: map[string]string{},\n\t\t\t},\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tfmt.Println(schedule.TriggerAt, schedule.WorkflowName)\n\t}()\n\n\t// ... 
wait for interrupt signal\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n\n\t// ,\n}\n\n\nfunc ListScheduledWorkflows() {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > List\n\tschedules, err := c.Schedule().List(context.Background())\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor _, schedule := range *schedules.Rows {\n\t\tfmt.Println(schedule.TriggerAt, schedule.WorkflowName)\n\t}\n}\n\nfunc DeleteScheduledWorkflow(id string) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > Delete\n\t// 👀 id is the schedule\'s metadata id, can get it via schedule.Metadata.Id\n\terr = c.Schedule().Delete(context.Background(), id)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n', - source: 'out/go/z_v0/scheduled/main.go', - blocks: { - create: { - start: 16, - stop: 107, - }, - list: { - start: 117, - stop: 117, - }, - delete: { - start: 136, - stop: 137, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/main.ts deleted file mode 100644 index fbbcb21b0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events("user:create:simple"),\n\t\t\tName: "simple",\n\t\t\tDescription: "This runs after an update to the user model.",\n\t\t\tConcurrency: worker.Expression("input.user_id"),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, 
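The scheduled example deleted above creates a one-off schedule with c.Schedule().Create and a TriggerAt one minute out, then demonstrates List and Delete. The create step has a direct Python counterpart, adapted from child/trigger.py later in this patch (a sketch; the listing and deletion calls are not shown on the Python side in this diff):

from datetime import datetime, timedelta, timezone

from examples.child.worker import SimpleInput, child_task

# Run the task once, five minutes from now, with the trigger time in UTC.
child_task.schedule(
    datetime.now(tz=timezone.utc) + timedelta(minutes=5),
    SimpleInput(message="Hello, world!"),
)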
err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-one")\n\t\t\t\t\tevents <- "step-one"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Username is: " + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName("step-one"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput("step-one", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf("step-two")\n\t\t\t\t\tevents <- "step-two"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: "Above message is: " + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName("step-two").AddParents("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf("pushing event user:create:simple")\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"user:create:simple",\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t"hello": "world",\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/simple/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event-by-meta/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event-by-meta/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event-by-meta/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event-by-meta/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event-by-meta/main.ts deleted file mode 100644 index 99bff8c9d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event-by-meta/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"math/rand/v2"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype streamEventInput struct {\n\tIndex int `json:"index"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &streamEventInput{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tctx.StreamEvent([]byte(fmt.Sprintf("This is a stream event %d", input.Index)))\n\n\treturn &stepOneOutput{\n\t\tMessage: fmt.Sprintf("This ran at %s", time.Now().String()),\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = 
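The v0 simple example deleted here chains step-two onto step-one with AddParents("step-one") and reads the upstream result through ctx.StepOutput. A v1 Python sketch of the same two-step DAG, assuming the parents= task option and the ctx.task_output accessor from the v1 SDK:

from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()

dag = hatchet.workflow(name="SimpleDag")


@dag.task()
def step_one(input: EmptyModel, ctx: Context) -> dict[str, str]:
    return {"message": "hello"}


@dag.task(parents=[step_one])  # runs only after step_one completes
def step_two(input: EmptyModel, ctx: Context) -> dict[str, str]:
    upstream = ctx.task_output(step_one)  # assumed v1 accessor for parent output
    return {"message": "above message is: " + upstream["message"]}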
w.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "stream-event-workflow",\n\t\t\tDescription: "This sends a stream event.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\t_, err = w.Start()\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n\n\t// Generate a random number between 1 and 100\n\tstreamKey := "streamKey"\n\tstreamValue := fmt.Sprintf("stream-event-%d", rand.IntN(100)+1)\n\n\t_, err = c.Admin().RunWorkflow("stream-event-workflow", &streamEventInput{\n\t\tIndex: 0,\n\t},\n\t\tclient.WithRunMetadata(map[string]interface{}{\n\t\t\tstreamKey: streamValue,\n\t\t}),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = c.Subscribe().StreamByAdditionalMetadata(interruptCtx, streamKey, streamValue, func(event client.StreamEvent) error {\n\t\tfmt.Println(string(event.Message))\n\t\treturn nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n', - source: 'out/go/z_v0/stream-event-by-meta/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event/index.ts deleted file mode 100644 index 65b980543..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event/main.ts deleted file mode 100644 index 74bd996be..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/stream-event/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/cmdutils"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype streamEventInput struct {\n\tIndex int `json:"index"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &streamEventInput{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tctx.StreamEvent([]byte(fmt.Sprintf("This is a stream event %d", input.Index)))\n\n\treturn &stepOneOutput{\n\t\tMessage: fmt.Sprintf("This ran at %s", time.Now().String()),\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: "stream-event-workflow",\n\t\t\tDescription: "This sends a stream event.",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName("step-one"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\t_, err = 
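stream-event-by-meta subscribes by an additional-metadata key/value pair (StreamByAdditionalMetadata) instead of a single run id, so one subscription covers every run tagged with that pair. The Python analogue is bulk_fanout/stream.py later in this patch; condensed:

import asyncio

from examples.bulk_fanout.worker import ParentInput, bulk_parent_wf
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.admin import TriggerWorkflowOptions


async def main() -> None:
    hatchet = Hatchet()

    # The metadata pair propagates to child runs, so the listener below
    # sees stream events from the whole fanout.
    bulk_parent_wf.run(
        input=ParentInput(n=2),
        options=TriggerWorkflowOptions(additional_metadata={"streamKey": "sk-42"}),
    )

    listener = hatchet.listener.stream_by_additional_metadata("streamKey", "sk-42")

    async for event in listener:
        print(event.type, event.payload)


if __name__ == "__main__":
    asyncio.run(main())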
w.Start()\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error cleaning up: %w", err))\n\t}\n\n\tworkflow, err := c.Admin().RunWorkflow("stream-event-workflow", &streamEventInput{\n\t\tIndex: 0,\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = c.Subscribe().Stream(interruptCtx, workflow.WorkflowRunId(), func(event client.StreamEvent) error {\n\t\tfmt.Println(string(event.Message))\n\n\t\treturn nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n', - source: 'out/go/z_v0/stream-event/main.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/index.ts deleted file mode 100644 index 3042aa1d2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main }; -export { run }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/main.ts deleted file mode 100644 index d0c46441d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/main.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"time"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\n\t// > TimeoutStep\n\tcleanup, err := run(events, worker.WorkflowJob{\n\t\tName: "timeout",\n\t\tDescription: "timeout",\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\ttime.Sleep(time.Second * 60)\n\t\t\t\treturn nil, nil\n\t\t\t}).SetName("step-one").SetTimeout("10s"),\n\t\t},\n\t})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-events\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf("cleanup() error = %v", err))\n\t}\n}\n', - source: 'out/go/z_v0/timeout/main.go', - blocks: { - timeoutstep: { - start: 31, - stop: 40, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/run.ts deleted file mode 100644 index f5605065f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"fmt"\n\t"log"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\nfunc run(done chan<- string, job worker.WorkflowJob) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating client: %w", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error creating worker: %w", err)\n\t}\n\n\terr = 
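The timeout example deleted above sleeps for 60 seconds inside a step capped with SetTimeout("10s"), so the engine kills the run after ten seconds. The Python SDK expresses the same cap as execution_timeout, used by blocked_async/worker.py later in this patch; minimal form:

import time
from datetime import timedelta

from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()


@hatchet.task(execution_timeout=timedelta(seconds=10))
def sleeper(input: EmptyModel, ctx: Context) -> None:
    time.sleep(60)  # exceeds the 10-second cap, so the run times out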
w.On(\n\t\tworker.Events("user:create:timeout"),\n\t\t&job,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error registering workflow: %w", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf("pushing event")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: "echo-test",\n\t\t\tUserID: "1234",\n\t\t\tData: map[string]string{\n\t\t\t\t"test": "test",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t"user:create:timeout",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf("error pushing event: %w", err))\n\t\t}\n\n\t\ttime.Sleep(20 * time.Second)\n\n\t\tdone <- "done"\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf("error starting worker: %w", err)\n\t}\n\n\treturn cleanup, nil\n}\n', - source: 'out/go/z_v0/timeout/run.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/index.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/index.ts deleted file mode 100644 index 3042aa1d2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main }; -export { run }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/main.ts deleted file mode 100644 index 3a9798e76..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/main.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"fmt"\n\t"log"\n\n\t"github.com/joho/godotenv"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:"username"`\n\tUserID string `json:"user_id"`\n\tData map[string]string `json:"data"`\n}\n\ntype output struct {\n\tMessage string `json:"message"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error creating client: %w", err))\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\tpanic(fmt.Errorf("error creating worker: %w", err))\n\t}\n\n\tworkflow := "webhook"\n\tevent := "user:create:webhook"\n\twf := &worker.WorkflowJob{\n\t\tName: workflow,\n\t\tDescription: workflow,\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *output, err error) {\n\t\t\t\tlog.Printf("step name: %s", ctx.StepName())\n\t\t\t\treturn &output{\n\t\t\t\t\tMessage: "hi from " + ctx.StepName(),\n\t\t\t\t}, nil\n\t\t\t}).SetName("webhook-step-one").SetTimeout("10s"),\n\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *output, err error) {\n\t\t\t\tlog.Printf("step name: %s", ctx.StepName())\n\t\t\t\treturn &output{\n\t\t\t\t\tMessage: "hi from " + ctx.StepName(),\n\t\t\t\t}, nil\n\t\t\t}).SetName("webhook-step-one").SetTimeout("10s"),\n\t\t},\n\t}\n\n\thandler := w.WebhookHttpHandler(worker.WebhookHandlerOptions{\n\t\tSecret: "secret",\n\t}, wf)\n\tport := "8741"\n\terr = run("webhook-demo", w, port, handler, c, workflow, event)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n', - source: 'out/go/z_v0/webhook/main.go', - blocks: {}, - highlights: {}, -}; 
- -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/run.ts deleted file mode 100644 index eeb86ad56..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'go', - content: - 'package main\n\nimport (\n\t"context"\n\t"errors"\n\t"fmt"\n\t"log"\n\t"net/http"\n\t"time"\n\n\t"github.com/hatchet-dev/hatchet/pkg/client"\n\t"github.com/hatchet-dev/hatchet/pkg/worker"\n)\n\nfunc run(\n\tname string,\n\tw *worker.Worker,\n\tport string,\n\thandler func(w http.ResponseWriter, r *http.Request), c client.Client, workflow string, event string,\n) error {\n\t// create webserver to handle webhook requests\n\tmux := http.NewServeMux()\n\n\t// Register the HelloHandler to the /hello route\n\tmux.HandleFunc("/webhook", handler)\n\n\t// Create a custom server\n\tserver := &http.Server{\n\t\tAddr: ":" + port,\n\t\tHandler: mux,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tIdleTimeout: 15 * time.Second,\n\t}\n\n\tdefer func(server *http.Server, ctx context.Context) {\n\t\terr := server.Shutdown(ctx)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}(server, context.Background())\n\n\tgo func() {\n\t\tif err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tsecret := "secret"\n\tif err := w.RegisterWebhook(worker.RegisterWebhookWorkerOpts{\n\t\tName: "test-" + name,\n\t\tURL: fmt.Sprintf("http://localhost:%s/webhook", port),\n\t\tSecret: &secret,\n\t}); err != nil {\n\t\treturn fmt.Errorf("error setting up webhook: %w", err)\n\t}\n\n\ttime.Sleep(30 * time.Second)\n\n\tlog.Printf("pushing event")\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: "echo-test",\n\t\tUserID: "1234",\n\t\tData: map[string]string{\n\t\t\t"test": "test",\n\t\t},\n\t}\n\n\t// push an event\n\terr := c.Event().Push(\n\t\tcontext.Background(),\n\t\tevent,\n\t\ttestEvent,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf("error pushing event: %w", err)\n\t}\n\n\ttime.Sleep(5 * time.Second)\n\n\treturn nil\n}\n', - source: 'out/go/z_v0/webhook/run.go', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/index.ts b/frontend/app/src/next/lib/docs/generated/snips/index.ts deleted file mode 100644 index 4dd9517ba..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import types from './types'; -import * as go from './go'; -import * as python from './python'; -import * as typescript from './typescript'; - -export { types }; -export { go }; -export { python }; -export { typescript }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/__init__.ts b/frontend/app/src/next/lib/docs/generated/snips/python/__init__.ts deleted file mode 100644 index 285f78fcb..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/__init__.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: '', - source: 'out/python/__init__.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/index.ts 
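Both webhook files drive the workflow by pushing an event (c.Event().Push with the user:create:webhook key) rather than invoking it directly. A Python sketch of the same event-driven trigger path; the on_events= task option and the hatchet.event.push method are assumptions here, since this diff only shows the event API on the Go side:

from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()


# Assumed option: run this task whenever the event key is pushed.
@hatchet.task(on_events=["user:create:webhook"])
def on_user_create(input: EmptyModel, ctx: Context) -> dict[str, str]:
    return {"status": "handled"}


# Assumed client method: publish the event with an arbitrary payload.
hatchet.event.push("user:create:webhook", {"username": "echo-test"})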
b/frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/index.ts deleted file mode 100644 index 245a4ee68..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import trigger from './trigger'; -import worker from './worker'; - -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/trigger.ts deleted file mode 100644 index 0f5cc2dca..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.affinity_workers.worker import affinity_worker_workflow\nfrom hatchet_sdk import TriggerWorkflowOptions\n\naffinity_worker_workflow.run(\n options=TriggerWorkflowOptions(additional_metadata={"hello": "moon"}),\n)\n', - source: 'out/python/affinity_workers/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/worker.ts deleted file mode 100644 index 9e0877d86..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/affinity_workers/worker.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import Context, EmptyModel, Hatchet, WorkerLabelComparator\nfrom hatchet_sdk.labels import DesiredWorkerLabel\n\nhatchet = Hatchet(debug=True)\n\n\n# > AffinityWorkflow\n\naffinity_worker_workflow = hatchet.workflow(name="AffinityWorkflow")\n\n\n@affinity_worker_workflow.task(\n desired_worker_labels={\n "model": DesiredWorkerLabel(value="fancy-ai-model-v2", weight=10),\n "memory": DesiredWorkerLabel(\n value=256,\n required=True,\n comparator=WorkerLabelComparator.LESS_THAN,\n ),\n },\n)\n\n\n\n# > AffinityTask\nasync def step(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n if ctx.worker.labels().get("model") != "fancy-ai-model-v2":\n ctx.worker.upsert_labels({"model": "unset"})\n # DO WORK TO EVICT OLD MODEL / LOAD NEW MODEL\n ctx.worker.upsert_labels({"model": "fancy-ai-model-v2"})\n\n return {"worker": ctx.worker.id()}\n\n\n\n\ndef main() -> None:\n\n # > AffinityWorker\n worker = hatchet.worker(\n "affinity-worker",\n slots=10,\n labels={\n "model": "fancy-ai-model-v2",\n "memory": 512,\n },\n workflows=[affinity_worker_workflow],\n )\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/affinity_workers/worker.py', - blocks: { - affinityworkflow: { - start: 8, - stop: 22, - }, - affinitytask: { - start: 26, - stop: 34, - }, - affinityworker: { - start: 40, - stop: 51, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/api/api.ts b/frontend/app/src/next/lib/docs/generated/snips/python/api/api.ts deleted file mode 100644 index da2c053a8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/api/api.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import 
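In the affinity_workers worker above, the two desired labels behave differently: weight=10 expresses a soft scheduling preference, while required=True plus a comparator is a hard constraint that a worker's advertised labels must satisfy. A condensed sketch of that distinction, reusing the exact API from the deleted file:

from hatchet_sdk import Context, EmptyModel, Hatchet, WorkerLabelComparator
from hatchet_sdk.labels import DesiredWorkerLabel

hatchet = Hatchet()

wf = hatchet.workflow(name="AffinityDemo")


@wf.task(
    desired_worker_labels={
        # Soft: prefer workers advertising this model, weighted into scheduling.
        "model": DesiredWorkerLabel(value="fancy-ai-model-v2", weight=10),
        # Hard: the worker's memory label must satisfy the LESS_THAN comparison.
        "memory": DesiredWorkerLabel(
            value=256, required=True, comparator=WorkerLabelComparator.LESS_THAN
        ),
    },
)
def pinned(input: EmptyModel, ctx: Context) -> dict[str, str | None]:
    return {"worker": ctx.worker.id()}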
Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n workflow_list = hatchet.workflows.list()\n rows = workflow_list.rows or []\n\n for workflow in rows:\n print(workflow.name)\n print(workflow.metadata.id)\n print(workflow.metadata.created_at)\n print(workflow.metadata.updated_at)\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/api/api.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/api/async_api.ts b/frontend/app/src/next/lib/docs/generated/snips/python/api/async_api.ts deleted file mode 100644 index 8d95f7122..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/api/async_api.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nasync def main() -> None:\n workflow_list = await hatchet.workflows.aio_list()\n rows = workflow_list.rows or []\n\n for workflow in rows:\n print(workflow.name)\n print(workflow.metadata.id)\n print(workflow.metadata.created_at)\n print(workflow.metadata.updated_at)\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/api/async_api.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/api/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/api/index.ts deleted file mode 100644 index 6a2990fbc..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/api/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import api from './api'; -import async_api from './async_api'; - -export { api }; -export { async_api }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/blocking_example_trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/blocking_example_trigger.ts deleted file mode 100644 index 14a4fffe2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/blocking_example_trigger.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Trigger\nimport time\n\nfrom examples.blocked_async.blocking_example_worker import (\n blocking,\n non_blocking_async,\n non_blocking_sync,\n)\n\nnon_blocking_sync.run_no_wait()\nnon_blocking_async.run_no_wait()\n\ntime.sleep(1)\n\nblocking.run_no_wait()\n\ntime.sleep(1)\n\nnon_blocking_sync.run_no_wait()\n\n', - source: 'out/python/blocked_async/blocking_example_trigger.py', - blocks: { - trigger: { - start: 2, - stop: 20, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/blocking_example_worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/blocking_example_worker.ts deleted file mode 100644 index b0ae9ed69..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/blocking_example_worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Worker\nimport asyncio\nimport time\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet()\n\nSLEEP_TIME = 6\n\n\n@hatchet.task()\nasync def non_blocking_async(input: 
EmptyModel, ctx: Context) -> None:\n for i in range(SLEEP_TIME):\n print("Non blocking async", i)\n await asyncio.sleep(1)\n\n\n@hatchet.task()\ndef non_blocking_sync(input: EmptyModel, ctx: Context) -> None:\n for i in range(SLEEP_TIME):\n print("Non blocking sync", i)\n time.sleep(1)\n\n\n@hatchet.task()\nasync def blocking(input: EmptyModel, ctx: Context) -> None:\n for i in range(SLEEP_TIME):\n print("Blocking", i)\n time.sleep(1)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "test-worker", workflows=[non_blocking_async, non_blocking_sync, blocking]\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/blocked_async/blocking_example_worker.py', - blocks: { - worker: { - start: 2, - stop: 32, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/debugging.ts b/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/debugging.ts deleted file mode 100644 index 13df327d2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/debugging.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Functions\nimport asyncio\nimport time\n\nSLEEP_TIME = 3\n\n\nasync def blocking() -> None:\n for i in range(SLEEP_TIME):\n print("Blocking", i)\n time.sleep(1)\n\n\nasync def non_blocking(task_id: str = "Non-blocking") -> None:\n for i in range(SLEEP_TIME):\n print(task_id, i)\n await asyncio.sleep(1)\n\n\n\n\n# > Blocked\nasync def blocked() -> None:\n loop = asyncio.get_event_loop()\n\n await asyncio.gather(\n *[\n loop.create_task(blocking()),\n loop.create_task(non_blocking()),\n ]\n )\n\n\n\n\n# > Unblocked\nasync def working() -> None:\n loop = asyncio.get_event_loop()\n\n await asyncio.gather(\n *[\n loop.create_task(non_blocking("A")),\n loop.create_task(non_blocking("B")),\n ]\n )\n\n\n\n\nif __name__ == "__main__":\n asyncio.run(blocked())\n asyncio.run(working())\n', - source: 'out/python/blocked_async/debugging.py', - blocks: { - functions: { - start: 2, - stop: 19, - }, - blocked: { - start: 23, - stop: 33, - }, - unblocked: { - start: 37, - stop: 47, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/index.ts deleted file mode 100644 index 0fd9cad13..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import blocking_example_trigger from './blocking_example_trigger'; -import blocking_example_worker from './blocking_example_worker'; -import debugging from './debugging'; -import trigger from './trigger'; -import worker from './worker'; - -export { blocking_example_trigger }; -export { blocking_example_worker }; -export { debugging }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/trigger.ts deleted file mode 100644 index 893031422..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.blocked_async.worker 
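The debugging example above shows how a single time.sleep inside an async def starves every other coroutine on the loop. A standard remedy (plain asyncio, not Hatchet-specific) is to offload the sync call with asyncio.to_thread so the event loop stays free:

import asyncio
import time

SLEEP_TIME = 3


async def no_longer_blocking() -> None:
    for i in range(SLEEP_TIME):
        print("Offloaded", i)
        # time.sleep runs on a worker thread; the loop keeps scheduling others
        await asyncio.to_thread(time.sleep, 1)


async def non_blocking(task_id: str = "Non-blocking") -> None:
    for i in range(SLEEP_TIME):
        print(task_id, i)
        await asyncio.sleep(1)


async def main() -> None:
    # Both now interleave, unlike the blocked() case above.
    await asyncio.gather(no_longer_blocking(), non_blocking())


if __name__ == "__main__":
    asyncio.run(main())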
import blocked_worker_workflow\nfrom hatchet_sdk import TriggerWorkflowOptions\n\nblocked_worker_workflow.run(\n options=TriggerWorkflowOptions(additional_metadata={"hello": "moon"}),\n)\n', - source: 'out/python/blocked_async/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/worker.ts deleted file mode 100644 index a16d3006a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/blocked_async/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import hashlib\nimport time\nfrom datetime import timedelta\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n# WARNING: this is an example of what NOT to do\n# This workflow is intentionally blocking the main thread\n# and will block the worker from processing other workflows\n#\n# You do not want to run long sync functions in an async def function\n\nblocked_worker_workflow = hatchet.workflow(name="Blocked")\n\n\n@blocked_worker_workflow.task(execution_timeout=timedelta(seconds=11), retries=3)\nasync def step1(input: EmptyModel, ctx: Context) -> dict[str, str | int | float]:\n print("Executing step1")\n\n # CPU-bound task: Calculate a large number of SHA-256 hashes\n start_time = time.time()\n iterations = 10_000_000\n for i in range(iterations):\n hashlib.sha256(f"data{i}".encode()).hexdigest()\n\n end_time = time.time()\n execution_time = end_time - start_time\n\n print(f"Completed {iterations} hash calculations in {execution_time:.2f} seconds")\n\n return {\n "step1": "step1",\n "iterations": iterations,\n "execution_time": execution_time,\n }\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "blocked-worker", slots=3, workflows=[blocked_worker_workflow]\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/blocked_async/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/bulk_trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/bulk_trigger.ts deleted file mode 100644 index 15ec8acb0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/bulk_trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nfrom examples.bulk_fanout.worker import ParentInput, bulk_parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\nhatchet = Hatchet()\n\n\nasync def main() -> None:\n results = bulk_parent_wf.run_many(\n workflows=[\n bulk_parent_wf.create_bulk_run_item(\n input=ParentInput(n=i),\n options=TriggerWorkflowOptions(\n additional_metadata={\n "bulk-trigger": i,\n "hello-{i}": "earth-{i}",\n }\n ),\n )\n for i in range(20)\n ],\n )\n\n for result in results:\n print(result)\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/bulk_fanout/bulk_trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/index.ts deleted file 
mode 100644 index 0e38514ca..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import bulk_trigger from './bulk_trigger'; -import stream from './stream'; -import test_bulk_fanout from './test_bulk_fanout'; -import trigger from './trigger'; -import worker from './worker'; - -export { bulk_trigger }; -export { stream }; -export { test_bulk_fanout }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/stream.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/stream.ts deleted file mode 100644 index 28979aa2d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/stream.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nimport random\n\nfrom examples.bulk_fanout.worker import ParentInput, bulk_parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\n\nasync def main() -> None:\n hatchet = Hatchet()\n\n # Generate a random stream key to use to track all\n # stream events for this workflow run.\n\n streamKey = "streamKey"\n streamVal = f"sk-{random.randint(1, 100)}"\n\n # Specify the stream key as additional metadata\n # when running the workflow.\n\n # This key gets propagated to all child workflows\n # and can have an arbitrary property name.\n bulk_parent_wf.run(\n input=ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={streamKey: streamVal}),\n )\n\n # Stream all events for the additional meta key value\n listener = hatchet.listener.stream_by_additional_metadata(streamKey, streamVal)\n\n async for event in listener:\n print(event.type, event.payload)\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/bulk_fanout/stream.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/test_bulk_fanout.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/test_bulk_fanout.ts deleted file mode 100644 index 10d5b7cbe..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/test_bulk_fanout.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import pytest\n\nfrom examples.bulk_fanout.worker import ParentInput, bulk_parent_wf\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_run() -> None:\n result = await bulk_parent_wf.aio_run(input=ParentInput(n=12))\n\n assert len(result["spawn"]["results"]) == 12\n', - source: 'out/python/bulk_fanout/test_bulk_fanout.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/trigger.ts deleted file mode 100644 index 583502b17..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.bulk_fanout.worker import ParentInput, bulk_parent_wf\nfrom hatchet_sdk import TriggerWorkflowOptions\n\nbulk_parent_wf.run(\n 
ParentInput(n=999),\n TriggerWorkflowOptions(additional_metadata={"no-dedupe": "world"}),\n)\n', - source: 'out/python/bulk_fanout/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/worker.ts deleted file mode 100644 index 3fcafec82..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_fanout/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from datetime import timedelta\nfrom typing import Any\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\nhatchet = Hatchet(debug=True)\n\n\nclass ParentInput(BaseModel):\n n: int = 100\n\n\nclass ChildInput(BaseModel):\n a: str\n\n\nbulk_parent_wf = hatchet.workflow(name="BulkFanoutParent", input_validator=ParentInput)\nbulk_child_wf = hatchet.workflow(name="BulkFanoutChild", input_validator=ChildInput)\n\n\n# > BulkFanoutParent\n@bulk_parent_wf.task(execution_timeout=timedelta(minutes=5))\nasync def spawn(input: ParentInput, ctx: Context) -> dict[str, list[dict[str, Any]]]:\n # 👀 Create each workflow run to spawn\n child_workflow_runs = [\n bulk_child_wf.create_bulk_run_item(\n input=ChildInput(a=str(i)),\n key=f"child{i}",\n options=TriggerWorkflowOptions(additional_metadata={"hello": "earth"}),\n )\n for i in range(input.n)\n ]\n\n # 👀 Run workflows in bulk to improve performance\n spawn_results = await bulk_child_wf.aio_run_many(child_workflow_runs)\n\n return {"results": spawn_results}\n\n\n\n\n@bulk_child_wf.task()\ndef process(input: ChildInput, ctx: Context) -> dict[str, str]:\n print(f"child process {input.a}")\n return {"status": "success " + input.a}\n\n\n@bulk_child_wf.task()\ndef process2(input: ChildInput, ctx: Context) -> dict[str, str]:\n print("child process2")\n return {"status2": "success"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "fanout-worker", slots=40, workflows=[bulk_parent_wf, bulk_child_wf]\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/bulk_fanout/worker.py', - blocks: { - bulkfanoutparent: { - start: 25, - stop: 42, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/cancel.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/cancel.ts deleted file mode 100644 index 385624c32..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/cancel.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Setup\n\nfrom datetime import datetime, timedelta, timezone\n\nfrom hatchet_sdk import BulkCancelReplayOpts, Hatchet, RunFilter, V1TaskStatus\n\nhatchet = Hatchet()\n\nworkflows = hatchet.workflows.list()\n\nassert workflows.rows\n\nworkflow = workflows.rows[0]\n\n\n# > List runs\nworkflow_runs = hatchet.runs.list(workflow_ids=[workflow.metadata.id])\n\n# > Cancel by run ids\nworkflow_run_ids = [workflow_run.metadata.id for workflow_run in workflow_runs.rows]\n\nbulk_cancel_by_ids = BulkCancelReplayOpts(ids=workflow_run_ids)\n\nhatchet.runs.bulk_cancel(bulk_cancel_by_ids)\n\n# > Cancel by 
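aio_run_many in the BulkFanoutParent task above awaits the entire batch before returning. When results should be handled as each child finishes instead, one plain-asyncio alternative is gathering individual aio_run coroutines with asyncio.as_completed; a sketch reusing the bulk_child_wf and ChildInput names from the worker above:

import asyncio

from examples.bulk_fanout.worker import ChildInput, bulk_child_wf


async def fanout_incremental(n: int) -> None:
    pending = [bulk_child_wf.aio_run(ChildInput(a=str(i))) for i in range(n)]

    # Handle each child's output as soon as that child completes.
    for fut in asyncio.as_completed(pending):
        result = await fut
        print(result)


if __name__ == "__main__":
    asyncio.run(fanout_incremental(5))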
filters\n\nbulk_cancel_by_filters = BulkCancelReplayOpts(\n filters=RunFilter(\n since=datetime.today() - timedelta(days=1),\n until=datetime.now(tz=timezone.utc),\n statuses=[V1TaskStatus.RUNNING],\n workflow_ids=[workflow.metadata.id],\n additional_metadata={"key": "value"},\n )\n)\n\nhatchet.runs.bulk_cancel(bulk_cancel_by_filters)\n', - source: 'out/python/bulk_operations/cancel.py', - blocks: { - setup: { - start: 2, - stop: 14, - }, - list_runs: { - start: 17, - stop: 17, - }, - cancel_by_run_ids: { - start: 20, - stop: 24, - }, - cancel_by_filters: { - start: 27, - stop: 38, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/index.ts deleted file mode 100644 index 40e0bd607..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import cancel from './cancel'; -import replay from './replay'; -import test_bulk_replay from './test_bulk_replay'; -import worker from './worker'; - -export { cancel }; -export { replay }; -export { test_bulk_replay }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/replay.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/replay.ts deleted file mode 100644 index e69ab0a8e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/replay.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Setup\n\nfrom datetime import datetime, timedelta, timezone\n\nfrom hatchet_sdk import BulkCancelReplayOpts, Hatchet, RunFilter, V1TaskStatus\n\nhatchet = Hatchet()\n\nworkflows = hatchet.workflows.list()\n\nassert workflows.rows\n\nworkflow = workflows.rows[0]\n\n\n# > List runs\nworkflow_runs = hatchet.runs.list(workflow_ids=[workflow.metadata.id])\n\n# > Replay by run ids\nworkflow_run_ids = [workflow_run.metadata.id for workflow_run in workflow_runs.rows]\n\nbulk_replay_by_ids = BulkCancelReplayOpts(ids=workflow_run_ids)\n\nhatchet.runs.bulk_replay(bulk_replay_by_ids)\n\n# > Replay by filters\nbulk_replay_by_filters = BulkCancelReplayOpts(\n filters=RunFilter(\n since=datetime.today() - timedelta(days=1),\n until=datetime.now(tz=timezone.utc),\n statuses=[V1TaskStatus.RUNNING],\n workflow_ids=[workflow.metadata.id],\n additional_metadata={"key": "value"},\n )\n)\n\nhatchet.runs.bulk_replay(bulk_replay_by_filters)\n', - source: 'out/python/bulk_operations/replay.py', - blocks: { - setup: { - start: 2, - stop: 14, - }, - list_runs: { - start: 17, - stop: 17, - }, - replay_by_run_ids: { - start: 20, - stop: 24, - }, - replay_by_filters: { - start: 27, - stop: 37, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/test_bulk_replay.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/test_bulk_replay.ts deleted file mode 100644 index fd6d67602..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/test_bulk_replay.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nfrom datetime import datetime, timedelta, timezone\nfrom uuid import 
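The cancel and replay scripts above are synchronous; the test that follows switches to the awaitable hatchet.runs.aio_bulk_replay. A sketch of the filter-based cancel in the same async style; the aio_bulk_cancel name is assumed by symmetry with aio_bulk_replay and is not confirmed by this diff:

import asyncio
from datetime import datetime, timedelta, timezone

from hatchet_sdk import BulkCancelReplayOpts, Hatchet, RunFilter, V1TaskStatus

hatchet = Hatchet()


async def main() -> None:
    workflows = hatchet.workflows.list()
    assert workflows.rows
    workflow = workflows.rows[0]

    # Assumed async twin of hatchet.runs.bulk_cancel.
    await hatchet.runs.aio_bulk_cancel(
        opts=BulkCancelReplayOpts(
            filters=RunFilter(
                since=datetime.now(tz=timezone.utc) - timedelta(days=1),
                statuses=[V1TaskStatus.RUNNING],
                workflow_ids=[workflow.metadata.id],
            )
        )
    )


if __name__ == "__main__":
    asyncio.run(main())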
uuid4\n\nimport pytest\n\nfrom examples.bulk_operations.worker import (\n bulk_replay_test_1,\n bulk_replay_test_2,\n bulk_replay_test_3,\n)\nfrom hatchet_sdk import BulkCancelReplayOpts, Hatchet, RunFilter, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_bulk_replay(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n n = 100\n\n with pytest.raises(Exception):\n await bulk_replay_test_1.aio_run_many(\n [\n bulk_replay_test_1.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata={\n "test_run_id": test_run_id,\n }\n )\n )\n for _ in range(n + 1)\n ]\n )\n\n with pytest.raises(Exception):\n await bulk_replay_test_2.aio_run_many(\n [\n bulk_replay_test_2.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata={\n "test_run_id": test_run_id,\n }\n )\n )\n for _ in range((n // 2) - 1)\n ]\n )\n\n with pytest.raises(Exception):\n await bulk_replay_test_3.aio_run_many(\n [\n bulk_replay_test_3.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata={\n "test_run_id": test_run_id,\n }\n )\n )\n for _ in range((n // 2) - 2)\n ]\n )\n\n workflow_ids = [\n bulk_replay_test_1.id,\n bulk_replay_test_2.id,\n bulk_replay_test_3.id,\n ]\n\n ## Should result in two batches of replays\n await hatchet.runs.aio_bulk_replay(\n opts=BulkCancelReplayOpts(\n filters=RunFilter(\n workflow_ids=workflow_ids,\n since=datetime.now(tz=timezone.utc) - timedelta(minutes=2),\n additional_metadata={"test_run_id": test_run_id},\n )\n )\n )\n\n await asyncio.sleep(10)\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=workflow_ids,\n since=datetime.now(tz=timezone.utc) - timedelta(minutes=2),\n additional_metadata={"test_run_id": test_run_id},\n limit=1000,\n )\n\n assert len(runs.rows) == n + 1 + (n // 2 - 1) + (n // 2 - 2)\n\n for run in runs.rows:\n assert run.status == V1TaskStatus.COMPLETED\n assert run.retry_count == 1\n assert run.attempt == 2\n\n assert (\n len([r for r in runs.rows if r.workflow_id == bulk_replay_test_1.id]) == n + 1\n )\n assert (\n len([r for r in runs.rows if r.workflow_id == bulk_replay_test_2.id])\n == n // 2 - 1\n )\n assert (\n len([r for r in runs.rows if r.workflow_id == bulk_replay_test_3.id])\n == n // 2 - 2\n )\n', - source: 'out/python/bulk_operations/test_bulk_replay.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/worker.ts deleted file mode 100644 index c418ad365..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/bulk_operations/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n@hatchet.task()\ndef bulk_replay_test_1(input: EmptyModel, ctx: Context) -> None:\n print("retrying bulk replay test task", ctx.retry_count)\n if ctx.retry_count == 0:\n raise ValueError("This is a test error to trigger a retry.")\n\n\n@hatchet.task()\ndef bulk_replay_test_2(input: EmptyModel, ctx: Context) -> None:\n print("retrying bulk replay test task", ctx.retry_count)\n if ctx.retry_count == 0:\n raise ValueError("This is a test error to trigger a retry.")\n\n\n@hatchet.task()\ndef bulk_replay_test_3(input: 
EmptyModel, ctx: Context) -> None:\n print("retrying bulk replay test task", ctx.retry_count)\n if ctx.retry_count == 0:\n raise ValueError("This is a test error to trigger a retry.")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "bulk-replay-test-worker",\n workflows=[bulk_replay_test_1, bulk_replay_test_2, bulk_replay_test_3],\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/bulk_operations/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/index.ts deleted file mode 100644 index 84ba324ef..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_cancellation from './test_cancellation'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_cancellation }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/test_cancellation.ts b/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/test_cancellation.ts deleted file mode 100644 index 6f6148c66..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/test_cancellation.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nimport pytest\n\nfrom examples.cancellation.worker import cancellation_workflow\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_cancellation(hatchet: Hatchet) -> None:\n ref = await cancellation_workflow.aio_run_no_wait()\n\n """Sleep for a long time since we only need cancellation to happen _eventually_"""\n await asyncio.sleep(10)\n\n for i in range(30):\n run = await hatchet.runs.aio_get(ref.workflow_run_id)\n\n if run.run.status == V1TaskStatus.RUNNING:\n await asyncio.sleep(1)\n continue\n\n assert run.run.status == V1TaskStatus.CANCELLED\n assert not run.run.output\n\n break\n else:\n assert False, "Workflow run did not cancel in time"\n', - source: 'out/python/cancellation/test_cancellation.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/trigger.ts deleted file mode 100644 index bc04923e9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\n\nfrom examples.cancellation.worker import cancellation_workflow, hatchet\n\nid = cancellation_workflow.run_no_wait()\n\ntime.sleep(5)\n\nhatchet.runs.cancel(id.workflow_run_id)\n', - source: 'out/python/cancellation/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/worker.ts deleted file mode 100644 index 7b7d73ee8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/cancellation/worker.ts +++ /dev/null 
@@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nimport time\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\ncancellation_workflow = hatchet.workflow(name="CancelWorkflow")\n\n\n# > Self-cancelling task\n@cancellation_workflow.task()\nasync def self_cancel(input: EmptyModel, ctx: Context) -> dict[str, str]:\n await asyncio.sleep(2)\n\n ## Cancel the task\n await ctx.aio_cancel()\n\n await asyncio.sleep(10)\n\n return {"error": "Task should have been cancelled"}\n\n\n\n\n# > Checking exit flag\n@cancellation_workflow.task()\ndef check_flag(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(3):\n time.sleep(1)\n\n # Note: Checking the status of the exit flag is mostly useful for cancelling\n # sync tasks without needing to forcibly kill the thread they\'re running on.\n if ctx.exit_flag:\n print("Task has been cancelled")\n raise ValueError("Task has been cancelled")\n\n return {"error": "Task should have been cancelled"}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker("cancellation-worker", workflows=[cancellation_workflow])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/cancellation/worker.py', - blocks: { - self_cancelling_task: { - start: 12, - stop: 23, - }, - checking_exit_flag: { - start: 27, - stop: 40, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/child/bulk.ts b/frontend/app/src/next/lib/docs/generated/snips/python/child/bulk.ts deleted file mode 100644 index a9684e7e3..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/child/bulk.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\n# > Running a Task\nfrom examples.child.worker import SimpleInput, child_task\n\nchild_task.run(SimpleInput(message="Hello, World!"))\n\n\nasync def main() -> None:\n # > Bulk Run a Task\n greetings = ["Hello, World!", "Hello, Moon!", "Hello, Mars!"]\n\n results = await child_task.aio_run_many(\n [\n # run each greeting as a task in parallel\n child_task.create_bulk_run_item(\n input=SimpleInput(message=greeting),\n )\n for greeting in greetings\n ]\n )\n\n # this will await all results and return a list of results\n print(results)\n\n # > Running Multiple Tasks\n result1 = child_task.aio_run(SimpleInput(message="Hello, World!"))\n result2 = child_task.aio_run(SimpleInput(message="Hello, Moon!"))\n\n # gather the results of the two tasks\n gather_results = await asyncio.gather(result1, result2)\n\n # print the results of the two tasks\n print(gather_results[0]["transformed_message"])\n print(gather_results[1]["transformed_message"])\n', - source: 'out/python/child/bulk.py', - blocks: { - running_a_task: { - start: 4, - stop: 6, - }, - bulk_run_a_task: { - start: 11, - stop: 24, - }, - running_multiple_tasks: { - start: 27, - stop: 35, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/child/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/child/index.ts deleted file mode 100644 index 750163313..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/child/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import bulk from './bulk'; -import simple_fanout from 
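test_cancellation above polls hatchet.runs.aio_get until the run leaves RUNNING, which is the reliable way to observe a cancel since cancellation only happens eventually. The same loop condensed into a helper; the aio_cancel name is assumed as the async twin of the hatchet.runs.cancel call shown in the trigger script:

import asyncio

from examples.cancellation.worker import cancellation_workflow
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus

hatchet = Hatchet()


async def cancel_and_wait() -> None:
    ref = await cancellation_workflow.aio_run_no_wait()

    await hatchet.runs.aio_cancel(ref.workflow_run_id)  # assumed async twin

    while True:
        run = await hatchet.runs.aio_get(ref.workflow_run_id)
        if run.run.status != V1TaskStatus.RUNNING:
            break
        await asyncio.sleep(1)

    assert run.run.status == V1TaskStatus.CANCELLED


if __name__ == "__main__":
    asyncio.run(cancel_and_wait())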
'./simple-fanout'; -import trigger from './trigger'; -import worker from './worker'; - -export { bulk }; -export { simple_fanout }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/child/simple-fanout.ts b/frontend/app/src/next/lib/docs/generated/snips/python/child/simple-fanout.ts deleted file mode 100644 index 9c152c172..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/child/simple-fanout.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from typing import Any\n\nfrom examples.child.worker import SimpleInput, child_task\nfrom hatchet_sdk.context.context import Context\nfrom hatchet_sdk.hatchet import Hatchet\nfrom hatchet_sdk.runnables.types import EmptyModel\n\nhatchet = Hatchet(debug=True)\n\n\n# > Running a Task from within a Task\n@hatchet.task(name="SpawnTask")\nasync def spawn(input: EmptyModel, ctx: Context) -> dict[str, Any]:\n # Simply run the task with the input we received\n result = await child_task.aio_run(\n input=SimpleInput(message="Hello, World!"),\n )\n\n return {"results": result}\n\n\n', - source: 'out/python/child/simple-fanout.py', - blocks: { - running_a_task_from_within_a_task: { - start: 12, - stop: 21, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/child/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/child/trigger.ts deleted file mode 100644 index f082b2e04..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/child/trigger.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# ruff: noqa: E402\n\nimport asyncio\n\n# > Running a Task\nfrom examples.child.worker import SimpleInput, child_task\n\nchild_task.run(SimpleInput(message="Hello, World!"))\n\n# > Schedule a Task\nfrom datetime import datetime, timedelta, timezone\n\nchild_task.schedule(\n datetime.now(tz=timezone.utc) + timedelta(minutes=5),\n SimpleInput(message="Hello, World!"),\n)\n\n\nasync def main() -> None:\n # > Running a Task AIO\n result = await child_task.aio_run(SimpleInput(message="Hello, World!"))\n\n print(result)\n\n # > Running Multiple Tasks\n result1 = child_task.aio_run(SimpleInput(message="Hello, World!"))\n result2 = child_task.aio_run(SimpleInput(message="Hello, Moon!"))\n\n # gather the results of the two tasks\n results = await asyncio.gather(result1, result2)\n\n # print the results of the two tasks\n print(results[0]["transformed_message"])\n print(results[1]["transformed_message"])\n', - source: 'out/python/child/trigger.py', - blocks: { - running_a_task: { - start: 6, - stop: 8, - }, - schedule_a_task: { - start: 11, - stop: 16, - }, - running_a_task_aio: { - start: 21, - stop: 21, - }, - running_multiple_tasks: { - start: 26, - stop: 34, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/child/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/child/worker.ts deleted file mode 100644 index 0e233310b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/child/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Simple\n\nfrom 
pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass SimpleInput(BaseModel):\n message: str\n\n\nclass SimpleOutput(BaseModel):\n transformed_message: str\n\n\nchild_task = hatchet.workflow(name="SimpleWorkflow", input_validator=SimpleInput)\n\n\n@child_task.task(name="step1")\ndef step1(input: SimpleInput, ctx: Context) -> SimpleOutput:\n print("executed step1: ", input.message)\n return SimpleOutput(transformed_message=input.message.upper())\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker("test-worker", slots=1, workflows=[child_task])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/child/worker.py', - blocks: { - simple: { - start: 2, - stop: 26, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/index.ts deleted file mode 100644 index 245a4ee68..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import trigger from './trigger'; -import worker from './worker'; - -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/trigger.ts deleted file mode 100644 index 79e5a1084..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.concurrency_limit.worker import WorkflowInput, concurrency_limit_workflow\n\nconcurrency_limit_workflow.run(WorkflowInput(group_key="test", run=1))\n', - source: 'out/python/concurrency_limit/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/worker.ts deleted file mode 100644 index 1869d1e62..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\nfrom typing import Any\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\n# > Workflow\nclass WorkflowInput(BaseModel):\n run: int\n group_key: str\n\n\nconcurrency_limit_workflow = hatchet.workflow(\n name="ConcurrencyDemoWorkflow",\n concurrency=ConcurrencyExpression(\n expression="input.group_key",\n max_runs=5,\n limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS,\n ),\n input_validator=WorkflowInput,\n)\n\n\n\n@concurrency_limit_workflow.task()\ndef step1(input: WorkflowInput, ctx: Context) -> dict[str, Any]:\n time.sleep(3)\n print("executed step1")\n return {"run": input.run}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "concurrency-demo-worker", slots=10, workflows=[concurrency_limit_workflow]\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/concurrency_limit/worker.py', - blocks: { - workflow: { - start: 17, - 
stop: 31, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/index.ts deleted file mode 100644 index 88385957c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_concurrency_limit_rr from './test_concurrency_limit_rr'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_concurrency_limit_rr }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/test_concurrency_limit_rr.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/test_concurrency_limit_rr.ts deleted file mode 100644 index a58ee18bd..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/test_concurrency_limit_rr.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\n\nimport pytest\n\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom hatchet_sdk.workflow_run import WorkflowRunRef\n\n\n@pytest.mark.skip(reason="The timing for this test is not reliable")\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_run() -> None:\n num_groups = 2\n runs: list[WorkflowRunRef] = []\n\n # Start all runs\n for i in range(1, num_groups + 1):\n run = concurrency_limit_rr_workflow.run_no_wait()\n runs.append(run)\n run = concurrency_limit_rr_workflow.run_no_wait()\n runs.append(run)\n\n # Wait for all results\n successful_runs = []\n cancelled_runs = []\n\n start_time = time.time()\n\n # Process each run individually\n for i, run in enumerate(runs, start=1):\n try:\n result = await run.aio_result()\n successful_runs.append((i, result))\n except Exception as e:\n if "CANCELLED_BY_CONCURRENCY_LIMIT" in str(e):\n cancelled_runs.append((i, str(e)))\n else:\n raise # Re-raise if it\'s an unexpected error\n\n end_time = time.time()\n total_time = end_time - start_time\n\n # Check that we have the correct number of successful and cancelled runs\n assert (\n len(successful_runs) == 4\n ), f"Expected 4 successful runs, got {len(successful_runs)}"\n assert (\n len(cancelled_runs) == 0\n ), f"Expected 0 cancelled run, got {len(cancelled_runs)}"\n\n # Check that the total time is close to 2 seconds\n assert (\n 3.8 <= total_time <= 7\n ), f"Expected runtime to be about 4 seconds, but it took {total_time:.2f} seconds"\n\n print(f"Total execution time: {total_time:.2f} seconds")\n', - source: 'out/python/concurrency_limit_rr/test_concurrency_limit_rr.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/trigger.ts deleted file mode 100644 index a26807efc..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.concurrency_limit_rr.worker import (\n WorkflowInput,\n concurrency_limit_rr_workflow,\n)\nfrom hatchet_sdk import Hatchet\n\nhatchet = 
Hatchet()\n\nfor i in range(200):\n group = "0"\n\n if i % 2 == 0:\n group = "1"\n\n concurrency_limit_rr_workflow.run(WorkflowInput(group=group))\n', - source: 'out/python/concurrency_limit_rr/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/worker.ts deleted file mode 100644 index 522cac6d1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\n# > Concurrency Strategy With Key\nclass WorkflowInput(BaseModel):\n group: str\n\n\nconcurrency_limit_rr_workflow = hatchet.workflow(\n name="ConcurrencyDemoWorkflowRR",\n concurrency=ConcurrencyExpression(\n expression="input.group",\n max_runs=1,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n input_validator=WorkflowInput,\n)\n\n\n@concurrency_limit_rr_workflow.task()\ndef step1(input: WorkflowInput, ctx: Context) -> None:\n print("starting step1")\n time.sleep(2)\n print("finished step1")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "concurrency-demo-worker-rr",\n slots=10,\n workflows=[concurrency_limit_rr_workflow],\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/concurrency_limit_rr/worker.py', - blocks: { - concurrency_strategy_with_key: { - start: 16, - stop: 28, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/event.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/event.ts deleted file mode 100644 index d75df487e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/event.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import random\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n# Create a list of events with desired distribution\nevents = ["1"] * 10000 + ["0"] * 100\nrandom.shuffle(events)\n\n# Send the shuffled events\nfor group in events:\n hatchet.event.push("concurrency-test", {"group": group})\n', - source: 'out/python/concurrency_limit_rr_load/event.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/index.ts deleted file mode 100644 index 5d82219b0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import event from './event'; -import worker from './worker'; - -export { event }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/worker.ts deleted file mode 100644 index b1205bdaa..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_limit_rr_load/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import random\nimport time\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\nclass LoadRRInput(BaseModel):\n group: str\n\n\nload_rr_workflow = hatchet.workflow(\n name="LoadRoundRobin",\n on_events=["concurrency-test"],\n concurrency=ConcurrencyExpression(\n expression="input.group",\n max_runs=1,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n input_validator=LoadRRInput,\n)\n\n\n@load_rr_workflow.on_failure_task()\ndef on_failure(input: LoadRRInput, context: Context) -> dict[str, str]:\n print("on_failure")\n return {"on_failure": "on_failure"}\n\n\n@load_rr_workflow.task()\ndef step1(input: LoadRRInput, context: Context) -> dict[str, str]:\n print("starting step1")\n time.sleep(random.randint(2, 20))\n print("finished step1")\n return {"step1": "step1"}\n\n\n@load_rr_workflow.task(\n retries=3,\n backoff_factor=5,\n backoff_max_seconds=60,\n)\ndef step2(sinput: LoadRRInput, context: Context) -> dict[str, str]:\n print("starting step2")\n if random.random() < 0.5: # 1% chance of failure\n raise Exception("Random failure in step2")\n time.sleep(2)\n print("finished step2")\n return {"step2": "step2"}\n\n\n@load_rr_workflow.task()\ndef step3(input: LoadRRInput, context: Context) -> dict[str, str]:\n print("starting step3")\n time.sleep(0.2)\n print("finished step3")\n return {"step3": "step3"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "concurrency-demo-worker-rr", slots=50, workflows=[load_rr_workflow]\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/concurrency_limit_rr_load/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/index.ts deleted file mode 100644 index 94640bd3c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import test_multiple_concurrency_keys from './test_multiple_concurrency_keys'; -import worker from './worker'; - -export { test_multiple_concurrency_keys }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/test_multiple_concurrency_keys.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/test_multiple_concurrency_keys.ts deleted file mode 100644 index 37e498273..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/test_multiple_concurrency_keys.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nfrom collections import Counter\nfrom datetime import datetime\nfrom random import choice\nfrom typing import Literal\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.concurrency_multiple_keys.worker import (\n DIGIT_MAX_RUNS,\n NAME_MAX_RUNS,\n WorkflowInput,\n concurrency_multiple_keys_workflow,\n)\nfrom hatchet_sdk import Hatchet, 
TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\n\nCharacter = Literal["Anna", "Vronsky", "Stiva", "Dolly", "Levin", "Karenin"]\ncharacters: list[Character] = [\n "Anna",\n "Vronsky",\n "Stiva",\n "Dolly",\n "Levin",\n "Karenin",\n]\n\n\nclass RunMetadata(BaseModel):\n test_run_id: str\n key: str\n name: Character\n digit: str\n started_at: datetime\n finished_at: datetime\n\n @staticmethod\n def parse(task: V1TaskSummary) -> "RunMetadata":\n return RunMetadata(\n test_run_id=task.additional_metadata["test_run_id"], # type: ignore\n key=task.additional_metadata["key"], # type: ignore\n name=task.additional_metadata["name"], # type: ignore\n digit=task.additional_metadata["digit"], # type: ignore\n started_at=task.started_at or datetime.max,\n finished_at=task.finished_at or datetime.min,\n )\n\n def __str__(self) -> str:\n return self.key\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_multi_concurrency_key(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n\n run_refs = await concurrency_multiple_keys_workflow.aio_run_many_no_wait(\n [\n concurrency_multiple_keys_workflow.create_bulk_run_item(\n WorkflowInput(\n name=(name := choice(characters)),\n digit=(digit := choice([str(i) for i in range(6)])),\n ),\n options=TriggerWorkflowOptions(\n additional_metadata={\n "test_run_id": test_run_id,\n "key": f"{name}-{digit}",\n "name": name,\n "digit": digit,\n },\n ),\n )\n for _ in range(100)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(\n workflow_name=concurrency_multiple_keys_workflow.name,\n limit=1_000,\n )\n ).rows\n\n assert workflows\n\n workflow = next(\n (w for w in workflows if w.name == concurrency_multiple_keys_workflow.name),\n None,\n )\n\n assert workflow\n\n assert workflow.name == concurrency_multiple_keys_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n "test_run_id": test_run_id,\n },\n limit=1_000,\n )\n\n sorted_runs = sorted(\n [RunMetadata.parse(r) for r in runs.rows], key=lambda r: r.started_at\n )\n\n overlapping_groups: dict[int, list[RunMetadata]] = {}\n\n for run in sorted_runs:\n has_group_membership = False\n\n if not overlapping_groups:\n overlapping_groups[1] = [run]\n continue\n\n if has_group_membership:\n continue\n\n for id, group in overlapping_groups.items():\n if all(are_overlapping(run, task) for task in group):\n overlapping_groups[id].append(run)\n has_group_membership = True\n break\n\n if not has_group_membership:\n overlapping_groups[len(overlapping_groups) + 1] = [run]\n\n assert {s.key for s in sorted_runs} == {\n k.key for v in overlapping_groups.values() for k in v\n }\n\n for id, group in overlapping_groups.items():\n assert is_valid_group(group), f"Group {id} is not valid"\n\n\ndef are_overlapping(x: RunMetadata, y: RunMetadata) -> bool:\n return (x.started_at < y.finished_at and x.finished_at > y.started_at) or (\n x.finished_at > y.started_at and x.started_at < y.finished_at\n )\n\n\ndef is_valid_group(group: list[RunMetadata]) -> bool:\n digits = Counter[str]()\n names = Counter[str]()\n\n for task in group:\n digits[task.digit] += 1\n names[task.name] += 1\n\n if any(v > DIGIT_MAX_RUNS for v in digits.values()):\n return False\n\n if any(v > NAME_MAX_RUNS for v in names.values()):\n return False\n\n return True\n', - source: - 'out/python/concurrency_multiple_keys/test_multiple_concurrency_keys.py', - blocks: {}, - 
highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/worker.ts deleted file mode 100644 index c4ec956b4..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_multiple_keys/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\nSLEEP_TIME = 2\nDIGIT_MAX_RUNS = 8\nNAME_MAX_RUNS = 3\n\n\n# > Concurrency Strategy With Key\nclass WorkflowInput(BaseModel):\n name: str\n digit: str\n\n\nconcurrency_multiple_keys_workflow = hatchet.workflow(\n name="ConcurrencyWorkflowManyKeys",\n input_validator=WorkflowInput,\n)\n\n\n@concurrency_multiple_keys_workflow.task(\n concurrency=[\n ConcurrencyExpression(\n expression="input.digit",\n max_runs=DIGIT_MAX_RUNS,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n ConcurrencyExpression(\n expression="input.name",\n max_runs=NAME_MAX_RUNS,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n ]\n)\nasync def concurrency_task(input: WorkflowInput, ctx: Context) -> None:\n await asyncio.sleep(SLEEP_TIME)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "concurrency-worker-multiple-keys",\n slots=10,\n workflows=[concurrency_multiple_keys_workflow],\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/concurrency_multiple_keys/worker.py', - blocks: { - concurrency_strategy_with_key: { - start: 20, - stop: 28, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/index.ts deleted file mode 100644 index bd7a70141..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import test_workflow_level_concurrency from './test_workflow_level_concurrency'; -import worker from './worker'; - -export { test_workflow_level_concurrency }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/test_workflow_level_concurrency.ts b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/test_workflow_level_concurrency.ts deleted file mode 100644 index 66c8121d0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/test_workflow_level_concurrency.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nfrom collections import Counter\nfrom datetime import datetime\nfrom random import choice\nfrom typing import Literal\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.concurrency_workflow_level.worker import (\n DIGIT_MAX_RUNS,\n NAME_MAX_RUNS,\n WorkflowInput,\n concurrency_workflow_level_workflow,\n)\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\n\nCharacter = 
Literal["Anna", "Vronsky", "Stiva", "Dolly", "Levin", "Karenin"]\ncharacters: list[Character] = [\n "Anna",\n "Vronsky",\n "Stiva",\n "Dolly",\n "Levin",\n "Karenin",\n]\n\n\nclass RunMetadata(BaseModel):\n test_run_id: str\n key: str\n name: Character\n digit: str\n started_at: datetime\n finished_at: datetime\n\n @staticmethod\n def parse(task: V1TaskSummary) -> "RunMetadata":\n return RunMetadata(\n test_run_id=task.additional_metadata["test_run_id"], # type: ignore\n key=task.additional_metadata["key"], # type: ignore\n name=task.additional_metadata["name"], # type: ignore\n digit=task.additional_metadata["digit"], # type: ignore\n started_at=task.started_at or datetime.max,\n finished_at=task.finished_at or datetime.min,\n )\n\n def __str__(self) -> str:\n return self.key\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_workflow_level_concurrency(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n\n run_refs = await concurrency_workflow_level_workflow.aio_run_many_no_wait(\n [\n concurrency_workflow_level_workflow.create_bulk_run_item(\n WorkflowInput(\n name=(name := choice(characters)),\n digit=(digit := choice([str(i) for i in range(6)])),\n ),\n options=TriggerWorkflowOptions(\n additional_metadata={\n "test_run_id": test_run_id,\n "key": f"{name}-{digit}",\n "name": name,\n "digit": digit,\n },\n ),\n )\n for _ in range(100)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(\n workflow_name=concurrency_workflow_level_workflow.name,\n limit=1_000,\n )\n ).rows\n\n assert workflows\n\n workflow = next(\n (w for w in workflows if w.name == concurrency_workflow_level_workflow.name),\n None,\n )\n\n assert workflow\n\n assert workflow.name == concurrency_workflow_level_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n "test_run_id": test_run_id,\n },\n limit=1_000,\n )\n\n sorted_runs = sorted(\n [RunMetadata.parse(r) for r in runs.rows], key=lambda r: r.started_at\n )\n\n overlapping_groups: dict[int, list[RunMetadata]] = {}\n\n for run in sorted_runs:\n has_group_membership = False\n\n if not overlapping_groups:\n overlapping_groups[1] = [run]\n continue\n\n if has_group_membership:\n continue\n\n for id, group in overlapping_groups.items():\n if all(are_overlapping(run, task) for task in group):\n overlapping_groups[id].append(run)\n has_group_membership = True\n break\n\n if not has_group_membership:\n overlapping_groups[len(overlapping_groups) + 1] = [run]\n\n for id, group in overlapping_groups.items():\n assert is_valid_group(group), f"Group {id} is not valid"\n\n\ndef are_overlapping(x: RunMetadata, y: RunMetadata) -> bool:\n return (x.started_at < y.finished_at and x.finished_at > y.started_at) or (\n x.finished_at > y.started_at and x.started_at < y.finished_at\n )\n\n\ndef is_valid_group(group: list[RunMetadata]) -> bool:\n digits = Counter[str]()\n names = Counter[str]()\n\n for task in group:\n digits[task.digit] += 1\n names[task.name] += 1\n\n if any(v > DIGIT_MAX_RUNS for v in digits.values()):\n return False\n\n if any(v > NAME_MAX_RUNS for v in names.values()):\n return False\n\n return True\n', - source: - 'out/python/concurrency_workflow_level/test_workflow_level_concurrency.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/worker.ts 
b/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/worker.ts deleted file mode 100644 index b0726b64f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/concurrency_workflow_level/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\nSLEEP_TIME = 2\nDIGIT_MAX_RUNS = 8\nNAME_MAX_RUNS = 3\n\n\n# > Multiple Concurrency Keys\nclass WorkflowInput(BaseModel):\n name: str\n digit: str\n\n\nconcurrency_workflow_level_workflow = hatchet.workflow(\n name="ConcurrencyWorkflowManyKeys",\n input_validator=WorkflowInput,\n concurrency=[\n ConcurrencyExpression(\n expression="input.digit",\n max_runs=DIGIT_MAX_RUNS,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n ConcurrencyExpression(\n expression="input.name",\n max_runs=NAME_MAX_RUNS,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n ],\n)\n\n\n@concurrency_workflow_level_workflow.task()\nasync def task_1(input: WorkflowInput, ctx: Context) -> None:\n await asyncio.sleep(SLEEP_TIME)\n\n\n@concurrency_workflow_level_workflow.task()\nasync def task_2(input: WorkflowInput, ctx: Context) -> None:\n await asyncio.sleep(SLEEP_TIME)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "concurrency-worker-workflow-level",\n slots=10,\n workflows=[concurrency_workflow_level_workflow],\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/concurrency_workflow_level/worker.py', - blocks: { - multiple_concurrency_keys: { - start: 20, - stop: 40, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/conditions/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/conditions/index.ts deleted file mode 100644 index 299d7d6e0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/conditions/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_conditions from './test_conditions'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_conditions }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/conditions/test_conditions.ts b/frontend/app/src/next/lib/docs/generated/snips/python/conditions/test_conditions.ts deleted file mode 100644 index 5f5cc0f79..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/conditions/test_conditions.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nimport pytest\n\nfrom examples.conditions.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_waits(hatchet: Hatchet) -> None:\n ref = task_condition_workflow.run_no_wait()\n\n await asyncio.sleep(15)\n\n hatchet.event.push("skip_on_event:skip", {})\n hatchet.event.push("wait_for_event:start", {})\n\n result = await ref.aio_result()\n\n assert result["skip_on_event"] == {"skipped": True}\n\n first_random_number = result["start"]["random_number"]\n wait_for_event_random_number = result["wait_for_event"]["random_number"]\n wait_for_sleep_random_number = 
result["wait_for_sleep"]["random_number"]\n\n left_branch = result["left_branch"]\n right_branch = result["right_branch"]\n\n assert left_branch.get("skipped") is True or right_branch.get("skipped") is True\n\n skip_with_multiple_parents = result["skip_with_multiple_parents"]\n\n assert skip_with_multiple_parents.get("skipped") is True\n\n branch_random_number = left_branch.get("random_number") or right_branch.get(\n "random_number"\n )\n\n result_sum = result["sum"]["sum"]\n\n assert (\n result_sum\n == first_random_number\n + wait_for_event_random_number\n + wait_for_sleep_random_number\n + branch_random_number\n )\n', - source: 'out/python/conditions/test_conditions.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/conditions/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/conditions/trigger.ts deleted file mode 100644 index 71a3924af..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/conditions/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\n\nfrom examples.conditions.worker import hatchet, task_condition_workflow\n\ntask_condition_workflow.run_no_wait()\n\ntime.sleep(5)\n\nhatchet.event.push("skip_on_event:skip", {})\nhatchet.event.push("wait_for_event:start", {})\n', - source: 'out/python/conditions/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/conditions/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/conditions/worker.ts deleted file mode 100644 index a01e7f326..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/conditions/worker.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Create a workflow\n\nimport random\nfrom datetime import timedelta\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n Context,\n EmptyModel,\n Hatchet,\n ParentCondition,\n SleepCondition,\n UserEventCondition,\n or_,\n)\n\nhatchet = Hatchet(debug=True)\n\n\nclass StepOutput(BaseModel):\n random_number: int\n\n\nclass RandomSum(BaseModel):\n sum: int\n\n\ntask_condition_workflow = hatchet.workflow(name="TaskConditionWorkflow")\n\n\n\n# > Add base task\n@task_condition_workflow.task()\ndef start(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add wait for sleep\n@task_condition_workflow.task(\n parents=[start], wait_for=[SleepCondition(timedelta(seconds=10))]\n)\ndef wait_for_sleep(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add skip condition override\n@task_condition_workflow.task(\n parents=[start, wait_for_sleep],\n skip_if=[ParentCondition(parent=start, expression="output.random_number > 0")],\n)\ndef skip_with_multiple_parents(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add skip on event\n@task_condition_workflow.task(\n parents=[start],\n wait_for=[SleepCondition(timedelta(seconds=30))],\n skip_if=[UserEventCondition(event_key="skip_on_event:skip")],\n)\ndef skip_on_event(input: EmptyModel, ctx: Context) -> StepOutput:\n return 
StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add branching\n@task_condition_workflow.task(\n parents=[wait_for_sleep],\n skip_if=[\n ParentCondition(\n parent=wait_for_sleep,\n expression="output.random_number > 50",\n )\n ],\n)\ndef left_branch(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@task_condition_workflow.task(\n parents=[wait_for_sleep],\n skip_if=[\n ParentCondition(\n parent=wait_for_sleep,\n expression="output.random_number <= 50",\n )\n ],\n)\ndef right_branch(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add wait for event\n@task_condition_workflow.task(\n parents=[start],\n wait_for=[\n or_(\n SleepCondition(duration=timedelta(minutes=1)),\n UserEventCondition(event_key="wait_for_event:start"),\n )\n ],\n)\ndef wait_for_event(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add sum\n@task_condition_workflow.task(\n parents=[\n start,\n wait_for_sleep,\n wait_for_event,\n skip_on_event,\n left_branch,\n right_branch,\n ],\n)\ndef sum(input: EmptyModel, ctx: Context) -> RandomSum:\n one = ctx.task_output(start).random_number\n two = ctx.task_output(wait_for_event).random_number\n three = ctx.task_output(wait_for_sleep).random_number\n four = (\n ctx.task_output(skip_on_event).random_number\n if not ctx.was_skipped(skip_on_event)\n else 0\n )\n\n five = (\n ctx.task_output(left_branch).random_number\n if not ctx.was_skipped(left_branch)\n else 0\n )\n six = (\n ctx.task_output(right_branch).random_number\n if not ctx.was_skipped(right_branch)\n else 0\n )\n\n return RandomSum(sum=one + two + three + four + five + six)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker("dag-worker", workflows=[task_condition_workflow])\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/conditions/worker.py', - blocks: { - create_a_workflow: { - start: 2, - stop: 30, - }, - add_base_task: { - start: 34, - stop: 38, - }, - add_wait_for_sleep: { - start: 42, - stop: 48, - }, - add_skip_condition_override: { - start: 52, - stop: 59, - }, - add_skip_on_event: { - start: 63, - stop: 71, - }, - add_branching: { - start: 75, - stop: 100, - }, - add_wait_for_event: { - start: 104, - stop: 116, - }, - add_sum: { - start: 120, - stop: 153, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/cron/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/cron/index.ts deleted file mode 100644 index c468b44a1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/cron/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import programatic_async from './programatic-async'; -import programatic_sync from './programatic-sync'; -import workflow_definition from './workflow-definition'; - -export { programatic_async }; -export { programatic_sync }; -export { workflow_definition }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/cron/programatic-async.ts b/frontend/app/src/next/lib/docs/generated/snips/python/cron/programatic-async.ts deleted file mode 100644 index db47d7f08..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/cron/programatic-async.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from pydantic import 
BaseModel\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n\nclass DynamicCronInput(BaseModel):\n name: str\n\n\nasync def create_cron() -> None:\n dynamic_cron_workflow = hatchet.workflow(\n name="CronWorkflow", input_validator=DynamicCronInput\n )\n\n # > Create\n cron_trigger = await dynamic_cron_workflow.aio_create_cron(\n cron_name="customer-a-daily-report",\n expression="0 12 * * *",\n input=DynamicCronInput(name="John Doe"),\n additional_metadata={\n "customer_id": "customer-a",\n },\n )\n\n cron_trigger.metadata.id # the id of the cron trigger\n\n # > List\n await hatchet.cron.aio_list()\n\n # > Get\n cron_trigger = await hatchet.cron.aio_get(cron_id=cron_trigger.metadata.id)\n\n # > Delete\n await hatchet.cron.aio_delete(cron_id=cron_trigger.metadata.id)\n', - source: 'out/python/cron/programatic-async.py', - blocks: { - create: { - start: 18, - stop: 27, - }, - list: { - start: 30, - stop: 30, - }, - get: { - start: 33, - stop: 33, - }, - delete: { - start: 36, - stop: 36, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/cron/programatic-sync.ts b/frontend/app/src/next/lib/docs/generated/snips/python/cron/programatic-sync.ts deleted file mode 100644 index 7d95ad4f1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/cron/programatic-sync.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from pydantic import BaseModel\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n\nclass DynamicCronInput(BaseModel):\n name: str\n\n\ndynamic_cron_workflow = hatchet.workflow(\n name="CronWorkflow", input_validator=DynamicCronInput\n)\n\n# > Create\ncron_trigger = dynamic_cron_workflow.create_cron(\n cron_name="customer-a-daily-report",\n expression="0 12 * * *",\n input=DynamicCronInput(name="John Doe"),\n additional_metadata={\n "customer_id": "customer-a",\n },\n)\n\n\nid = cron_trigger.metadata.id # the id of the cron trigger\n\n# > List\ncron_triggers = hatchet.cron.list()\n\n# > Get\ncron_trigger = hatchet.cron.get(cron_id=cron_trigger.metadata.id)\n\n# > Delete\nhatchet.cron.delete(cron_id=cron_trigger.metadata.id)\n', - source: 'out/python/cron/programatic-sync.py', - blocks: { - create: { - start: 17, - stop: 27, - }, - list: { - start: 30, - stop: 30, - }, - get: { - start: 33, - stop: 33, - }, - delete: { - start: 36, - stop: 36, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/cron/workflow-definition.ts b/frontend/app/src/next/lib/docs/generated/snips/python/cron/workflow-definition.ts deleted file mode 100644 index 3ac8e3422..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/cron/workflow-definition.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Workflow Definition Cron Trigger\n# Adding a cron trigger to a workflow is as simple\n# as adding a `cron expression` to the `on_cron`\n# prop of the workflow definition\n\ncron_workflow = hatchet.workflow(name="CronWorkflow", on_crons=["* * * * *"])\n\n\n@cron_workflow.task()\ndef step1(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {\n "time": "step1",\n }\n\n\n\n\ndef main() -> None:\n 
worker = hatchet.worker("test-worker", slots=1, workflows=[cron_workflow])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/cron/workflow-definition.py', - blocks: { - workflow_definition_cron_trigger: { - start: 7, - stop: 20, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dag/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dag/index.ts deleted file mode 100644 index 3643e4da8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dag/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_dag from './test_dag'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_dag }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dag/test_dag.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dag/test_dag.ts deleted file mode 100644 index 4b690cdd8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dag/test_dag.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import pytest\n\nfrom examples.dag.worker import dag_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_run(hatchet: Hatchet) -> None:\n result = await dag_workflow.aio_run()\n\n one = result["step1"]["random_number"]\n two = result["step2"]["random_number"]\n assert result["step3"]["sum"] == one + two\n assert result["step4"]["step4"] == "step4"\n', - source: 'out/python/dag/test_dag.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dag/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dag/trigger.ts deleted file mode 100644 index 080fb3393..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dag/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.dag.worker import dag_workflow\n\ndag_workflow.run()\n', - source: 'out/python/dag/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dag/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dag/worker.ts deleted file mode 100644 index f550543c6..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dag/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import random\nimport time\nfrom datetime import timedelta\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\n\nclass StepOutput(BaseModel):\n random_number: int\n\n\nclass RandomSum(BaseModel):\n sum: int\n\n\nhatchet = Hatchet(debug=True)\n\ndag_workflow = hatchet.workflow(name="DAGWorkflow")\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\ndef step1(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\nasync def step2(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(parents=[step1, 
step2])\nasync def step3(input: EmptyModel, ctx: Context) -> RandomSum:\n one = ctx.task_output(step1).random_number\n two = ctx.task_output(step2).random_number\n\n return RandomSum(sum=one + two)\n\n\n@dag_workflow.task(parents=[step1, step3])\nasync def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\n "executed step4",\n time.strftime("%H:%M:%S", time.localtime()),\n input,\n ctx.task_output(step1),\n ctx.task_output(step3),\n )\n return {\n "step4": "step4",\n }\n\n\ndef main() -> None:\n worker = hatchet.worker("dag-worker", workflows=[dag_workflow])\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/dag/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dedupe/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dedupe/index.ts deleted file mode 100644 index c1b323985..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dedupe/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import worker from './worker'; - -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dedupe/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dedupe/worker.ts deleted file mode 100644 index 781698426..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dedupe/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nfrom datetime import timedelta\nfrom typing import Any\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.exceptions import DedupeViolationError\n\nhatchet = Hatchet(debug=True)\n\ndedupe_parent_wf = hatchet.workflow(name="DedupeParent")\ndedupe_child_wf = hatchet.workflow(name="DedupeChild")\n\n\n@dedupe_parent_wf.task(execution_timeout=timedelta(minutes=1))\nasync def spawn(input: EmptyModel, ctx: Context) -> dict[str, list[Any]]:\n print("spawning child")\n\n results = []\n\n for i in range(2):\n try:\n results.append(\n dedupe_child_wf.aio_run(\n options=TriggerWorkflowOptions(\n additional_metadata={"dedupe": "test"}, key=f"child{i}"\n ),\n )\n )\n except DedupeViolationError as e:\n print(f"dedupe violation {e}")\n continue\n\n result = await asyncio.gather(*results)\n print(f"results {result}")\n\n return {"results": result}\n\n\n@dedupe_child_wf.task()\nasync def process(input: EmptyModel, ctx: Context) -> dict[str, str]:\n await asyncio.sleep(3)\n\n print("child process")\n return {"status": "success"}\n\n\n@dedupe_child_wf.task()\nasync def process2(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print("child process2")\n return {"status2": "success"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "fanout-worker", slots=100, workflows=[dedupe_parent_wf, dedupe_child_wf]\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/dedupe/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/delayed/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/delayed/index.ts deleted file mode 100644 index 9d01f2829..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/delayed/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_delayed from './test_delayed'; -import trigger from './trigger'; -import worker from './worker'; - -export { 
test_delayed }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/delayed/test_delayed.ts b/frontend/app/src/next/lib/docs/generated/snips/python/delayed/test_delayed.ts deleted file mode 100644 index ad1767550..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/delayed/test_delayed.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# from hatchet_sdk import Hatchet\n# import pytest\n\n# from tests.utils import fixture_bg_worker\n\n\n# worker = fixture_bg_worker(["poetry", "run", "manual_trigger"])\n\n# # @pytest.mark.asyncio(loop_scope="session")\n# async def test_run(hatchet: Hatchet):\n# # TODO\n', - source: 'out/python/delayed/test_delayed.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/delayed/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/delayed/trigger.ts deleted file mode 100644 index 1410aa641..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/delayed/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.delayed.worker import PrinterInput, print_schedule_wf\n\nprint_schedule_wf.run(PrinterInput(message="test"))\n', - source: 'out/python/delayed/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/delayed/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/delayed/worker.ts deleted file mode 100644 index 990e7876b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/delayed/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from datetime import datetime, timedelta, timezone\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass PrinterInput(BaseModel):\n message: str\n\n\nprint_schedule_wf = hatchet.workflow(\n name="PrintScheduleWorkflow",\n input_validator=PrinterInput,\n)\nprint_printer_wf = hatchet.workflow(\n name="PrintPrinterWorkflow", input_validator=PrinterInput\n)\n\n\n@print_schedule_wf.task()\ndef schedule(input: PrinterInput, ctx: Context) -> None:\n now = datetime.now(tz=timezone.utc)\n print(f"the time is \\t {now.strftime(\'%H:%M:%S\')}")\n future_time = now + timedelta(seconds=15)\n print(f"scheduling for \\t {future_time.strftime(\'%H:%M:%S\')}")\n\n print_printer_wf.schedule(future_time, input=input)\n\n\n@print_schedule_wf.task()\ndef step1(input: PrinterInput, ctx: Context) -> None:\n now = datetime.now(tz=timezone.utc)\n print(f"printed at \\t {now.strftime(\'%H:%M:%S\')}")\n print(f"message \\t {input.message}")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "delayed-worker", slots=4, workflows=[print_schedule_wf, print_printer_wf]\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/delayed/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/index.ts deleted file mode 
100644 index 53c740d97..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import test_dependency_injection from './test_dependency_injection'; -import worker from './worker'; - -export { test_dependency_injection }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/test_dependency_injection.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/test_dependency_injection.ts deleted file mode 100644 index 08b6a8de2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/test_dependency_injection.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import pytest\n\nfrom examples.dependency_injection.worker import (\n ASYNC_DEPENDENCY_VALUE,\n SYNC_DEPENDENCY_VALUE,\n Output,\n async_dep,\n async_task_with_dependencies,\n di_workflow,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n sync_dep,\n sync_task_with_dependencies,\n)\nfrom hatchet_sdk import EmptyModel\nfrom hatchet_sdk.runnables.workflow import Standalone\n\n\n@pytest.mark.parametrize(\n "task",\n [\n async_task_with_dependencies,\n sync_task_with_dependencies,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n ],\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_di_standalones(\n task: Standalone[EmptyModel, Output],\n) -> None:\n result = await task.aio_run()\n\n assert isinstance(result, Output)\n assert result.sync_dep == SYNC_DEPENDENCY_VALUE\n assert result.async_dep == ASYNC_DEPENDENCY_VALUE\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_di_workflows() -> None:\n result = await di_workflow.aio_run()\n\n assert len(result) == 4\n\n for output in result.values():\n parsed = Output.model_validate(output)\n\n assert parsed.sync_dep == SYNC_DEPENDENCY_VALUE\n assert parsed.async_dep == ASYNC_DEPENDENCY_VALUE\n', - source: 'out/python/dependency_injection/test_dependency_injection.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/worker.ts deleted file mode 100644 index 0ba97428d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dependency_injection/worker.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Simple\n\nfrom typing import Annotated\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Depends, DurableContext, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=False)\n\nSYNC_DEPENDENCY_VALUE = "sync_dependency_value"\nASYNC_DEPENDENCY_VALUE = "async_dependency_value"\n\n\n# > Declare dependencies\nasync def async_dep(input: EmptyModel, ctx: Context) -> str:\n return ASYNC_DEPENDENCY_VALUE\n\n\ndef sync_dep(input: EmptyModel, ctx: Context) -> str:\n return SYNC_DEPENDENCY_VALUE\n\n\n\n\nclass Output(BaseModel):\n sync_dep: str\n async_dep: str\n\n\n# > Inject dependencies\n@hatchet.task()\nasync def async_task_with_dependencies(\n _i: EmptyModel,\n ctx: Context,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return 
Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n\n\n@hatchet.task()\ndef sync_task_with_dependencies(\n _i: EmptyModel,\n ctx: Context,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@hatchet.durable_task()\nasync def durable_async_task_with_dependencies(\n _i: EmptyModel,\n ctx: DurableContext,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@hatchet.durable_task()\ndef durable_sync_task_with_dependencies(\n _i: EmptyModel,\n ctx: DurableContext,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\ndi_workflow = hatchet.workflow(\n name="dependency-injection-workflow",\n)\n\n\n@di_workflow.task()\nasync def wf_async_task_with_dependencies(\n _i: EmptyModel,\n ctx: Context,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@di_workflow.task()\ndef wf_sync_task_with_dependencies(\n _i: EmptyModel,\n ctx: Context,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@di_workflow.durable_task()\nasync def wf_durable_async_task_with_dependencies(\n _i: EmptyModel,\n ctx: DurableContext,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@di_workflow.durable_task()\ndef wf_durable_sync_task_with_dependencies(\n _i: EmptyModel,\n ctx: DurableContext,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "dependency-injection-worker",\n workflows=[\n async_task_with_dependencies,\n sync_task_with_dependencies,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n di_workflow,\n ],\n )\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/dependency_injection/worker.py', - blocks: { - declare_dependencies: { - start: 16, - stop: 23, - }, - inject_dependencies: { - start: 32, - stop: 44, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable/index.ts deleted file mode 100644 index af17e8b66..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/durable/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_durable from './test_durable'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_durable }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable/test_durable.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable/test_durable.ts deleted file mode 100644 index 12bbaf42a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/durable/test_durable.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from 
'@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nimport pytest\n\nfrom examples.durable.worker import (\n EVENT_KEY,\n SLEEP_TIME,\n durable_workflow,\n wait_for_sleep_twice,\n)\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_durable(hatchet: Hatchet) -> None:\n ref = durable_workflow.run_no_wait()\n\n await asyncio.sleep(SLEEP_TIME + 10)\n\n hatchet.event.push(EVENT_KEY, {"test": "test"})\n\n result = await ref.aio_result()\n\n workers = await hatchet.workers.aio_list()\n\n assert workers.rows\n\n active_workers = [w for w in workers.rows if w.status == "ACTIVE"]\n\n assert len(active_workers) == 2\n assert any(\n w.name == hatchet.config.apply_namespace("e2e-test-worker")\n for w in active_workers\n )\n assert any(\n w.name == hatchet.config.apply_namespace("e2e-test-worker_durable")\n for w in active_workers\n )\n\n assert result["durable_task"]["status"] == "success"\n\n wait_group_1 = result["wait_for_or_group_1"]\n wait_group_2 = result["wait_for_or_group_2"]\n\n assert abs(wait_group_1["runtime"] - SLEEP_TIME) < 3\n\n assert wait_group_1["key"] == wait_group_2["key"]\n assert wait_group_1["key"] == "CREATE"\n assert "sleep" in wait_group_1["event_id"]\n assert "event" in wait_group_2["event_id"]\n\n wait_for_multi_sleep = result["wait_for_multi_sleep"]\n\n assert wait_for_multi_sleep["runtime"] > 3 * SLEEP_TIME\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_durable_sleep_cancel_replay(hatchet: Hatchet) -> None:\n first_sleep = await wait_for_sleep_twice.aio_run_no_wait()\n\n await asyncio.sleep(SLEEP_TIME / 2)\n\n await hatchet.runs.aio_cancel(first_sleep.workflow_run_id)\n\n await first_sleep.aio_result()\n\n await hatchet.runs.aio_replay(\n first_sleep.workflow_run_id,\n )\n\n second_sleep_result = await first_sleep.aio_result()\n\n """We\'ve already slept for a little bit by the time the task is cancelled"""\n assert second_sleep_result["runtime"] <= SLEEP_TIME\n', - source: 'out/python/durable/test_durable.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable/trigger.ts deleted file mode 100644 index 805ed58f8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/durable/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\n\nfrom examples.durable.worker import (\n EVENT_KEY,\n SLEEP_TIME,\n durable_workflow,\n ephemeral_workflow,\n hatchet,\n)\n\ndurable_workflow.run_no_wait()\nephemeral_workflow.run_no_wait()\n\nprint("Sleeping")\ntime.sleep(SLEEP_TIME + 2)\n\nprint("Pushing event")\nhatchet.event.push(EVENT_KEY, {})\n', - source: 'out/python/durable/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable/worker.ts deleted file mode 100644 index 71f8095ac..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/durable/worker.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nimport time\nfrom datetime import 
timedelta\nfrom uuid import uuid4\n\nfrom hatchet_sdk import (\n Context,\n DurableContext,\n EmptyModel,\n Hatchet,\n SleepCondition,\n UserEventCondition,\n or_,\n)\n\nhatchet = Hatchet(debug=True)\n\n# > Create a durable workflow\ndurable_workflow = hatchet.workflow(name="DurableWorkflow")\n\n\nephemeral_workflow = hatchet.workflow(name="EphemeralWorkflow")\n\n\n# > Add durable task\nEVENT_KEY = "durable-example:event"\nSLEEP_TIME = 5\n\n\n@durable_workflow.task()\nasync def ephemeral_task(input: EmptyModel, ctx: Context) -> None:\n print("Running non-durable task")\n\n\n@durable_workflow.durable_task()\nasync def durable_task(input: EmptyModel, ctx: DurableContext) -> dict[str, str]:\n print("Waiting for sleep")\n await ctx.aio_sleep_for(duration=timedelta(seconds=SLEEP_TIME))\n print("Sleep finished")\n\n print("Waiting for event")\n await ctx.aio_wait_for(\n "event",\n UserEventCondition(event_key=EVENT_KEY, expression="true"),\n )\n print("Event received")\n\n return {\n "status": "success",\n }\n\n\n\n\n# > Add durable tasks that wait for or groups\n\n\n@durable_workflow.durable_task()\nasync def wait_for_or_group_1(\n _i: EmptyModel, ctx: DurableContext\n) -> dict[str, str | int]:\n start = time.time()\n wait_result = await ctx.aio_wait_for(\n uuid4().hex,\n or_(\n SleepCondition(timedelta(seconds=SLEEP_TIME)),\n UserEventCondition(event_key=EVENT_KEY),\n ),\n )\n\n key = list(wait_result.keys())[0]\n event_id = list(wait_result[key].keys())[0]\n\n return {\n "runtime": int(time.time() - start),\n "key": key,\n "event_id": event_id,\n }\n\n\n\n\n@durable_workflow.durable_task()\nasync def wait_for_or_group_2(\n _i: EmptyModel, ctx: DurableContext\n) -> dict[str, str | int]:\n start = time.time()\n wait_result = await ctx.aio_wait_for(\n uuid4().hex,\n or_(\n SleepCondition(timedelta(seconds=6 * SLEEP_TIME)),\n UserEventCondition(event_key=EVENT_KEY),\n ),\n )\n\n key = list(wait_result.keys())[0]\n event_id = list(wait_result[key].keys())[0]\n\n return {\n "runtime": int(time.time() - start),\n "key": key,\n "event_id": event_id,\n }\n\n\n@durable_workflow.durable_task()\nasync def wait_for_multi_sleep(\n _i: EmptyModel, ctx: DurableContext\n) -> dict[str, str | int]:\n start = time.time()\n\n for _ in range(3):\n await ctx.aio_sleep_for(\n timedelta(seconds=SLEEP_TIME),\n )\n\n return {\n "runtime": int(time.time() - start),\n }\n\n\n@ephemeral_workflow.task()\ndef ephemeral_task_2(input: EmptyModel, ctx: Context) -> None:\n print("Running non-durable task")\n\n\n@hatchet.durable_task()\nasync def wait_for_sleep_twice(\n input: EmptyModel, ctx: DurableContext\n) -> dict[str, int]:\n try:\n start = time.time()\n\n await ctx.aio_sleep_for(\n timedelta(seconds=SLEEP_TIME),\n )\n\n return {\n "runtime": int(time.time() - start),\n }\n except asyncio.CancelledError:\n return {"runtime": -1}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "durable-worker",\n workflows=[durable_workflow, ephemeral_workflow, wait_for_sleep_twice],\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/durable/worker.py', - blocks: { - create_a_durable_workflow: { - start: 19, - stop: 19, - }, - add_durable_task: { - start: 26, - stop: 52, - }, - add_durable_tasks_that_wait_for_or_groups: { - start: 56, - stop: 80, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable_event/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable_event/index.ts deleted file mode 100644 
index 245a4ee68..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/durable_event/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import trigger from './trigger'; -import worker from './worker'; - -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable_event/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable_event/trigger.ts deleted file mode 100644 index 8bb9178e9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/durable_event/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\n\nfrom examples.durable_event.worker import (\n EVENT_KEY,\n durable_event_task,\n durable_event_task_with_filter,\n hatchet,\n)\n\ndurable_event_task.run_no_wait()\ndurable_event_task_with_filter.run_no_wait()\n\nprint("Sleeping")\ntime.sleep(2)\n\nprint("Pushing event")\nhatchet.event.push(\n EVENT_KEY,\n {\n "user_id": "1234",\n },\n)\n', - source: 'out/python/durable_event/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable_event/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable_event/worker.ts deleted file mode 100644 index 45141f706..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/durable_event/worker.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import DurableContext, EmptyModel, Hatchet, UserEventCondition\n\nhatchet = Hatchet(debug=True)\n\nEVENT_KEY = "user:update"\n\n\n# > Durable Event\n@hatchet.durable_task(name="DurableEventTask")\nasync def durable_event_task(input: EmptyModel, ctx: DurableContext) -> None:\n res = await ctx.aio_wait_for(\n "event",\n UserEventCondition(event_key="user:update"),\n )\n\n print("got event", res)\n\n\n\n\n@hatchet.durable_task(name="DurableEventWithFilterTask")\nasync def durable_event_task_with_filter(\n input: EmptyModel, ctx: DurableContext\n) -> None:\n # > Durable Event With Filter\n res = await ctx.aio_wait_for(\n "event",\n UserEventCondition(\n event_key="user:update", expression="input.user_id == \'1234\'"\n ),\n )\n\n print("got event", res)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "durable-event-worker",\n workflows=[durable_event_task, durable_event_task_with_filter],\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/durable_event/worker.py', - blocks: { - durable_event: { - start: 9, - stop: 18, - }, - durable_event_with_filter: { - start: 26, - stop: 31, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/index.ts deleted file mode 100644 index 245a4ee68..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import trigger from './trigger'; -import worker from './worker'; - -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/trigger.ts deleted file mode 100644 index 5d91271f0..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.durable_sleep.worker import durable_sleep_task\n\ndurable_sleep_task.run_no_wait()\n', - source: 'out/python/durable_sleep/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/worker.ts deleted file mode 100644 index 785ab7bc7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/durable_sleep/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from datetime import timedelta\n\nfrom hatchet_sdk import DurableContext, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Durable Sleep\n@hatchet.durable_task(name="DurableSleepTask")\nasync def durable_sleep_task(input: EmptyModel, ctx: DurableContext) -> None:\n res = await ctx.aio_sleep_for(timedelta(seconds=5))\n\n print("got result", res)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker("durable-sleep-worker", workflows=[durable_sleep_task])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/durable_sleep/worker.py', - blocks: { - durable_sleep: { - start: 9, - stop: 15, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/events/event.ts b/frontend/app/src/next/lib/docs/generated/snips/python/events/event.ts deleted file mode 100644 index 91515f792..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/events/event.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n# > Event trigger\nhatchet.event.push("user:create", {"should_skip": False})\n', - source: 'out/python/events/event.py', - blocks: { - event_trigger: { - start: 6, - stop: 6, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/events/filter.ts b/frontend/app/src/next/lib/docs/generated/snips/python/events/filter.ts deleted file mode 100644 index bf14d081a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/events/filter.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.events.worker import EVENT_KEY, event_workflow\nfrom hatchet_sdk import Hatchet, PushEventOptions\n\nhatchet = Hatchet()\n\n# > Create a filter\nhatchet.filters.create(\n workflow_id=event_workflow.id,\n expression="input.should_skip == false",\n scope="foobarbaz",\n payload={\n "main_character": "Anna",\n "supporting_character": "Stiva",\n "location": "Moscow",\n },\n)\n\n# > Skip a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n "should_skip": True,\n },\n options=PushEventOptions(\n scope="foobarbaz",\n ),\n)\n\n# > Trigger a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n "should_skip": False,\n },\n options=PushEventOptions(\n scope="foobarbaz",\n ),\n)\n', - source: 
'out/python/events/filter.py', - blocks: { - create_a_filter: { - start: 7, - stop: 16, - }, - skip_a_run: { - start: 19, - stop: 27, - }, - trigger_a_run: { - start: 30, - stop: 38, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/events/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/events/index.ts deleted file mode 100644 index c9ab180d1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/events/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import event from './event'; -import filter from './filter'; -import test_event from './test_event'; -import worker from './worker'; - -export { event }; -export { filter }; -export { test_event }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/events/test_event.ts b/frontend/app/src/next/lib/docs/generated/snips/python/events/test_event.ts deleted file mode 100644 index 6ffa16269..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/events/test_event.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nimport json\nfrom collections.abc import AsyncGenerator\nfrom contextlib import asynccontextmanager\nfrom datetime import datetime, timedelta, timezone\nfrom typing import cast\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.events.worker import (\n EVENT_KEY,\n SECONDARY_KEY,\n WILDCARD_KEY,\n EventWorkflowInput,\n event_workflow,\n)\nfrom hatchet_sdk.clients.events import (\n BulkPushEventOptions,\n BulkPushEventWithMetadata,\n PushEventOptions,\n)\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\nfrom hatchet_sdk.contracts.events_pb2 import Event\nfrom hatchet_sdk.hatchet import Hatchet\n\n\nclass ProcessedEvent(BaseModel):\n id: str\n payload: dict[str, str | bool]\n meta: dict[str, str | bool | int]\n should_have_runs: bool\n test_run_id: str\n\n def __hash__(self) -> int:\n return hash(self.model_dump_json())\n\n\n@asynccontextmanager\nasync def event_filter(\n hatchet: Hatchet,\n test_run_id: str,\n expression: str | None = None,\n payload: dict[str, str] = {},\n scope: str | None = None,\n) -> AsyncGenerator[None, None]:\n expression = (\n expression\n or f"input.should_skip == false && payload.test_run_id == \'{test_run_id}\'"\n )\n\n f = await hatchet.filters.aio_create(\n workflow_id=event_workflow.id,\n expression=expression,\n scope=scope or test_run_id,\n payload={"test_run_id": test_run_id, **payload},\n )\n\n try:\n yield\n finally:\n await hatchet.filters.aio_delete(f.metadata.id)\n\n\nasync def fetch_runs_for_event(\n hatchet: Hatchet, event: Event\n) -> tuple[ProcessedEvent, list[V1TaskSummary]]:\n runs = await hatchet.runs.aio_list(triggering_event_external_id=event.eventId)\n\n meta = (\n cast(dict[str, str | int | bool], json.loads(event.additionalMetadata))\n if event.additionalMetadata\n else {}\n )\n payload = (\n cast(dict[str, str | bool], json.loads(event.payload)) if event.payload else {}\n )\n\n processed_event = ProcessedEvent(\n id=event.eventId,\n payload=payload,\n meta=meta,\n should_have_runs=meta.get("should_have_runs", False) is True,\n test_run_id=cast(str, meta["test_run_id"]),\n )\n\n if not all([r.output for r in runs.rows]):\n return (processed_event, [])\n\n return (\n 
processed_event,\n runs.rows or [],\n )\n\n\nasync def wait_for_result(\n hatchet: Hatchet, events: list[Event]\n) -> dict[ProcessedEvent, list[V1TaskSummary]]:\n await asyncio.sleep(3)\n\n since = datetime.now(tz=timezone.utc) - timedelta(minutes=2)\n\n persisted = (await hatchet.event.aio_list(limit=100, since=since)).rows or []\n\n assert {e.eventId for e in events}.issubset({e.metadata.id for e in persisted})\n\n iters = 0\n while True:\n print("Waiting for event runs to complete...")\n if iters > 15:\n print("Timed out waiting for event runs to complete.")\n return {\n ProcessedEvent(\n id=event.eventId,\n payload=json.loads(event.payload) if event.payload else {},\n meta=(\n json.loads(event.additionalMetadata)\n if event.additionalMetadata\n else {}\n ),\n should_have_runs=False,\n test_run_id=cast(\n str, json.loads(event.additionalMetadata).get("test_run_id", "")\n ),\n ): []\n for event in events\n }\n\n iters += 1\n\n event_runs = await asyncio.gather(\n *[fetch_runs_for_event(hatchet, event) for event in events]\n )\n\n all_empty = all(not event_run for _, event_run in event_runs)\n\n if all_empty:\n await asyncio.sleep(1)\n continue\n\n event_id_to_runs = {event_id: runs for (event_id, runs) in event_runs}\n\n any_queued_or_running = any(\n run.status in [V1TaskStatus.QUEUED, V1TaskStatus.RUNNING]\n for runs in event_id_to_runs.values()\n for run in runs\n )\n\n if any_queued_or_running:\n await asyncio.sleep(1)\n continue\n\n break\n\n return event_id_to_runs\n\n\nasync def wait_for_result_and_assert(hatchet: Hatchet, events: list[Event]) -> None:\n event_to_runs = await wait_for_result(hatchet, events)\n\n for event, runs in event_to_runs.items():\n await assert_event_runs_processed(event, runs)\n\n\nasync def assert_event_runs_processed(\n event: ProcessedEvent,\n runs: list[V1TaskSummary],\n) -> None:\n runs = [\n run\n for run in runs\n if (run.additional_metadata or {}).get("hatchet__event_id") == event.id\n ]\n\n if event.should_have_runs:\n assert len(runs) > 0\n\n for run in runs:\n assert run.status == V1TaskStatus.COMPLETED\n assert run.output.get("test_run_id") == event.test_run_id\n else:\n assert len(runs) == 0\n\n\ndef bpi(\n index: int = 1,\n test_run_id: str = "",\n should_skip: bool = False,\n should_have_runs: bool = True,\n key: str = EVENT_KEY,\n payload: dict[str, str] = {},\n scope: str | None = None,\n) -> BulkPushEventWithMetadata:\n return BulkPushEventWithMetadata(\n key=key,\n payload={\n "should_skip": should_skip,\n **payload,\n },\n additional_metadata={\n "should_have_runs": should_have_runs,\n "test_run_id": test_run_id,\n "key": index,\n },\n scope=scope,\n )\n\n\ndef cp(should_skip: bool) -> dict[str, bool]:\n return EventWorkflowInput(should_skip=should_skip).model_dump()\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_event_push(hatchet: Hatchet) -> None:\n e = hatchet.event.push(EVENT_KEY, cp(False))\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_async_event_push(hatchet: Hatchet) -> None:\n e = await hatchet.event.aio_push(EVENT_KEY, cp(False))\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_async_event_bulk_push(hatchet: Hatchet) -> None:\n events = [\n BulkPushEventWithMetadata(\n key="event1",\n payload={"message": "This is event 1", "should_skip": False},\n additional_metadata={"source": "test", "user_id": "user123"},\n ),\n BulkPushEventWithMetadata(\n key="event2",\n payload={"message": "This is event 2", "should_skip": 
False},\n additional_metadata={"source": "test", "user_id": "user456"},\n ),\n BulkPushEventWithMetadata(\n key="event3",\n payload={"message": "This is event 3", "should_skip": False},\n additional_metadata={"source": "test", "user_id": "user789"},\n ),\n ]\n opts = BulkPushEventOptions(namespace="bulk-test")\n\n e = await hatchet.event.aio_bulk_push(events, opts)\n\n assert len(e) == 3\n\n # Sort both lists of events by their key to ensure comparison order\n sorted_events = sorted(events, key=lambda x: x.key)\n sorted_returned_events = sorted(e, key=lambda x: x.key)\n namespace = "bulk-test"\n\n # Check that the returned events match the original events\n for original_event, returned_event in zip(\n sorted_events, sorted_returned_events, strict=False\n ):\n assert returned_event.key == namespace + original_event.key\n\n\n@pytest.fixture(scope="function")\ndef test_run_id() -> str:\n return str(uuid4())\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_event_engine_behavior(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n events = [\n bpi(\n test_run_id=test_run_id,\n ),\n bpi(\n test_run_id=test_run_id,\n key="thisisafakeeventfoobarbaz",\n should_have_runs=False,\n ),\n ]\n\n result = await hatchet.event.aio_bulk_push(events)\n\n await wait_for_result_and_assert(hatchet, result)\n\n\ndef gen_bulk_events(test_run_id: str) -> list[BulkPushEventWithMetadata]:\n return [\n ## No scope, so it shouldn\'t have any runs\n bpi(\n index=1,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=False,\n ),\n ## No scope, so it shouldn\'t have any runs\n bpi(\n index=2,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n ),\n ## Scope is set and `should_skip` is False, so it should have runs\n bpi(\n index=3,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=True,\n scope=test_run_id,\n ),\n ## Scope is set and `should_skip` is True, so it shouldn\'t have runs\n bpi(\n index=4,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n scope=test_run_id,\n ),\n ## Scope is set, `should_skip` is False, but key is different, so it shouldn\'t have runs\n bpi(\n index=5,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n scope=test_run_id,\n key="thisisafakeeventfoobarbaz",\n ),\n ## Scope is set, `should_skip` is False, but key is different, so it shouldn\'t have runs\n bpi(\n index=6,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=False,\n scope=test_run_id,\n key="thisisafakeeventfoobarbaz",\n ),\n ]\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_event_skipping_filtering(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(hatchet, test_run_id):\n events = gen_bulk_events(test_run_id)\n\n result = await hatchet.event.aio_bulk_push(events)\n\n await wait_for_result_and_assert(hatchet, result)\n\n\nasync def bulk_to_single(hatchet: Hatchet, event: BulkPushEventWithMetadata) -> Event:\n return await hatchet.event.aio_push(\n event_key=event.key,\n payload=event.payload,\n options=PushEventOptions(\n scope=event.scope,\n additional_metadata=event.additional_metadata,\n priority=event.priority,\n ),\n )\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_event_skipping_filtering_no_bulk(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(hatchet, test_run_id):\n raw_events = gen_bulk_events(test_run_id)\n events = await asyncio.gather(\n *[bulk_to_single(hatchet, event) for event in raw_events]\n )\n\n 
await wait_for_result_and_assert(hatchet, events)\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_event_payload_filtering(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n "input.should_skip == false && payload.foobar == \'baz\'",\n {"foobar": "qux"},\n ):\n event = await hatchet.event.aio_push(\n event_key=EVENT_KEY,\n payload={"message": "This is event 1", "should_skip": False},\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n "should_have_runs": False,\n "test_run_id": test_run_id,\n "key": 1,\n },\n ),\n )\n\n await wait_for_result_and_assert(hatchet, [event])\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_event_payload_filtering_with_payload_match(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n "input.should_skip == false && payload.foobar == \'baz\'",\n {"foobar": "baz"},\n ):\n event = await hatchet.event.aio_push(\n event_key=EVENT_KEY,\n payload={"message": "This is event 1", "should_skip": False},\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n "should_have_runs": True,\n "test_run_id": test_run_id,\n "key": 1,\n },\n ),\n )\n\n await wait_for_result_and_assert(hatchet, [event])\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_filtering_by_event_key(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n f"event_key == \'{SECONDARY_KEY}\'",\n ):\n event_1 = await hatchet.event.aio_push(\n event_key=SECONDARY_KEY,\n payload={\n "message": "Should run because filter matches",\n "should_skip": False,\n },\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n "should_have_runs": True,\n "test_run_id": test_run_id,\n },\n ),\n )\n event_2 = await hatchet.event.aio_push(\n event_key=EVENT_KEY,\n payload={\n "message": "Should skip because filter does not match",\n "should_skip": False,\n },\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n "should_have_runs": False,\n "test_run_id": test_run_id,\n },\n ),\n )\n\n await wait_for_result_and_assert(hatchet, [event_1, event_2])\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_key_wildcards(hatchet: Hatchet, test_run_id: str) -> None:\n keys = [\n WILDCARD_KEY.replace("*", "1"),\n WILDCARD_KEY.replace("*", "2"),\n "foobar",\n EVENT_KEY,\n ]\n\n async with event_filter(\n hatchet,\n test_run_id,\n ):\n events = [\n await hatchet.event.aio_push(\n event_key=key,\n payload={\n "should_skip": False,\n },\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n "should_have_runs": key != "foobar",\n "test_run_id": test_run_id,\n },\n ),\n )\n for key in keys\n ]\n\n await wait_for_result_and_assert(hatchet, events)\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_multiple_runs_for_multiple_scope_matches(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(\n hatchet, test_run_id, payload={"filter_id": "1"}, expression="1 == 1"\n ):\n async with event_filter(\n hatchet, test_run_id, payload={"filter_id": "2"}, expression="2 == 2"\n ):\n event = await hatchet.event.aio_push(\n event_key=EVENT_KEY,\n payload={\n "should_skip": False,\n },\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n "should_have_runs": True,\n "test_run_id": test_run_id,\n },\n ),\n )\n\n event_to_runs = await wait_for_result(hatchet, [event])\n\n assert len(event_to_runs.keys()) == 1\n\n 
runs = list(event_to_runs.values())[0]\n\n assert len(runs) == 2\n\n assert {r.output.get("filter_id") for r in runs} == {"1", "2"}\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_multi_scope_bug(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(hatchet, test_run_id, expression="1 == 1", scope="a"):\n async with event_filter(\n hatchet,\n test_run_id,\n expression="2 == 2",\n scope="b",\n ):\n events = await hatchet.event.aio_bulk_push(\n [\n BulkPushEventWithMetadata(\n key=EVENT_KEY,\n payload={\n "should_skip": False,\n },\n additional_metadata={\n "should_have_runs": True,\n "test_run_id": test_run_id,\n },\n scope="a" if i % 2 == 0 else "b",\n )\n for i in range(100)\n ],\n )\n\n await asyncio.sleep(15)\n\n for event in events:\n runs = await hatchet.runs.aio_list(\n triggering_event_external_id=event.eventId,\n additional_metadata={"test_run_id": test_run_id},\n )\n\n assert len(runs.rows) == 1\n', - source: 'out/python/events/test_event.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/events/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/events/worker.ts deleted file mode 100644 index c2c5c61ff..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/events/worker.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, DefaultFilter, Hatchet\n\nhatchet = Hatchet()\n\n\n# > Event trigger\nEVENT_KEY = "user:create"\nSECONDARY_KEY = "foobarbaz"\nWILDCARD_KEY = "subscription:*"\n\n\nclass EventWorkflowInput(BaseModel):\n should_skip: bool\n\n\nevent_workflow = hatchet.workflow(\n name="EventWorkflow",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n)\n\n# > Event trigger with filter\nevent_workflow_with_filter = hatchet.workflow(\n name="EventWorkflow",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n default_filters=[\n DefaultFilter(\n expression="true",\n scope="example-scope",\n payload={\n "main_character": "Anna",\n "supporting_character": "Stiva",\n "location": "Moscow",\n },\n )\n ],\n)\n\n\n@event_workflow.task()\ndef task(input: EventWorkflowInput, ctx: Context) -> dict[str, str]:\n print("event received")\n\n return ctx.filter_payload\n\n\n# > Accessing the filter payload\n@event_workflow_with_filter.task()\ndef filtered_task(input: EventWorkflowInput, ctx: Context) -> None:\n print(ctx.filter_payload)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(name="EventWorker", workflows=[event_workflow])\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/events/worker.py', - blocks: { - event_trigger: { - start: 9, - stop: 22, - }, - event_trigger_with_filter: { - start: 25, - stop: 40, - }, - accessing_the_filter_payload: { - start: 51, - stop: 55, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout/index.ts deleted file mode 100644 index 34bbe2039..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import stream from './stream'; -import sync_stream from './sync_stream'; -import test_fanout from 
'./test_fanout'; -import trigger from './trigger'; -import worker from './worker'; - -export { stream }; -export { sync_stream }; -export { test_fanout }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/stream.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout/stream.ts deleted file mode 100644 index da6e68ec7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/stream.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nimport random\n\nfrom examples.fanout.worker import ParentInput, parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\n\nasync def main() -> None:\n\n hatchet = Hatchet()\n\n # Generate a random stream key to use to track all\n # stream events for this workflow run.\n\n streamKey = "streamKey"\n streamVal = f"sk-{random.randint(1, 100)}"\n\n # Specify the stream key as additional metadata\n # when running the workflow.\n\n # This key gets propagated to all child workflows\n # and can have an arbitrary property name.\n\n parent_wf.run(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={streamKey: streamVal}),\n )\n\n # Stream all events for the additional meta key value\n listener = hatchet.listener.stream_by_additional_metadata(streamKey, streamVal)\n\n async for event in listener:\n print(event.type, event.payload)\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/fanout/stream.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/sync_stream.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout/sync_stream.ts deleted file mode 100644 index e15edf797..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/sync_stream.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import random\n\nfrom examples.fanout.worker import ParentInput, parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\n\ndef main() -> None:\n\n hatchet = Hatchet()\n\n # Generate a random stream key to use to track all\n # stream events for this workflow run.\n\n streamKey = "streamKey"\n streamVal = f"sk-{random.randint(1, 100)}"\n\n # Specify the stream key as additional metadata\n # when running the workflow.\n\n # This key gets propagated to all child workflows\n # and can have an arbitrary property name.\n\n parent_wf.run(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={streamKey: streamVal}),\n )\n\n # Stream all events for the additional meta key value\n listener = hatchet.listener.stream_by_additional_metadata(streamKey, streamVal)\n\n for event in listener:\n print(event.type, event.payload)\n\n print("DONE.")\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/fanout/sync_stream.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/test_fanout.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout/test_fanout.ts deleted file mode 100644 index a6afc8da4..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/test_fanout.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nfrom uuid import uuid4\n\nimport pytest\n\nfrom examples.fanout.worker import ParentInput, parent_wf\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_run(hatchet: Hatchet) -> None:\n ref = await parent_wf.aio_run_no_wait(\n ParentInput(n=2),\n )\n\n result = await ref.aio_result()\n\n assert len(result["spawn"]["results"]) == 2\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_additional_metadata_propagation(hatchet: Hatchet) -> None:\n test_run_id = uuid4().hex\n\n ref = await parent_wf.aio_run_no_wait(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(\n additional_metadata={"test_run_id": test_run_id}\n ),\n )\n\n await ref.aio_result()\n await asyncio.sleep(1)\n\n runs = await hatchet.runs.aio_list(\n parent_task_external_id=ref.workflow_run_id,\n additional_metadata={"test_run_id": test_run_id},\n )\n\n assert runs.rows\n\n """Assert that the additional metadata is propagated to the child runs."""\n for run in runs.rows:\n assert run.additional_metadata\n assert run.additional_metadata["test_run_id"] == test_run_id\n\n assert run.children\n for child in run.children:\n assert child.additional_metadata\n assert child.additional_metadata["test_run_id"] == test_run_id\n', - source: 'out/python/fanout/test_fanout.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout/trigger.ts deleted file mode 100644 index c041a9520..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nfrom examples.fanout.worker import ParentInput, parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\nhatchet = Hatchet()\n\n\nasync def main() -> None:\n await parent_wf.aio_run(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={"hello": "moon"}),\n )\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/fanout/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout/worker.ts deleted file mode 100644 index 39897b3de..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/fanout/worker.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from datetime import timedelta\nfrom typing import Any\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet, TriggerWorkflowOptions\n\nhatchet = Hatchet(debug=True)\n\n\n# > FanoutParent\nclass ParentInput(BaseModel):\n n: int = 100\n\n\nclass ChildInput(BaseModel):\n a: str\n\n\nparent_wf = hatchet.workflow(name="FanoutParent", input_validator=ParentInput)\nchild_wf = hatchet.workflow(name="FanoutChild", 
input_validator=ChildInput)\n\n\n@parent_wf.task(execution_timeout=timedelta(minutes=5))\nasync def spawn(input: ParentInput, ctx: Context) -> dict[str, Any]:\n print("spawning child")\n\n result = await child_wf.aio_run_many(\n [\n child_wf.create_bulk_run_item(\n input=ChildInput(a=str(i)),\n options=TriggerWorkflowOptions(\n additional_metadata={"hello": "earth"}, key=f"child{i}"\n ),\n )\n for i in range(input.n)\n ],\n )\n\n print(f"results {result}")\n\n return {"results": result}\n\n\n\n\n# > FanoutChild\n@child_wf.task()\nasync def process(input: ChildInput, ctx: Context) -> dict[str, str]:\n print(f"child process {input.a}")\n return {"status": input.a}\n\n\n@child_wf.task(parents=[process])\nasync def process2(input: ChildInput, ctx: Context) -> dict[str, str]:\n process_output = ctx.task_output(process)\n a = process_output["status"]\n\n return {"status2": a + "2"}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker("fanout-worker", slots=40, workflows=[parent_wf, child_wf])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/fanout/worker.py', - blocks: { - fanoutparent: { - start: 12, - stop: 44, - }, - fanoutchild: { - start: 48, - stop: 61, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/index.ts deleted file mode 100644 index 22c8b3f2c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_fanout_sync from './test_fanout_sync'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_fanout_sync }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/test_fanout_sync.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/test_fanout_sync.ts deleted file mode 100644 index b82fd1373..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/test_fanout_sync.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nfrom uuid import uuid4\n\nimport pytest\n\nfrom examples.fanout_sync.worker import ParentInput, sync_fanout_parent\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\n\n\ndef test_run() -> None:\n N = 2\n\n result = sync_fanout_parent.run(ParentInput(n=N))\n\n assert len(result["spawn"]["results"]) == N\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_additional_metadata_propagation_sync(hatchet: Hatchet) -> None:\n test_run_id = uuid4().hex\n\n ref = await sync_fanout_parent.aio_run_no_wait(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(\n additional_metadata={"test_run_id": test_run_id}\n ),\n )\n\n await ref.aio_result()\n await asyncio.sleep(1)\n\n runs = await hatchet.runs.aio_list(\n parent_task_external_id=ref.workflow_run_id,\n additional_metadata={"test_run_id": test_run_id},\n )\n\n assert runs.rows\n\n """Assert that the additional metadata is propagated to the child runs."""\n for run in runs.rows:\n assert run.additional_metadata\n assert run.additional_metadata["test_run_id"] == test_run_id\n\n assert run.children\n for child in run.children:\n assert child.additional_metadata\n assert child.additional_metadata["test_run_id"] == test_run_id\n', - source: 
'out/python/fanout_sync/test_fanout_sync.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/trigger.ts deleted file mode 100644 index 087c70fb5..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nfrom examples.fanout_sync.worker import ParentInput, sync_fanout_parent\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\n\nhatchet = Hatchet()\n\n\nasync def main() -> None:\n sync_fanout_parent.run(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={"hello": "moon"}),\n )\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/fanout_sync/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/worker.ts deleted file mode 100644 index f6e78da69..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/fanout_sync/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from datetime import timedelta\nfrom typing import Any\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet, TriggerWorkflowOptions\n\nhatchet = Hatchet(debug=True)\n\n\nclass ParentInput(BaseModel):\n n: int = 5\n\n\nclass ChildInput(BaseModel):\n a: str\n\n\nsync_fanout_parent = hatchet.workflow(\n name="SyncFanoutParent", input_validator=ParentInput\n)\nsync_fanout_child = hatchet.workflow(name="SyncFanoutChild", input_validator=ChildInput)\n\n\n@sync_fanout_parent.task(execution_timeout=timedelta(minutes=5))\ndef spawn(input: ParentInput, ctx: Context) -> dict[str, list[dict[str, Any]]]:\n print("spawning child")\n\n results = sync_fanout_child.run_many(\n [\n sync_fanout_child.create_bulk_run_item(\n input=ChildInput(a=str(i)),\n key=f"child{i}",\n options=TriggerWorkflowOptions(additional_metadata={"hello": "earth"}),\n )\n for i in range(input.n)\n ],\n )\n\n print(f"results {results}")\n\n return {"results": results}\n\n\n@sync_fanout_child.task()\ndef process(input: ChildInput, ctx: Context) -> dict[str, str]:\n return {"status": "success " + input.a}\n\n\n@sync_fanout_child.task(parents=[process])\ndef process2(input: ChildInput, ctx: Context) -> dict[str, str]:\n process_output = ctx.task_output(process)\n a = process_output["status"]\n\n return {"status2": a + "2"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "sync-fanout-worker",\n slots=40,\n workflows=[sync_fanout_parent, sync_fanout_child],\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/fanout_sync/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/hatchet_client.ts b/frontend/app/src/next/lib/docs/generated/snips/python/hatchet_client.ts deleted file mode 100644 index 401a8c70e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/hatchet_client.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from 
'@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import Hatchet\n\n# Initialize Hatchet client\nhatchet = Hatchet()\n', - source: 'out/python/hatchet_client.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/index.ts deleted file mode 100644 index ef700dd95..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/index.ts +++ /dev/null @@ -1,99 +0,0 @@ -import __init__ from './__init__'; -import hatchet_client from './hatchet_client'; -import worker from './worker'; -import * as affinity_workers from './affinity_workers'; -import * as api from './api'; -import * as blocked_async from './blocked_async'; -import * as bulk_fanout from './bulk_fanout'; -import * as bulk_operations from './bulk_operations'; -import * as cancellation from './cancellation'; -import * as child from './child'; -import * as concurrency_limit from './concurrency_limit'; -import * as concurrency_limit_rr from './concurrency_limit_rr'; -import * as concurrency_limit_rr_load from './concurrency_limit_rr_load'; -import * as concurrency_multiple_keys from './concurrency_multiple_keys'; -import * as concurrency_workflow_level from './concurrency_workflow_level'; -import * as conditions from './conditions'; -import * as cron from './cron'; -import * as dag from './dag'; -import * as dedupe from './dedupe'; -import * as delayed from './delayed'; -import * as dependency_injection from './dependency_injection'; -import * as durable from './durable'; -import * as durable_event from './durable_event'; -import * as durable_sleep from './durable_sleep'; -import * as events from './events'; -import * as fanout from './fanout'; -import * as fanout_sync from './fanout_sync'; -import * as lifespans from './lifespans'; -import * as logger from './logger'; -import * as manual_slot_release from './manual_slot_release'; -import * as migration_guides from './migration_guides'; -import * as non_retryable from './non_retryable'; -import * as on_failure from './on_failure'; -import * as on_success from './on_success'; -import * as opentelemetry_instrumentation from './opentelemetry_instrumentation'; -import * as priority from './priority'; -import * as quickstart from './quickstart'; -import * as rate_limit from './rate_limit'; -import * as retries from './retries'; -import * as return_exceptions from './return_exceptions'; -import * as scheduled from './scheduled'; -import * as simple from './simple'; -import * as sticky_workers from './sticky_workers'; -import * as streaming from './streaming'; -import * as timeout from './timeout'; -import * as unit_testing from './unit_testing'; -import * as webhooks from './webhooks'; -import * as worker_existing_loop from './worker_existing_loop'; -import * as workflow_registration from './workflow_registration'; - -export { __init__ }; -export { hatchet_client }; -export { worker }; -export { affinity_workers }; -export { api }; -export { blocked_async }; -export { bulk_fanout }; -export { bulk_operations }; -export { cancellation }; -export { child }; -export { concurrency_limit }; -export { concurrency_limit_rr }; -export { concurrency_limit_rr_load }; -export { concurrency_multiple_keys }; -export { concurrency_workflow_level }; -export { conditions }; -export { cron }; -export { dag }; -export { dedupe }; -export { delayed }; -export { dependency_injection }; 
-export { durable }; -export { durable_event }; -export { durable_sleep }; -export { events }; -export { fanout }; -export { fanout_sync }; -export { lifespans }; -export { logger }; -export { manual_slot_release }; -export { migration_guides }; -export { non_retryable }; -export { on_failure }; -export { on_success }; -export { opentelemetry_instrumentation }; -export { priority }; -export { quickstart }; -export { rate_limit }; -export { retries }; -export { return_exceptions }; -export { scheduled }; -export { simple }; -export { sticky_workers }; -export { streaming }; -export { timeout }; -export { unit_testing }; -export { webhooks }; -export { worker_existing_loop }; -export { workflow_registration }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/index.ts deleted file mode 100644 index 8def07b87..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import simple from './simple'; -import test_lifespans from './test_lifespans'; -import trigger from './trigger'; -import worker from './worker'; - -export { simple }; -export { test_lifespans }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/simple.ts b/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/simple.ts deleted file mode 100644 index 186bacf6f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/simple.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Lifespan\n\nfrom collections.abc import AsyncGenerator\nfrom typing import cast\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass Lifespan(BaseModel):\n foo: str\n pi: float\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n yield Lifespan(foo="bar", pi=3.14)\n\n\n@hatchet.task(name="LifespanWorkflow")\ndef lifespan_task(input: EmptyModel, ctx: Context) -> Lifespan:\n return cast(Lifespan, ctx.lifespan)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "test-worker", slots=1, workflows=[lifespan_task], lifespan=lifespan\n )\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/lifespans/simple.py', - blocks: { - lifespan: { - start: 2, - stop: 33, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/test_lifespans.ts b/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/test_lifespans.ts deleted file mode 100644 index 7427fbf7c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/test_lifespans.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import pytest\n\nfrom examples.lifespans.simple import Lifespan, lifespan_task\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_lifespans() -> None:\n result = await lifespan_task.aio_run()\n\n assert isinstance(result, Lifespan)\n assert result.pi == 3.14\n assert result.foo == "bar"\n', - source: 'out/python/lifespans/test_lifespans.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git 
a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/trigger.ts deleted file mode 100644 index 0ba39bee9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.lifespans.worker import lifespan_workflow\n\nresult = lifespan_workflow.run()\n\nprint(result)\n', - source: 'out/python/lifespans/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/worker.ts deleted file mode 100644 index 7ffba68cc..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/lifespans/worker.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from collections.abc import AsyncGenerator\nfrom typing import cast\nfrom uuid import UUID\n\nfrom psycopg_pool import ConnectionPool\nfrom pydantic import BaseModel, ConfigDict\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Use the lifespan in a task\nclass TaskOutput(BaseModel):\n num_rows: int\n external_ids: list[UUID]\n\n\nlifespan_workflow = hatchet.workflow(name="LifespanWorkflow")\n\n\n@lifespan_workflow.task()\ndef sync_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute("SELECT * FROM v1_lookup_table_olap LIMIT 5;")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print("executed sync task with lifespan", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n\n\n@lifespan_workflow.task()\nasync def async_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute("SELECT * FROM v1_lookup_table_olap LIMIT 5;")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print("executed async task with lifespan", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n# > Define a lifespan\nclass Lifespan(BaseModel):\n model_config = ConfigDict(arbitrary_types_allowed=True)\n\n foo: str\n pool: ConnectionPool\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n print("Running lifespan!")\n with ConnectionPool("postgres://hatchet:hatchet@localhost:5431/hatchet") as pool:\n yield Lifespan(\n foo="bar",\n pool=pool,\n )\n\n print("Cleaning up lifespan!")\n\n\nworker = hatchet.worker(\n "test-worker", slots=1, workflows=[lifespan_workflow], lifespan=lifespan\n)\n\n\ndef main() -> None:\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/lifespans/worker.py', - blocks: { - use_the_lifespan_in_a_task: { - start: 14, - stop: 40, - }, - define_a_lifespan: { - start: 63, - stop: 83, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/logger/client.ts b/frontend/app/src/next/lib/docs/generated/snips/python/logger/client.ts deleted file mode 100644 index 
592843548..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/logger/client.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > RootLogger\n\n\nimport logging\n\nfrom hatchet_sdk import ClientConfig, Hatchet\n\nlogging.basicConfig(level=logging.INFO)\n\nroot_logger = logging.getLogger()\n\nhatchet = Hatchet(\n debug=True,\n config=ClientConfig(\n logger=root_logger,\n ),\n)\n\n', - source: 'out/python/logger/client.py', - blocks: { - rootlogger: { - start: 2, - stop: 18, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/logger/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/logger/index.ts deleted file mode 100644 index 5d96f45ea..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/logger/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import client from './client'; -import test_logger from './test_logger'; -import trigger from './trigger'; -import worker from './worker'; -import workflow from './workflow'; - -export { client }; -export { test_logger }; -export { trigger }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/logger/test_logger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/logger/test_logger.ts deleted file mode 100644 index b22ad2d96..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/logger/test_logger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import pytest\n\nfrom examples.logger.workflow import logging_workflow\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_run() -> None:\n result = await logging_workflow.aio_run()\n\n assert result["root_logger"]["status"] == "success"\n', - source: 'out/python/logger/test_logger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/logger/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/logger/trigger.ts deleted file mode 100644 index 2b990547f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/logger/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.logger.workflow import logging_workflow\n\nlogging_workflow.run()\n', - source: 'out/python/logger/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/logger/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/logger/worker.ts deleted file mode 100644 index dd85e5ce8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/logger/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.logger.client import hatchet\nfrom examples.logger.workflow import logging_workflow\n\n\ndef main() -> None:\n worker = hatchet.worker("logger-worker", slots=5, workflows=[logging_workflow])\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/logger/worker.py', - blocks: {}, - highlights: {}, -}; - -export default 
snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/logger/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/python/logger/workflow.ts deleted file mode 100644 index 48fe75854..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/logger/workflow.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > LoggingWorkflow\n\nimport logging\nimport time\n\nfrom examples.logger.client import hatchet\nfrom hatchet_sdk import Context, EmptyModel\n\nlogger = logging.getLogger(__name__)\n\nlogging_workflow = hatchet.workflow(\n name="LoggingWorkflow",\n)\n\n\n@logging_workflow.task()\ndef root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n logger.info(f"executed step1 - {i}")\n logger.info({"step1": "step1"})\n\n time.sleep(0.1)\n\n return {"status": "success"}\n\n\n\n# > ContextLogger\n\n\n@logging_workflow.task()\ndef context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n ctx.log(f"executed step1 - {i}")\n ctx.log({"step1": "step1"})\n\n time.sleep(0.1)\n\n return {"status": "success"}\n\n\n', - source: 'out/python/logger/workflow.py', - blocks: { - loggingworkflow: { - start: 2, - stop: 26, - }, - contextlogger: { - start: 29, - stop: 41, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/manual_slot_release/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/manual_slot_release/index.ts deleted file mode 100644 index c1b323985..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/manual_slot_release/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import worker from './worker'; - -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/manual_slot_release/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/manual_slot_release/worker.ts deleted file mode 100644 index 243742696..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/manual_slot_release/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet()\n\n# > SlotRelease\n\nslot_release_workflow = hatchet.workflow(name="SlotReleaseWorkflow")\n\n\n@slot_release_workflow.task()\ndef step1(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print("RESOURCE INTENSIVE PROCESS")\n time.sleep(10)\n\n # 👀 Release the slot after the resource-intensive process, so that other steps can run\n ctx.release_slot()\n\n print("NON RESOURCE INTENSIVE PROCESS")\n return {"status": "success"}\n\n\n', - source: 'out/python/manual_slot_release/worker.py', - blocks: { - slotrelease: { - start: 8, - stop: 23, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/__init__.ts b/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/__init__.ts deleted file mode 100644 index 95558e2dc..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/__init__.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: '', - source: 
'out/python/migration_guides/__init__.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/hatchet_client.ts b/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/hatchet_client.ts deleted file mode 100644 index 8d86d21a8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/hatchet_client.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: 'from hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n', - source: 'out/python/migration_guides/hatchet_client.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/index.ts deleted file mode 100644 index 8e102f5fb..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import __init__ from './__init__'; -import hatchet_client from './hatchet_client'; -import mergent from './mergent'; - -export { __init__ }; -export { hatchet_client }; -export { mergent }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/mergent.ts b/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/mergent.ts deleted file mode 100644 index 5cb922f23..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/migration_guides/mergent.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from collections.abc import Mapping\nfrom datetime import datetime, timedelta, timezone\nfrom typing import Any\n\nimport requests\nfrom pydantic import BaseModel\nfrom requests import Response\n\nfrom hatchet_sdk.context.context import Context\n\nfrom .hatchet_client import hatchet\n\n\nasync def process_image(image_url: str, filters: list[str]) -> dict[str, Any]:\n # Do some image processing\n return {"url": image_url, "size": 100, "format": "png"}\n\n\n# > Before (Mergent)\nasync def process_image_task(request: Any) -> dict[str, Any]:\n image_url = request.json["image_url"]\n filters = request.json["filters"]\n try:\n result = await process_image(image_url, filters)\n return {"success": True, "processed_url": result["url"]}\n except Exception as e:\n print(f"Image processing failed: {e}")\n raise\n\n\n\n\n# > After (Hatchet)\nclass ImageProcessInput(BaseModel):\n image_url: str\n filters: list[str]\n\n\nclass ImageProcessOutput(BaseModel):\n processed_url: str\n metadata: dict[str, Any]\n\n\n@hatchet.task(\n name="image-processor",\n retries=3,\n execution_timeout="10m",\n input_validator=ImageProcessInput,\n)\nasync def image_processor(input: ImageProcessInput, ctx: Context) -> ImageProcessOutput:\n # Do some image processing\n result = await process_image(input.image_url, input.filters)\n\n if not result["url"]:\n raise ValueError("Processing failed to generate URL")\n\n return ImageProcessOutput(\n processed_url=result["url"],\n metadata={\n "size": result["size"],\n "format": result["format"],\n "applied_filters": input.filters,\n },\n )\n\n\n\n\nasync def run() -> None:\n # > Running a task (Mergent)\n headers: Mapping[str, str] = {\n "Authorization": "Bearer ",\n "Content-Type": "application/json",\n }\n\n 
task_data = {\n "name": "4cf95241-fa19-47ef-8a67-71e483747649",\n "queue": "default",\n "request": {\n "url": "https://example.com",\n "headers": {\n "Authorization": "fake-secret-token",\n "Content-Type": "application/json",\n },\n "body": "Hello, world!",\n },\n }\n\n try:\n response: Response = requests.post(\n "https://api.mergent.co/v2/tasks",\n headers=headers,\n json=task_data,\n )\n print(response.json())\n except Exception as e:\n print(f"Error: {e}")\n\n # > Running a task (Hatchet)\n result = await image_processor.aio_run(\n ImageProcessInput(image_url="https://example.com/image.png", filters=["blur"])\n )\n\n # you can await fully typed results\n print(result)\n\n\nasync def schedule() -> None:\n # > Scheduling tasks (Mergent)\n options = {\n # same options as before\n "json": {\n # same body as before\n "delay": "5m"\n }\n }\n\n print(options)\n\n # > Scheduling tasks (Hatchet)\n # Schedule the task to run at a specific time\n run_at = datetime.now(tz=timezone.utc) + timedelta(days=1)\n await image_processor.aio_schedule(\n run_at,\n ImageProcessInput(image_url="https://example.com/image.png", filters=["blur"]),\n )\n\n # Schedule the task to run every hour\n await image_processor.aio_create_cron(\n "run-hourly",\n "0 * * * *",\n ImageProcessInput(image_url="https://example.com/image.png", filters=["blur"]),\n )\n', - source: 'out/python/migration_guides/mergent.py', - blocks: { - before_mergent: { - start: 20, - stop: 30, - }, - after_hatchet: { - start: 34, - stop: 66, - }, - running_a_task_mergent: { - start: 71, - stop: 97, - }, - running_a_task_hatchet: { - start: 100, - stop: 105, - }, - scheduling_tasks_mergent: { - start: 110, - stop: 116, - }, - scheduling_tasks_hatchet: { - start: 121, - stop: 133, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/index.ts deleted file mode 100644 index 1b7026e39..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_no_retry from './test_no_retry'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_no_retry }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/test_no_retry.ts b/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/test_no_retry.ts deleted file mode 100644 index d903b0047..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/test_no_retry.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nimport pytest\n\nfrom examples.non_retryable.worker import (\n non_retryable_workflow,\n should_not_retry,\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n)\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType\nfrom hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails\nfrom hatchet_sdk.exceptions import FailedTaskRunExceptionGroup\n\n\ndef find_id(runs: V1WorkflowRunDetails, match: str) -> str:\n return next(t.metadata.id for t in runs.tasks if match in t.display_name)\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_no_retry(hatchet: Hatchet) -> None:\n ref = await 
non_retryable_workflow.aio_run_no_wait()\n\n with pytest.raises(FailedTaskRunExceptionGroup) as exc_info:\n await ref.aio_result()\n\n exception_group = exc_info.value\n\n assert len(exception_group.exceptions) == 2\n\n exc_text = [e.exc for e in exception_group.exceptions]\n\n non_retries = [\n e\n for e in exc_text\n if "This task should retry because it\'s not a NonRetryableException" in e\n ]\n\n other_errors = [e for e in exc_text if "This task should not retry" in e]\n\n assert len(non_retries) == 1\n assert len(other_errors) == 1\n\n await asyncio.sleep(3)\n\n runs = await hatchet.runs.aio_get(ref.workflow_run_id)\n task_to_id = {\n task: find_id(runs, task.name)\n for task in [\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n should_not_retry,\n ]\n }\n\n retrying_events = [\n e for e in runs.task_events if e.event_type == V1TaskEventType.RETRYING\n ]\n\n """Only one task should be retried."""\n assert len(retrying_events) == 1\n\n """The task id of the retrying events should match the tasks that are retried"""\n assert retrying_events[0].task_id == task_to_id[should_retry_wrong_exception_type]\n\n """Three failed events should emit, one each for the two failing initial runs and one for the retry."""\n assert (\n len([e for e in runs.task_events if e.event_type == V1TaskEventType.FAILED])\n == 3\n )\n', - source: 'out/python/non_retryable/test_no_retry.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/trigger.ts deleted file mode 100644 index cb7209c93..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.non_retryable.worker import non_retryable_workflow\n\nnon_retryable_workflow.run_no_wait()\n', - source: 'out/python/non_retryable/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/worker.ts deleted file mode 100644 index f4827500f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/non_retryable/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import Context, EmptyModel, Hatchet\nfrom hatchet_sdk.exceptions import NonRetryableException\n\nhatchet = Hatchet(debug=True)\n\nnon_retryable_workflow = hatchet.workflow(name="NonRetryableWorkflow")\n\n\n# > Non-retryable task\n@non_retryable_workflow.task(retries=1)\ndef should_not_retry(input: EmptyModel, ctx: Context) -> None:\n raise NonRetryableException("This task should not retry")\n\n\n\n\n@non_retryable_workflow.task(retries=1)\ndef should_retry_wrong_exception_type(input: EmptyModel, ctx: Context) -> None:\n raise TypeError("This task should retry because it\'s not a NonRetryableException")\n\n\n@non_retryable_workflow.task(retries=1)\ndef should_not_retry_successful_task(input: EmptyModel, ctx: Context) -> None:\n pass\n\n\ndef main() -> None:\n worker = hatchet.worker("non-retry-worker", workflows=[non_retryable_workflow])\n\n worker.start()\n\n\nif __name__ == 
"__main__":\n main()\n', - source: 'out/python/non_retryable/worker.py', - blocks: { - non_retryable_task: { - start: 10, - stop: 14, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/index.ts deleted file mode 100644 index 8b8d7dfc1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_on_failure from './test_on_failure'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_on_failure }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/test_on_failure.ts b/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/test_on_failure.ts deleted file mode 100644 index 0e7816aff..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/test_on_failure.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nimport pytest\n\nfrom examples.on_failure.worker import on_failure_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_run_timeout(hatchet: Hatchet) -> None:\n run = on_failure_wf.run_no_wait()\n try:\n await run.aio_result()\n\n assert False, "Expected workflow to timeout"\n except Exception as e:\n assert "step1 failed" in str(e)\n\n await asyncio.sleep(5) # Wait for the on_failure job to finish\n\n details = await hatchet.runs.aio_get(run.workflow_run_id)\n\n assert len(details.tasks) == 2\n assert sum(t.status == V1TaskStatus.COMPLETED for t in details.tasks) == 1\n assert sum(t.status == V1TaskStatus.FAILED for t in details.tasks) == 1\n\n completed_task = next(\n t for t in details.tasks if t.status == V1TaskStatus.COMPLETED\n )\n failed_task = next(t for t in details.tasks if t.status == V1TaskStatus.FAILED)\n\n assert "on_failure" in completed_task.display_name\n assert "step1" in failed_task.display_name\n', - source: 'out/python/on_failure/test_on_failure.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/trigger.ts deleted file mode 100644 index b80ee910e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.on_failure.worker import on_failure_wf_with_details\n\non_failure_wf_with_details.run_no_wait()\n', - source: 'out/python/on_failure/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/worker.ts deleted file mode 100644 index 4e345a921..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/on_failure/worker.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import json\nfrom 
datetime import timedelta\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\nfrom hatchet_sdk.exceptions import TaskRunError\n\nhatchet = Hatchet(debug=False)\n\nERROR_TEXT = "step1 failed"\n\n# > OnFailure Step\n# This workflow will fail because the step will throw an error\n# we define an onFailure step to handle this case\n\non_failure_wf = hatchet.workflow(name="OnFailureWorkflow")\n\n\n@on_failure_wf.task(execution_timeout=timedelta(seconds=1))\ndef step1(input: EmptyModel, ctx: Context) -> None:\n # 👀 this step will always raise an exception\n raise Exception(ERROR_TEXT)\n\n\n# 👀 After the workflow fails, this special step will run\n@on_failure_wf.on_failure_task()\ndef on_failure(input: EmptyModel, ctx: Context) -> dict[str, str]:\n # 👀 we can do things like perform cleanup logic\n # or notify a user here\n\n # 👀 Fetch the errors from upstream step runs from the context\n print(ctx.task_run_errors)\n\n return {"status": "success"}\n\n\n\n\n# > OnFailure With Details\n# We can access the failure details in the onFailure step\n# via the context method\n\non_failure_wf_with_details = hatchet.workflow(name="OnFailureWorkflowWithDetails")\n\n\n# ... defined as above\n@on_failure_wf_with_details.task(execution_timeout=timedelta(seconds=1))\ndef details_step1(input: EmptyModel, ctx: Context) -> None:\n raise Exception(ERROR_TEXT)\n\n\n# 👀 After the workflow fails, this special step will run\n@on_failure_wf_with_details.on_failure_task()\ndef details_on_failure(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n error = ctx.get_task_run_error(details_step1)\n\n if not error:\n return {"status": "unexpected success"}\n\n # 👀 we can access the failure details here\n assert isinstance(error, TaskRunError)\n\n if "step1 failed" in error.exc:\n return {\n "status": "success",\n "failed_run_external_id": error.task_run_external_id,\n }\n\n raise Exception("unexpected failure")\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "on-failure-worker",\n slots=4,\n workflows=[on_failure_wf, on_failure_wf_with_details],\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/on_failure/worker.py', - blocks: { - onfailure_step: { - start: 12, - stop: 35, - }, - onfailure_with_details: { - start: 39, - stop: 70, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/on_success/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/on_success/index.ts deleted file mode 100644 index 245a4ee68..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/on_success/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import trigger from './trigger'; -import worker from './worker'; - -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/on_success/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/on_success/trigger.ts deleted file mode 100644 index fda3b93b3..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/on_success/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.on_success.worker import on_success_workflow\n\non_success_workflow.run_no_wait()\n', - source: 'out/python/on_success/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/on_success/worker.ts 
b/frontend/app/src/next/lib/docs/generated/snips/python/on_success/worker.ts deleted file mode 100644 index bd7be3ac0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/on_success/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\non_success_workflow = hatchet.workflow(name="OnSuccessWorkflow")\n\n\n@on_success_workflow.task()\ndef first_task(input: EmptyModel, ctx: Context) -> None:\n print("First task completed successfully")\n\n\n@on_success_workflow.task(parents=[first_task])\ndef second_task(input: EmptyModel, ctx: Context) -> None:\n print("Second task completed successfully")\n\n\n@on_success_workflow.task(parents=[first_task, second_task])\ndef third_task(input: EmptyModel, ctx: Context) -> None:\n print("Third task completed successfully")\n\n\n@on_success_workflow.task()\ndef fourth_task(input: EmptyModel, ctx: Context) -> None:\n print("Fourth task completed successfully")\n\n\n@on_success_workflow.on_success_task()\ndef on_success_task(input: EmptyModel, ctx: Context) -> None:\n print("On success task completed successfully")\n\n\ndef main() -> None:\n worker = hatchet.worker("on-success-worker", workflows=[on_success_workflow])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/on_success/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/client.ts b/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/client.ts deleted file mode 100644 index 5d8350097..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/client.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: 'from hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n', - source: 'out/python/opentelemetry_instrumentation/client.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/index.ts deleted file mode 100644 index ecf90a3f3..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import client from './client'; -import tracer from './tracer'; -import triggers from './triggers'; -import worker from './worker'; -import * as langfuse from './langfuse'; - -export { client }; -export { tracer }; -export { triggers }; -export { worker }; -export { langfuse }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/client.ts b/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/client.ts deleted file mode 100644 index d5c0900db..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/client.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import base64\nimport os\n\nfrom langfuse import Langfuse # type: 
ignore\nfrom langfuse.openai import AsyncOpenAI # type: ignore\n\n# > Configure Langfuse\nLANGFUSE_AUTH = base64.b64encode(\n f"{os.getenv(\'LANGFUSE_PUBLIC_KEY\')}:{os.getenv(\'LANGFUSE_SECRET_KEY\')}".encode()\n).decode()\n\nos.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = (\n os.getenv("LANGFUSE_HOST", "https://us.cloud.langfuse.com") + "/api/public/otel"\n)\nos.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"\n\n## Note: Langfuse sets the global tracer provider, so you don\'t need to worry about it\nlf = Langfuse(\n public_key=os.getenv("LANGFUSE_PUBLIC_KEY"),\n secret_key=os.getenv("LANGFUSE_SECRET_KEY"),\n host=os.getenv("LANGFUSE_HOST", "https://app.langfuse.com"),\n)\n\n# > Create OpenAI client\nopenai = AsyncOpenAI(\n api_key=os.getenv("OPENAI_API_KEY"),\n)\n', - source: 'out/python/opentelemetry_instrumentation/langfuse/client.py', - blocks: { - configure_langfuse: { - start: 8, - stop: 22, - }, - create_openai_client: { - start: 25, - stop: 27, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/index.ts deleted file mode 100644 index 49744bfeb..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import client from './client'; -import trigger from './trigger'; -import worker from './worker'; - -export { client }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/trigger.ts deleted file mode 100644 index 7aa98c864..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/trigger.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# type: ignore\n\nimport asyncio\n\nfrom langfuse import get_client # type: ignore\nfrom opentelemetry.trace import StatusCode\n\nfrom examples.opentelemetry_instrumentation.langfuse.worker import langfuse_task\n\n# > Trigger task\ntracer = get_client()\n\n\nasync def main() -> None:\n # Traces will send to Langfuse\n # Use `_otel_tracer` to access the OpenTelemetry tracer if you need\n # to e.g. 
log statuses or attributes manually.\n with tracer._otel_tracer.start_as_current_span(name="trigger") as span:\n result = await langfuse_task.aio_run()\n location = result.get("location")\n\n if not location:\n span.set_status(StatusCode.ERROR)\n return\n\n span.set_attribute("location", location)\n\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/opentelemetry_instrumentation/langfuse/trigger.py', - blocks: { - trigger_task: { - start: 11, - stop: 28, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/worker.ts deleted file mode 100644 index a93152213..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/langfuse/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from opentelemetry.trace import get_tracer_provider\n\nfrom examples.opentelemetry_instrumentation.langfuse.client import openai\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\nfrom hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor\n\n# > Task\nHatchetInstrumentor(\n ## Langfuse sets the global tracer provider\n tracer_provider=get_tracer_provider(),\n).instrument()\n\nhatchet = Hatchet()\n\n\n@hatchet.task()\nasync def langfuse_task(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n ## Usage, cost, etc. of this call will be sent to Langfuse\n generation = await openai.chat.completions.create(\n model="gpt-4o-mini",\n messages=[\n {"role": "system", "content": "You are a helpful assistant."},\n {"role": "user", "content": "Where does Anna Karenina take place?"},\n ],\n )\n\n location = generation.choices[0].message.content\n\n return {\n "location": location,\n }\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker("langfuse-example-worker", workflows=[langfuse_task])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/opentelemetry_instrumentation/langfuse/worker.py', - blocks: { - task: { - start: 8, - stop: 33, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/tracer.ts b/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/tracer.ts deleted file mode 100644 index 6ca5ae782..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/tracer.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import os\nfrom typing import cast\n\nfrom opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter\nfrom opentelemetry.sdk.resources import SERVICE_NAME, Resource\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import BatchSpanProcessor\nfrom opentelemetry.trace import NoOpTracerProvider\n\ntrace_provider: TracerProvider | NoOpTracerProvider\n\nif os.getenv("CI", "false") == "true":\n trace_provider = NoOpTracerProvider()\nelse:\n resource = Resource(\n attributes={\n SERVICE_NAME: os.getenv("HATCHET_CLIENT_OTEL_SERVICE_NAME", "test-service")\n }\n )\n\n headers = dict(\n [\n cast(\n tuple[str, str],\n tuple(\n
os.getenv(\n "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_HEADERS", "foo=bar"\n ).split("=")\n ),\n )\n ]\n )\n\n processor = BatchSpanProcessor(\n OTLPSpanExporter(\n endpoint=os.getenv(\n "HATCHET_CLIENT_OTEL_EXPORTER_OTLP_ENDPOINT", "http://localhost:4317"\n ),\n headers=headers,\n ),\n )\n\n trace_provider = TracerProvider(resource=resource)\n\n trace_provider.add_span_processor(processor)\n', - source: 'out/python/opentelemetry_instrumentation/tracer.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/triggers.ts b/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/triggers.ts deleted file mode 100644 index 0f40c3b8a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/triggers.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nfrom examples.opentelemetry_instrumentation.client import hatchet\nfrom examples.opentelemetry_instrumentation.tracer import trace_provider\nfrom examples.opentelemetry_instrumentation.worker import otel_workflow\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\nfrom hatchet_sdk.clients.events import BulkPushEventWithMetadata, PushEventOptions\nfrom hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor\n\ninstrumentor = HatchetInstrumentor(tracer_provider=trace_provider)\ntracer = trace_provider.get_tracer(__name__)\n\n\nADDITIONAL_METADATA = {"hello": "world"}\n\n\ndef create_push_options() -> PushEventOptions:\n return PushEventOptions(additional_metadata=ADDITIONAL_METADATA)\n\n\ndef push_event() -> None:\n print("\\npush_event")\n with tracer.start_as_current_span("push_event"):\n hatchet.event.push(\n "otel:event",\n {"test": "test"},\n options=create_push_options(),\n )\n\n\nasync def async_push_event() -> None:\n print("\\nasync_push_event")\n with tracer.start_as_current_span("async_push_event"):\n await hatchet.event.aio_push(\n "otel:event", {"test": "test"}, options=create_push_options()\n )\n\n\ndef bulk_push_event() -> None:\n print("\\nbulk_push_event")\n with tracer.start_as_current_span("bulk_push_event"):\n hatchet.event.bulk_push(\n [\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 1"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 2"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n ],\n )\n\n\nasync def async_bulk_push_event() -> None:\n print("\\nasync_bulk_push_event")\n with tracer.start_as_current_span("bulk_push_event"):\n await hatchet.event.aio_bulk_push(\n [\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 1"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 2"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n ],\n )\n\n\ndef run_workflow() -> None:\n print("\\nrun_workflow")\n with tracer.start_as_current_span("run_workflow"):\n otel_workflow.run(\n options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),\n )\n\n\nasync def async_run_workflow() -> None:\n print("\\nasync_run_workflow")\n with tracer.start_as_current_span("async_run_workflow"):\n await otel_workflow.aio_run(\n options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),\n )\n\n\ndef run_workflows() -> 
None:\n print("\\nrun_workflows")\n with tracer.start_as_current_span("run_workflows"):\n otel_workflow.run_many(\n [\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n ],\n )\n\n\nasync def async_run_workflows() -> None:\n print("\\nasync_run_workflows")\n with tracer.start_as_current_span("async_run_workflows"):\n await otel_workflow.aio_run_many(\n [\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n ],\n )\n\n\nasync def main() -> None:\n push_event()\n await async_push_event()\n bulk_push_event()\n await async_bulk_push_event()\n run_workflow()\n # await async_run_workflow()\n run_workflows()\n # await async_run_workflows()\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/opentelemetry_instrumentation/triggers.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/worker.ts deleted file mode 100644 index 6718f4a5b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/opentelemetry_instrumentation/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.opentelemetry_instrumentation.client import hatchet\nfrom examples.opentelemetry_instrumentation.tracer import trace_provider\nfrom hatchet_sdk import Context, EmptyModel\nfrom hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor\n\nHatchetInstrumentor(\n tracer_provider=trace_provider,\n).instrument()\n\notel_workflow = hatchet.workflow(\n name="OTelWorkflow",\n)\n\n\n@otel_workflow.task()\ndef your_spans_are_children_of_hatchet_span(\n input: EmptyModel, ctx: Context\n) -> dict[str, str]:\n with trace_provider.get_tracer(__name__).start_as_current_span("step1"):\n print("executed step")\n return {\n "foo": "bar",\n }\n\n\n@otel_workflow.task()\ndef your_spans_are_still_children_of_hatchet_span(\n input: EmptyModel, ctx: Context\n) -> None:\n with trace_provider.get_tracer(__name__).start_as_current_span("step2"):\n raise Exception("Manually instrumented step failed failed")\n\n\n@otel_workflow.task()\ndef this_step_is_still_instrumented(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print("executed still-instrumented step")\n return {\n "still": "instrumented",\n }\n\n\n@otel_workflow.task()\ndef this_step_is_also_still_instrumented(input: EmptyModel, ctx: Context) -> None:\n raise Exception("Still-instrumented step failed")\n\n\ndef main() -> None:\n worker = hatchet.worker("otel-example-worker", slots=1, workflows=[otel_workflow])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/opentelemetry_instrumentation/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/priority/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/priority/index.ts deleted file mode 100644 index 1d4ee8544..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/python/priority/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_priority from './test_priority'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_priority }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/priority/test_priority.ts b/frontend/app/src/next/lib/docs/generated/snips/python/priority/test_priority.ts deleted file mode 100644 index a1ac0988f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/priority/test_priority.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nfrom collections.abc import AsyncGenerator\nfrom datetime import datetime, timedelta, timezone\nfrom random import choice\nfrom subprocess import Popen\nfrom typing import Any, Literal\nfrom uuid import uuid4\n\nimport pytest\nimport pytest_asyncio\nfrom pydantic import BaseModel\n\nfrom examples.priority.worker import DEFAULT_PRIORITY, SLEEP_TIME, priority_workflow\nfrom hatchet_sdk import Hatchet, ScheduleTriggerWorkflowOptions, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\nPriority = Literal["low", "medium", "high", "default"]\n\n\nclass RunPriorityStartedAt(BaseModel):\n priority: Priority\n started_at: datetime\n finished_at: datetime\n\n\ndef priority_to_int(priority: Priority) -> int:\n match priority:\n case "high":\n return 3\n case "medium":\n return 2\n case "low":\n return 1\n case "default":\n return DEFAULT_PRIORITY\n case _:\n raise ValueError(f"Invalid priority: {priority}")\n\n\n@pytest_asyncio.fixture(loop_scope="session", scope="function")\nasync def dummy_runs() -> None:\n priority: Priority = "high"\n\n await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority)),\n additional_metadata={\n "priority": priority,\n "key": ix,\n "type": "dummy",\n },\n )\n )\n for ix in range(40)\n ]\n )\n\n await asyncio.sleep(3)\n\n return\n\n\n@pytest.mark.parametrize(\n "on_demand_worker",\n [\n (\n ["poetry", "run", "python", "examples/priority/worker.py", "--slots", "1"],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_priority(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n choices: list[Priority] = ["low", "medium", "high", "default"]\n N = 30\n\n run_refs = await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n "priority": priority,\n "key": ix,\n "test_run_id": test_run_id,\n },\n )\n )\n for ix in range(N)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(workflow_name=priority_workflow.name)\n ).rows\n\n assert workflows\n\n workflow = next((w for w in workflows if w.name == priority_workflow.name), None)\n\n assert workflow\n\n assert workflow.name == priority_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n "test_run_id": test_run_id,\n },\n limit=1_000,\n )\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n 
priority=(r.additional_metadata or {}).get("priority") or "low",\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(run_refs)\n assert len(runs_ids_started_ats) == N\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n """Run start times should be in order of priority"""\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n """Runs should proceed one at a time"""\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n """Runs should finish after starting (this is mostly a test for engine datetime handling bugs)"""\n assert curr.finished_at >= curr.started_at\n\n\n@pytest.mark.parametrize(\n "on_demand_worker",\n [\n (\n ["poetry", "run", "python", "examples/priority/worker.py", "--slots", "1"],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_priority_via_scheduling(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n sleep_time = 3\n n = 30\n choices: list[Priority] = ["low", "medium", "high", "default"]\n run_at = datetime.now(tz=timezone.utc) + timedelta(seconds=sleep_time)\n\n versions = await asyncio.gather(\n *[\n priority_workflow.aio_schedule(\n run_at=run_at,\n options=ScheduleTriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n "priority": priority,\n "key": ix,\n "test_run_id": test_run_id,\n },\n ),\n )\n for ix in range(n)\n ]\n )\n\n await asyncio.sleep(sleep_time * 2)\n\n workflow_id = versions[0].workflow_id\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError("Timed out waiting for runs to finish")\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n "test_run_id": test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError("One or more runs failed or were cancelled")\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get("priority") or "low",\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(versions)\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n """Run start times should be in order of priority"""\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n """Runs should proceed one at a time"""\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n """Runs should finish after starting (this is mostly a test for engine datetime handling bugs)"""\n assert curr.finished_at >= curr.started_at\n\n\n@pytest_asyncio.fixture(loop_scope="session", scope="function")\nasync def crons(\n hatchet: Hatchet, dummy_runs: None\n) -> AsyncGenerator[tuple[str, str, int], None]:\n test_run_id = str(uuid4())\n choices: list[Priority] = ["low", "medium", "high"]\n n = 30\n\n crons = await 
asyncio.gather(\n *[\n hatchet.cron.aio_create(\n workflow_name=priority_workflow.name,\n cron_name=f"{test_run_id}-cron-{i}",\n expression="* * * * *",\n input={},\n additional_metadata={\n "trigger": "cron",\n "test_run_id": test_run_id,\n "priority": (priority := choice(choices)),\n "key": str(i),\n },\n priority=(priority_to_int(priority)),\n )\n for i in range(n)\n ]\n )\n\n yield crons[0].workflow_id, test_run_id, n\n\n await asyncio.gather(*[hatchet.cron.aio_delete(cron.metadata.id) for cron in crons])\n\n\ndef time_until_next_minute() -> float:\n now = datetime.now(tz=timezone.utc)\n next_minute = (now + timedelta(minutes=1)).replace(second=0, microsecond=0)\n\n return (next_minute - now).total_seconds()\n\n\n@pytest.mark.skip(\n reason="Test is flaky because the first jobs that are picked up don\'t necessarily go in priority order"\n)\n@pytest.mark.parametrize(\n "on_demand_worker",\n [\n (\n ["poetry", "run", "python", "examples/priority/worker.py", "--slots", "1"],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_priority_via_cron(\n hatchet: Hatchet, crons: tuple[str, str, int], on_demand_worker: Popen[Any]\n) -> None:\n workflow_id, test_run_id, n = crons\n\n await asyncio.sleep(time_until_next_minute() + 10)\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError("Timed out waiting for runs to finish")\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n "test_run_id": test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError("One or more runs failed or were cancelled")\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get("priority") or "low",\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == n\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n """Run start times should be in order of priority"""\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n """Runs should proceed one at a time"""\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n """Runs should finish after starting (this is mostly a test for engine datetime handling bugs)"""\n assert curr.finished_at >= curr.started_at\n', - source: 'out/python/priority/test_priority.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/priority/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/priority/trigger.ts deleted file mode 100644 index 5e0739366..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/priority/trigger.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from datetime import datetime, timedelta, timezone\n\nfrom examples.priority.worker import priority_workflow\nfrom hatchet_sdk import ScheduleTriggerWorkflowOptions, 
TriggerWorkflowOptions\n\npriority_workflow.run_no_wait()\n\n# > Runtime priority\nlow_prio = priority_workflow.run_no_wait(\n options=TriggerWorkflowOptions(\n ## 👀 Adding priority and key to metadata to show them in the dashboard\n priority=1,\n additional_metadata={"priority": "low", "key": 1},\n )\n)\n\nhigh_prio = priority_workflow.run_no_wait(\n options=TriggerWorkflowOptions(\n ## 👀 Adding priority and key to metadata to show them in the dashboard\n priority=3,\n additional_metadata={"priority": "high", "key": 1},\n )\n)\n\n# > Scheduled priority\nschedule = priority_workflow.schedule(\n run_at=datetime.now(tz=timezone.utc) + timedelta(minutes=1),\n options=ScheduleTriggerWorkflowOptions(priority=3),\n)\n\ncron = priority_workflow.create_cron(\n cron_name="my-scheduled-cron",\n expression="0 * * * *",\n priority=3,\n)\n\n# > Default priority\nlow_prio = priority_workflow.run_no_wait(\n options=TriggerWorkflowOptions(\n ## 👀 Adding priority and key to metadata to show them in the dashboard\n priority=1,\n additional_metadata={"priority": "low", "key": 2},\n )\n)\nhigh_prio = priority_workflow.run_no_wait(\n options=TriggerWorkflowOptions(\n ## 👀 Adding priority and key to metadata to show them in the dashboard\n priority=3,\n additional_metadata={"priority": "high", "key": 2},\n )\n)\n', - source: 'out/python/priority/trigger.py', - blocks: { - runtime_priority: { - start: 9, - stop: 23, - }, - scheduled_priority: { - start: 26, - stop: 35, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/priority/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/priority/worker.ts deleted file mode 100644 index 473341f8f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/priority/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n# > Default priority\nDEFAULT_PRIORITY = 1\nSLEEP_TIME = 0.25\n\npriority_workflow = hatchet.workflow(\n name="PriorityWorkflow",\n default_priority=DEFAULT_PRIORITY,\n)\n\n\n@priority_workflow.task()\ndef priority_task(input: EmptyModel, ctx: Context) -> None:\n print("Priority:", ctx.priority)\n time.sleep(SLEEP_TIME)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "priority-worker",\n slots=1,\n workflows=[priority_workflow],\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/priority/worker.py', - blocks: { - default_priority: { - start: 8, - stop: 14, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/README.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/README.ts deleted file mode 100644 index cf6b9fdcb..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/README.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - "## Hatchet Python Quickstart\n\nThis is an example project demonstrating how to use Hatchet with Python. For detailed setup instructions, see the [Hatchet Setup Guide](https://docs.hatchet.run/home/setup).\n\n## Prerequisites\n\nBefore running this project, make sure you have the following:\n\n1. 
[Python v3.10 or higher](https://www.python.org/downloads/)\n2. [Poetry](https://python-poetry.org/docs/#installation) for dependency management\n\n## Setup\n\n1. Clone the repository:\n\n```bash\ngit clone https://github.com/hatchet-dev/hatchet-python-quickstart.git\ncd hatchet-python-quickstart\n```\n\n2. Set the required environment variable `HATCHET_CLIENT_TOKEN` created in the [Getting Started Guide](https://docs.hatchet.run/home/hatchet-cloud-quickstart).\n\n```bash\nexport HATCHET_CLIENT_TOKEN=\n```\n\n> Note: If you're self-hosting, you may need to set `HATCHET_CLIENT_TLS_STRATEGY=none` to disable TLS\n\n3. Install the project dependencies:\n\n```bash\npoetry install\n```\n\n### Running an example\n\n1. Start a Hatchet worker by running the following command:\n\n```shell\npoetry run python src/worker.py\n```\n\n2. To run the example workflow, open a new terminal and run the following command:\n\n```shell\npoetry run python src/run.py\n```\n\nThis will trigger the workflow on the worker running in the first terminal and print the output to the second terminal.\n", - source: 'out/python/quickstart/README.md', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/__init__.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/__init__.ts deleted file mode 100644 index 83b75dee9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/__init__.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: '', - source: 'out/python/quickstart/__init__.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/gitignore.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/gitignore.ts deleted file mode 100644 index 4738ba9cf..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/gitignore.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - 'certs/\n\n# Environments\n.env\nenv/\nvenv/\n.venv/\n__pycache__/\n*.py[cod]\n*$py.class\n.Python\n.pytest_cache/\n.coverage\nhtmlcov/\n\n# Distribution / packaging\ndist/\nbuild/\n*.egg-info/\n*.egg\n\n.DS_Store\n\nindex/index.json\n', - source: 'out/python/quickstart/.gitignore', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/hatchet_client.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/hatchet_client.ts deleted file mode 100644 index 9ffe884b2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/hatchet_client.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import Hatchet\n\n# Initialize Hatchet client\nhatchet = Hatchet()\n', - source: 'out/python/quickstart/hatchet_client.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/index.ts deleted file mode 100644 index 86f832bbb..000000000 ---
a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -import readme from './README'; -import __init__ from './__init__'; -import gitignore from './gitignore'; -import hatchet_client from './hatchet_client'; -import poetry from './poetry'; -import pyproject from './pyproject'; -import run from './run'; -import worker from './worker'; -import * as workflows from './workflows'; - -export { readme }; -export { __init__ }; -export { gitignore }; -export { hatchet_client }; -export { poetry }; -export { pyproject }; -export { run }; -export { worker }; -export { workflows }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/poetry.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/poetry.ts deleted file mode 100644 index 6252bde2b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/poetry.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - '# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand.\n\n[[package]]\nname = "aiohappyeyeballs"\nversion = "2.6.1"\ndescription = "Happy Eyeballs for asyncio"\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"},\n {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"},\n]\n\n[[package]]\nname = "aiohttp"\nversion = "3.11.14"\ndescription = "Async http client/server framework (asyncio)"\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e2bc827c01f75803de77b134afdbf74fa74b62970eafdf190f3244931d7a5c0d"},\n {file = "aiohttp-3.11.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e365034c5cf6cf74f57420b57682ea79e19eb29033399dd3f40de4d0171998fa"},\n {file = "aiohttp-3.11.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c32593ead1a8c6aabd58f9d7ee706e48beac796bb0cb71d6b60f2c1056f0a65f"},\n {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4e7c7ec4146a94a307ca4f112802a8e26d969018fabed526efc340d21d3e7d0"},\n {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8b2df9feac55043759aa89f722a967d977d80f8b5865a4153fc41c93b957efc"},\n {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7571f99525c76a6280f5fe8e194eeb8cb4da55586c3c61c59c33a33f10cfce7"},\n {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b59d096b5537ec7c85954cb97d821aae35cfccce3357a2cafe85660cc6295628"},\n {file = "aiohttp-3.11.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b42dbd097abb44b3f1156b4bf978ec5853840802d6eee2784857be11ee82c6a0"},\n {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b05774864c87210c531b48dfeb2f7659407c2dda8643104fb4ae5e2c311d12d9"},\n {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4e2e8ef37d4bc110917d038807ee3af82700a93ab2ba5687afae5271b8bc50ff"},\n {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:e9faafa74dbb906b2b6f3eb9942352e9e9db8d583ffed4be618a89bd71a4e914"},\n {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:7e7abe865504f41b10777ac162c727af14e9f4db9262e3ed8254179053f63e6d"},\n {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:4848ae31ad44330b30f16c71e4f586cd5402a846b11264c412de99fa768f00f3"},\n {file = "aiohttp-3.11.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2d0b46abee5b5737cb479cc9139b29f010a37b1875ee56d142aefc10686a390b"},\n {file = "aiohttp-3.11.14-cp310-cp310-win32.whl", hash = "sha256:a0d2c04a623ab83963576548ce098baf711a18e2c32c542b62322a0b4584b990"},\n {file = "aiohttp-3.11.14-cp310-cp310-win_amd64.whl", hash = "sha256:5409a59d5057f2386bb8b8f8bbcfb6e15505cedd8b2445db510563b5d7ea1186"},\n {file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f296d637a50bb15fb6a229fbb0eb053080e703b53dbfe55b1e4bb1c5ed25d325"},\n {file = "aiohttp-3.11.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6cd1954ca2bbf0970f531a628da1b1338f594bf5da7e361e19ba163ecc4f3b"},\n {file = "aiohttp-3.11.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:572def4aad0a4775af66d5a2b5923c7de0820ecaeeb7987dcbccda2a735a993f"},\n {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c68e41c4d576cd6aa6c6d2eddfb32b2acfb07ebfbb4f9da991da26633a3db1a"},\n {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b8bbfc8111826aa8363442c0fc1f5751456b008737ff053570f06a151650b3"},\n {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b0a200e85da5c966277a402736a96457b882360aa15416bf104ca81e6f5807b"},\n {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d173c0ac508a2175f7c9a115a50db5fd3e35190d96fdd1a17f9cb10a6ab09aa1"},\n {file = "aiohttp-3.11.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:413fe39fd929329f697f41ad67936f379cba06fcd4c462b62e5b0f8061ee4a77"},\n {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65c75b14ee74e8eeff2886321e76188cbe938d18c85cff349d948430179ad02c"},\n {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:321238a42ed463848f06e291c4bbfb3d15ba5a79221a82c502da3e23d7525d06"},\n {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:59a05cdc636431f7ce843c7c2f04772437dd816a5289f16440b19441be6511f1"},\n {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:daf20d9c3b12ae0fdf15ed92235e190f8284945563c4b8ad95b2d7a31f331cd3"},\n {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:05582cb2d156ac7506e68b5eac83179faedad74522ed88f88e5861b78740dc0e"},\n {file = "aiohttp-3.11.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:12c5869e7ddf6b4b1f2109702b3cd7515667b437da90a5a4a50ba1354fe41881"},\n {file = "aiohttp-3.11.14-cp311-cp311-win32.whl", hash = "sha256:92868f6512714efd4a6d6cb2bfc4903b997b36b97baea85f744229f18d12755e"},\n {file = "aiohttp-3.11.14-cp311-cp311-win_amd64.whl", hash = "sha256:bccd2cb7aa5a3bfada72681bdb91637094d81639e116eac368f8b3874620a654"},\n {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:70ab0f61c1a73d3e0342cedd9a7321425c27a7067bebeeacd509f96695b875fc"},\n {file = "aiohttp-3.11.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:602d4db80daf4497de93cb1ce00b8fc79969c0a7cf5b67bec96fa939268d806a"},\n {file = "aiohttp-3.11.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a8a0d127c10b8d89e69bbd3430da0f73946d839e65fec00ae48ca7916a31948"},\n {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9f835cdfedcb3f5947304e85b8ca3ace31eef6346d8027a97f4de5fb687534"},\n {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aa5c68e1e68fff7cd3142288101deb4316b51f03d50c92de6ea5ce646e6c71f"},\n {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b512f1de1c688f88dbe1b8bb1283f7fbeb7a2b2b26e743bb2193cbadfa6f307"},\n {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc9253069158d57e27d47a8453d8a2c5a370dc461374111b5184cf2f147a3cc3"},\n {file = "aiohttp-3.11.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b2501f1b981e70932b4a552fc9b3c942991c7ae429ea117e8fba57718cdeed0"},\n {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:28a3d083819741592685762d51d789e6155411277050d08066537c5edc4066e6"},\n {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0df3788187559c262922846087e36228b75987f3ae31dd0a1e5ee1034090d42f"},\n {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9e73fa341d8b308bb799cf0ab6f55fc0461d27a9fa3e4582755a3d81a6af8c09"},\n {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:51ba80d473eb780a329d73ac8afa44aa71dfb521693ccea1dea8b9b5c4df45ce"},\n {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8d1dd75aa4d855c7debaf1ef830ff2dfcc33f893c7db0af2423ee761ebffd22b"},\n {file = "aiohttp-3.11.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41cf0cefd9e7b5c646c2ef529c8335e7eafd326f444cc1cdb0c47b6bc836f9be"},\n {file = "aiohttp-3.11.14-cp312-cp312-win32.whl", hash = "sha256:948abc8952aff63de7b2c83bfe3f211c727da3a33c3a5866a0e2cf1ee1aa950f"},\n {file = "aiohttp-3.11.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b420d076a46f41ea48e5fcccb996f517af0d406267e31e6716f480a3d50d65c"},\n {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d14e274828561db91e4178f0057a915f3af1757b94c2ca283cb34cbb6e00b50"},\n {file = "aiohttp-3.11.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f30fc72daf85486cdcdfc3f5e0aea9255493ef499e31582b34abadbfaafb0965"},\n {file = "aiohttp-3.11.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4edcbe34e6dba0136e4cabf7568f5a434d89cc9de5d5155371acda275353d228"},\n {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a7169ded15505f55a87f8f0812c94c9412623c744227b9e51083a72a48b68a5"},\n {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad1f2fb9fe9b585ea4b436d6e998e71b50d2b087b694ab277b30e060c434e5db"},\n {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20412c7cc3720e47a47e63c0005f78c0c2370020f9f4770d7fc0075f397a9fb0"},\n {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dd9766da617855f7e85f27d2bf9a565ace04ba7c387323cd3e651ac4329db91"},\n {file = "aiohttp-3.11.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:599b66582f7276ebefbaa38adf37585e636b6a7a73382eb412f7bc0fc55fb73d"},\n {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b41693b7388324b80f9acfabd479bd1c84f0bc7e8f17bab4ecd9675e9ff9c734"},\n {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:86135c32d06927339c8c5e64f96e4eee8825d928374b9b71a3c42379d7437058"},\n {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:04eb541ce1e03edc1e3be1917a0f45ac703e913c21a940111df73a2c2db11d73"},\n {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dc311634f6f28661a76cbc1c28ecf3b3a70a8edd67b69288ab7ca91058eb5a33"},\n {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:69bb252bfdca385ccabfd55f4cd740d421dd8c8ad438ded9637d81c228d0da49"},\n {file = "aiohttp-3.11.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2b86efe23684b58a88e530c4ab5b20145f102916bbb2d82942cafec7bd36a647"},\n {file = "aiohttp-3.11.14-cp313-cp313-win32.whl", hash = "sha256:b9c60d1de973ca94af02053d9b5111c4fbf97158e139b14f1be68337be267be6"},\n {file = "aiohttp-3.11.14-cp313-cp313-win_amd64.whl", hash = "sha256:0a29be28e60e5610d2437b5b2fed61d6f3dcde898b57fb048aa5079271e7f6f3"},\n {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:14fc03508359334edc76d35b2821832f092c8f092e4b356e74e38419dfe7b6de"},\n {file = "aiohttp-3.11.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92007c89a8cb7be35befa2732b0b32bf3a394c1b22ef2dff0ef12537d98a7bda"},\n {file = "aiohttp-3.11.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6d3986112e34eaa36e280dc8286b9dd4cc1a5bcf328a7f147453e188f6fe148f"},\n {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:749f1eb10e51dbbcdba9df2ef457ec060554842eea4d23874a3e26495f9e87b1"},\n {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:781c8bd423dcc4641298c8c5a2a125c8b1c31e11f828e8d35c1d3a722af4c15a"},\n {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:997b57e38aa7dc6caab843c5e042ab557bc83a2f91b7bd302e3c3aebbb9042a1"},\n {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a8b0321e40a833e381d127be993b7349d1564b756910b28b5f6588a159afef3"},\n {file = "aiohttp-3.11.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8778620396e554b758b59773ab29c03b55047841d8894c5e335f12bfc45ebd28"},\n {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e906da0f2bcbf9b26cc2b144929e88cb3bf943dd1942b4e5af066056875c7618"},\n {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:87f0e003fb4dd5810c7fbf47a1239eaa34cd929ef160e0a54c570883125c4831"},\n {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7f2dadece8b85596ac3ab1ec04b00694bdd62abc31e5618f524648d18d9dd7fa"},\n {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:fe846f0a98aa9913c2852b630cd39b4098f296e0907dd05f6c7b30d911afa4c3"},\n {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ced66c5c6ad5bcaf9be54560398654779ec1c3695f1a9cf0ae5e3606694a000a"},\n {file = "aiohttp-3.11.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a40087b82f83bd671cbeb5f582c233d196e9653220404a798798bfc0ee189fff"},\n {file = "aiohttp-3.11.14-cp39-cp39-win32.whl", hash = 
"sha256:95d7787f2bcbf7cb46823036a8d64ccfbc2ffc7d52016b4044d901abceeba3db"},\n {file = "aiohttp-3.11.14-cp39-cp39-win_amd64.whl", hash = "sha256:22a8107896877212130c58f74e64b77f7007cb03cea8698be317272643602d45"},\n {file = "aiohttp-3.11.14.tar.gz", hash = "sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c"},\n]\n\n[package.dependencies]\naiohappyeyeballs = ">=2.3.0"\naiosignal = ">=1.1.2"\nasync-timeout = {version = ">=4.0,<6.0", markers = "python_version < \\"3.11\\""}\nattrs = ">=17.3.0"\nfrozenlist = ">=1.1.1"\nmultidict = ">=4.5,<7.0"\npropcache = ">=0.2.0"\nyarl = ">=1.17.0,<2.0"\n\n[package.extras]\nspeedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]\n\n[[package]]\nname = "aiohttp-retry"\nversion = "2.9.1"\ndescription = "Simple retry client for aiohttp"\noptional = false\npython-versions = ">=3.7"\ngroups = ["main"]\nfiles = [\n {file = "aiohttp_retry-2.9.1-py3-none-any.whl", hash = "sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54"},\n {file = "aiohttp_retry-2.9.1.tar.gz", hash = "sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1"},\n]\n\n[package.dependencies]\naiohttp = "*"\n\n[[package]]\nname = "aiosignal"\nversion = "1.3.2"\ndescription = "aiosignal: a list of registered asynchronous callbacks"\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"},\n {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"},\n]\n\n[package.dependencies]\nfrozenlist = ">=1.1.0"\n\n[[package]]\nname = "aiostream"\nversion = "0.5.2"\ndescription = "Generator-based operators for asynchronous iteration"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "aiostream-0.5.2-py3-none-any.whl", hash = "sha256:054660370be9d37f6fe3ece3851009240416bd082e469fd90cc8673d3818cf71"},\n {file = "aiostream-0.5.2.tar.gz", hash = "sha256:b71b519a2d66c38f0872403ab86417955b77352f08d9ad02ad46fc3926b389f4"},\n]\n\n[package.dependencies]\ntyping-extensions = "*"\n\n[[package]]\nname = "annotated-types"\nversion = "0.7.0"\ndescription = "Reusable constraint types to use with typing.Annotated"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},\n {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},\n]\n\n[[package]]\nname = "async-timeout"\nversion = "5.0.1"\ndescription = "Timeout context manager for asyncio programs"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nmarkers = "python_version < \\"3.11\\""\nfiles = [\n {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"},\n {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},\n]\n\n[[package]]\nname = "attrs"\nversion = "25.3.0"\ndescription = "Classes Without Boilerplate"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"},\n {file = "attrs-25.3.0.tar.gz", hash = 
"sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"},\n]\n\n[package.extras]\nbenchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]\ncov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]\ndev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]\ndocs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]\ntests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]\ntests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]\n\n[[package]]\nname = "cel-python"\nversion = "0.2.0"\ndescription = "Pure Python implementation of Google Common Expression Language"\noptional = false\npython-versions = "<4.0,>=3.8"\ngroups = ["main"]\nfiles = [\n {file = "cel_python-0.2.0-py3-none-any.whl", hash = "sha256:478ff73def7b39d51e6982f95d937a57c2b088c491c578fe5cecdbd79f476f60"},\n {file = "cel_python-0.2.0.tar.gz", hash = "sha256:75de72a5cf223ec690b236f0cc24da267219e667bd3e7f8f4f20595fcc1c0c0f"},\n]\n\n[package.dependencies]\njmespath = ">=1.0.1,<2.0.0"\nlark = ">=0.12.0,<0.13.0"\npython-dateutil = ">=2.9.0.post0,<3.0.0"\npyyaml = ">=6.0.1,<7.0.0"\ntypes-python-dateutil = ">=2.9.0.20240316,<3.0.0.0"\ntypes-pyyaml = ">=6.0.12.20240311,<7.0.0.0"\n\n[[package]]\nname = "frozenlist"\nversion = "1.5.0"\ndescription = "A list-like structure which implements collections.abc.MutableSequence"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"},\n {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"},\n {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"},\n {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"},\n {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"},\n {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"},\n {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"},\n {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"},\n {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"},\n {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"},\n {file = 
"frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"},\n {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"},\n {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"},\n {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = "sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"},\n {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"},\n {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"},\n {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"},\n {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"},\n {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"},\n {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"},\n {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"},\n {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"},\n {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"},\n {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"},\n {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"},\n {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"},\n {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"},\n {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"},\n {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"},\n {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"},\n {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"},\n {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"},\n {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"},\n {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"},\n {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"},\n {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"},\n {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"},\n {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"},\n {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"},\n {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"},\n {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"},\n {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"},\n {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"},\n {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"},\n {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"},\n {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"},\n {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"},\n {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"},\n {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"},\n {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"},\n {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"},\n {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"},\n {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"},\n {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"},\n {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"},\n {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"},\n {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"},\n {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"},\n {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"},\n {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"},\n {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"},\n {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"},\n {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"},\n {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"},\n {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"},\n {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"},\n {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"},\n {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"},\n {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"},\n {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"},\n {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"},\n {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"},\n {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"},\n {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"},\n {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"},\n {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"},\n {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"},\n {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"},\n {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"},\n {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"},\n {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"},\n {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"},\n {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"},\n {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"},\n {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"},\n {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"},\n {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"},\n {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"},\n {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"},\n {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"},\n {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"},\n {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"},\n]\n\n[[package]]\nname = "grpcio"\nversion = "1.71.0"\ndescription = "HTTP/2-based RPC framework"\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"},\n {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"},\n {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"},\n {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"},\n {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"},\n {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"},\n {file = 
"grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"},\n {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"},\n {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"},\n {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"},\n {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"},\n {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"},\n {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"},\n {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"},\n {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"},\n {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"},\n {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"},\n {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"},\n {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"},\n {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"},\n {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"},\n {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"},\n {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"},\n {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"},\n {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"},\n {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"},\n {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"},\n {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"},\n {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"},\n {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"},\n {file = 
"grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"},\n {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"},\n {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"},\n {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"},\n {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"},\n {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"},\n {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"},\n {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"},\n {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"},\n {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"},\n {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"},\n {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"},\n {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"},\n {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"},\n {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"},\n {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"},\n {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"},\n {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"},\n {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"},\n {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"},\n {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"},\n]\n\n[package.extras]\nprotobuf = ["grpcio-tools (>=1.71.0)"]\n\n[[package]]\nname = "grpcio-tools"\nversion = "1.71.0"\ndescription = "Protobuf code generator for gRPC"\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "grpcio_tools-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:f4ad7f0d756546902597053d70b3af2606fbd70d7972876cd75c1e241d22ae00"},\n {file = "grpcio_tools-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = 
"sha256:64bdb291df61cf570b5256777ad5fe2b1db6d67bc46e55dc56a0a862722ae329"},\n {file = "grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:8dd9795e982d77a4b496f7278b943c2563d9afde2069cdee78c111a40cc4d675"},\n {file = "grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c1b5860c41a36b26fec4f52998f1a451d0525a5c9a4fb06b6ea3e9211abdb925"},\n {file = "grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3059c14035e5dc03d462f261e5900b9a077fd1a36976c3865b8507474520bad4"},\n {file = "grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f360981b215b1d5aff9235b37e7e1826246e35bbac32a53e41d4e990a37b8f4c"},\n {file = "grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bfe3888c3bbe16a5aa39409bc38744a31c0c3d2daa2b0095978c56e106c85b42"},\n {file = "grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:145985c0bf12131f0a1503e65763e0f060473f7f3928ed1ff3fb0e8aad5bc8ac"},\n {file = "grpcio_tools-1.71.0-cp310-cp310-win32.whl", hash = "sha256:82c430edd939bb863550ee0fecf067d78feff828908a1b529bbe33cc57f2419c"},\n {file = "grpcio_tools-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:83e90724e3f02415c628e4ead1d6ffe063820aaaa078d9a39176793df958cd5a"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:1f19b16b49afa5d21473f49c0966dd430c88d089cd52ac02404d8cef67134efb"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:459c8f5e00e390aecd5b89de67deb3ec7188a274bc6cb50e43cef35ab3a3f45d"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:edab7e6518de01196be37f96cb1e138c3819986bf5e2a6c9e1519b4d716b2f5a"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b93b9f6adc7491d4c10144c0643409db298e5e63c997106a804f6f0248dbaf4"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ae5f2efa9e644c10bf1021600bfc099dfbd8e02b184d2d25dc31fcd6c2bc59e"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:65aa082f4435571d65d5ce07fc444f23c3eff4f3e34abef599ef8c9e1f6f360f"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1331e726e08b7bdcbf2075fcf4b47dff07842b04845e6e220a08a4663e232d7f"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6693a7d3ba138b0e693b3d1f687cdd9db9e68976c3fa2b951c17a072fea8b583"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-win32.whl", hash = "sha256:6d11ed3ff7b6023b5c72a8654975324bb98c1092426ba5b481af406ff559df00"},\n {file = "grpcio_tools-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:072b2a5805ac97e4623b3aa8f7818275f3fb087f4aa131b0fce00471065f6eaa"},\n {file = "grpcio_tools-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:61c0409d5bdac57a7bd0ce0ab01c1c916728fe4c8a03d77a25135ad481eb505c"},\n {file = "grpcio_tools-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:28784f39921d061d2164a9dcda5164a69d07bf29f91f0ea50b505958292312c9"},\n {file = "grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:192808cf553cedca73f0479cc61d5684ad61f24db7a5f3c4dfe1500342425866"},\n {file = "grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:989ee9da61098230d3d4c8f8f8e27c2de796f1ff21b1c90110e636d9acd9432b"},\n {file = 
"grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:541a756276c8a55dec991f6c0106ae20c8c8f5ce8d0bdbfcb01e2338d1a8192b"},\n {file = "grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:870c0097700d13c403e5517cb7750ab5b4a791ce3e71791c411a38c5468b64bd"},\n {file = "grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:abd57f615e88bf93c3c6fd31f923106e3beb12f8cd2df95b0d256fa07a7a0a57"},\n {file = "grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:753270e2d06d37e6d7af8967d1d059ec635ad215882041a36294f4e2fd502b2e"},\n {file = "grpcio_tools-1.71.0-cp312-cp312-win32.whl", hash = "sha256:0e647794bd7138b8c215e86277a9711a95cf6a03ff6f9e555d54fdf7378b9f9d"},\n {file = "grpcio_tools-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:48debc879570972d28bfe98e4970eff25bb26da3f383e0e49829b2d2cd35ad87"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:9a78d07d6c301a25ef5ede962920a522556a1dfee1ccc05795994ceb867f766c"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:580ac88141c9815557e63c9c04f5b1cdb19b4db8d0cb792b573354bde1ee8b12"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f7c678e68ece0ae908ecae1c4314a0c2c7f83e26e281738b9609860cc2c82d96"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56ecd6cc89b5e5eed1de5eb9cafce86c9c9043ee3840888cc464d16200290b53"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e52a041afc20ab2431d756b6295d727bd7adee813b21b06a3483f4a7a15ea15f"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2a1712f12102b60c8d92779b89d0504e0d6f3a59f2b933e5622b8583f5c02992"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:41878cb7a75477e62fdd45e7e9155b3af1b7a5332844021e2511deaf99ac9e6c"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:682e958b476049ccc14c71bedf3f979bced01f6e0c04852efc5887841a32ad6b"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-win32.whl", hash = "sha256:0ccfb837152b7b858b9f26bb110b3ae8c46675d56130f6c2f03605c4f129be13"},\n {file = "grpcio_tools-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:ffff9bc5eacb34dd26b487194f7d44a3e64e752fc2cf049d798021bf25053b87"},\n {file = "grpcio_tools-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:834959b6eceb85de5217a411aba1643b5f782798680c122202d6a06177226644"},\n {file = "grpcio_tools-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:e3ae9556e2a1cd70e7d7b0e0459c35af71d51a7dae4cf36075068011a69f13ec"},\n {file = "grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:77fe6db1334e0ce318b2cb4e70afa94e0c173ed1a533d37aea69ad9f61ae8ea9"},\n {file = "grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57e3e2544c306b60ef2d76570bac4e977be1ad548641c9eec130c3bc47e80141"},\n {file = "grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af39e245fa56f7f5c2fe86b7d6c1b78f395c07e54d5613cbdbb3c24769a92b6e"},\n {file = "grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8f987d0053351217954543b174b0bddbf51d45b3cfcf8d6de97b0a43d264d753"},\n {file = "grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8e6cdbba4dae7b37b0d25d074614be9936fb720144420f03d9f142a80be69ba2"},\n {file = 
"grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3adc8b229e60c77bab5a5d62b415667133bd5ced7d59b5f71d6317c9143631e"},\n {file = "grpcio_tools-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f68334d28a267fabec6e70cb5986e9999cfbfd14db654094ddf9aedd804a293a"},\n {file = "grpcio_tools-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:1291a6136c07a86c3bb09f6c33f5cf227cc14956edd1b85cb572327a36e0aef8"},\n {file = "grpcio_tools-1.71.0.tar.gz", hash = "sha256:38dba8e0d5e0fb23a034e09644fdc6ed862be2371887eee54901999e8f6792a8"},\n]\n\n[package.dependencies]\ngrpcio = ">=1.71.0"\nprotobuf = ">=5.26.1,<6.0dev"\nsetuptools = "*"\n\n[[package]]\nname = "hatchet-sdk"\nversion = "1.0.0a1"\ndescription = ""\noptional = false\npython-versions = "<4.0,>=3.10"\ngroups = ["main"]\nfiles = [\n {file = "hatchet_sdk-1.0.0a1-py3-none-any.whl", hash = "sha256:bfc84358c8842cecd0d95b30645109733b7292dff0db1a776ca862785ee93d7f"},\n {file = "hatchet_sdk-1.0.0a1.tar.gz", hash = "sha256:f0272bbaac6faed75ff727826e9f7b1ac42ae597f9b590e14d392aada9c9692f"},\n]\n\n[package.dependencies]\naiohttp = ">=3.10.5,<4.0.0"\naiohttp-retry = ">=2.8.3,<3.0.0"\naiostream = ">=0.5.2,<0.6.0"\ncel-python = ">=0.2.0,<0.3.0"\ngrpcio = [\n {version = ">=1.64.1,<1.68.dev0 || >=1.69.dev0", markers = "python_version < \\"3.13\\""},\n {version = ">=1.69.0", markers = "python_version >= \\"3.13\\""},\n]\ngrpcio-tools = [\n {version = ">=1.64.1,<1.68.dev0 || >=1.69.dev0", markers = "python_version < \\"3.13\\""},\n {version = ">=1.69.0", markers = "python_version >= \\"3.13\\""},\n]\nnest-asyncio = ">=1.6.0,<2.0.0"\nprometheus-client = ">=0.21.1,<0.22.0"\nprotobuf = ">=5.29.1,<6.0.0"\npydantic = ">=2.6.3,<3.0.0"\npydantic-settings = ">=2.7.1,<3.0.0"\npython-dateutil = ">=2.9.0.post0,<3.0.0"\npyyaml = ">=6.0.1,<7.0.0"\ntenacity = ">=8.4.1"\nurllib3 = ">=1.26.20"\n\n[package.extras]\notel = ["opentelemetry-api (>=1.28.0,<2.0.0)", "opentelemetry-distro (>=0.49b0)", "opentelemetry-exporter-otlp (>=1.28.0,<2.0.0)", "opentelemetry-exporter-otlp-proto-http (>=1.28.0,<2.0.0)", "opentelemetry-instrumentation (>=0.49b0)", "opentelemetry-sdk (>=1.28.0,<2.0.0)"]\n\n[[package]]\nname = "idna"\nversion = "3.10"\ndescription = "Internationalized Domain Names in Applications (IDNA)"\noptional = false\npython-versions = ">=3.6"\ngroups = ["main"]\nfiles = [\n {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},\n {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},\n]\n\n[package.extras]\nall = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]\n\n[[package]]\nname = "jmespath"\nversion = "1.0.1"\ndescription = "JSON Matching Expressions"\noptional = false\npython-versions = ">=3.7"\ngroups = ["main"]\nfiles = [\n {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},\n {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},\n]\n\n[[package]]\nname = "lark"\nversion = "0.12.0"\ndescription = "a modern parsing library"\noptional = false\npython-versions = "*"\ngroups = ["main"]\nfiles = [\n {file = "lark-0.12.0-py2.py3-none-any.whl", hash = "sha256:ed1d891cbcf5151ead1c1d14663bf542443e579e63a76ae175b01b899bd854ca"},\n {file = "lark-0.12.0.tar.gz", hash = "sha256:7da76fcfddadabbbbfd949bbae221efd33938451d90b1fefbbc423c3cccf48ef"},\n]\n\n[package.extras]\natomic-cache = 
["atomicwrites"]\nnearley = ["js2py"]\nregex = ["regex"]\n\n[[package]]\nname = "multidict"\nversion = "6.2.0"\ndescription = "multidict implementation"\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b9f6392d98c0bd70676ae41474e2eecf4c7150cb419237a41f8f96043fcb81d1"},\n {file = "multidict-6.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3501621d5e86f1a88521ea65d5cad0a0834c77b26f193747615b7c911e5422d2"},\n {file = "multidict-6.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:32ed748ff9ac682eae7859790d3044b50e3076c7d80e17a44239683769ff485e"},\n {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc826b9a8176e686b67aa60fd6c6a7047b0461cae5591ea1dc73d28f72332a8a"},\n {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:214207dcc7a6221d9942f23797fe89144128a71c03632bf713d918db99bd36de"},\n {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05fefbc3cddc4e36da209a5e49f1094bbece9a581faa7f3589201fd95df40e5d"},\n {file = "multidict-6.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e851e6363d0dbe515d8de81fd544a2c956fdec6f8a049739562286727d4a00c3"},\n {file = "multidict-6.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32c9b4878f48be3e75808ea7e499d6223b1eea6d54c487a66bc10a1871e3dc6a"},\n {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7243c5a6523c5cfeca76e063efa5f6a656d1d74c8b1fc64b2cd1e84e507f7e2a"},\n {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0e5a644e50ef9fb87878d4d57907f03a12410d2aa3b93b3acdf90a741df52c49"},\n {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:0dc25a3293c50744796e87048de5e68996104d86d940bb24bc3ec31df281b191"},\n {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a49994481b99cd7dedde07f2e7e93b1d86c01c0fca1c32aded18f10695ae17eb"},\n {file = "multidict-6.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:641cf2e3447c9ecff2f7aa6e9eee9eaa286ea65d57b014543a4911ff2799d08a"},\n {file = "multidict-6.2.0-cp310-cp310-win32.whl", hash = "sha256:0c383d28857f66f5aebe3e91d6cf498da73af75fbd51cedbe1adfb85e90c0460"},\n {file = "multidict-6.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a33273a541f1e1a8219b2a4ed2de355848ecc0254264915b9290c8d2de1c74e1"},\n {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:84e87a7d75fa36839a3a432286d719975362d230c70ebfa0948549cc38bd5b46"},\n {file = "multidict-6.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8de4d42dffd5ced9117af2ce66ba8722402541a3aa98ffdf78dde92badb68932"},\n {file = "multidict-6.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e7d91a230c7f8af86c904a5a992b8c064b66330544693fd6759c3d6162382ecf"},\n {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f6cad071960ba1914fa231677d21b1b4a3acdcce463cee41ea30bc82e6040cf"},\n {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f74f2fc51555f4b037ef278efc29a870d327053aba5cb7d86ae572426c7cccc"},\n {file = "multidict-6.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14ed9ed1bfedd72a877807c71113deac292bf485159a29025dfdc524c326f3e1"},\n {file = 
"multidict-6.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac3fcf9a2d369bd075b2c2965544036a27ccd277fc3c04f708338cc57533081"},\n {file = "multidict-6.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fc6af8e39f7496047c7876314f4317736eac82bf85b54c7c76cf1a6f8e35d98"},\n {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f8cb1329f42fadfb40d6211e5ff568d71ab49be36e759345f91c69d1033d633"},\n {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5389445f0173c197f4a3613713b5fb3f3879df1ded2a1a2e4bc4b5b9c5441b7e"},\n {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:94a7bb972178a8bfc4055db80c51efd24baefaced5e51c59b0d598a004e8305d"},\n {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da51d8928ad8b4244926fe862ba1795f0b6e68ed8c42cd2f822d435db9c2a8f4"},\n {file = "multidict-6.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:063be88bd684782a0715641de853e1e58a2f25b76388538bd62d974777ce9bc2"},\n {file = "multidict-6.2.0-cp311-cp311-win32.whl", hash = "sha256:52b05e21ff05729fbea9bc20b3a791c3c11da61649ff64cce8257c82a020466d"},\n {file = "multidict-6.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1e2a2193d3aa5cbf5758f6d5680a52aa848e0cf611da324f71e5e48a9695cc86"},\n {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b"},\n {file = "multidict-6.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4"},\n {file = "multidict-6.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44"},\n {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd"},\n {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e"},\n {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c"},\n {file = "multidict-6.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87"},\n {file = "multidict-6.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29"},\n {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd"},\n {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8"},\n {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df"},\n {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d"},\n {file = "multidict-6.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b"},\n {file = "multidict-6.2.0-cp312-cp312-win32.whl", hash = 
"sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626"},\n {file = "multidict-6.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c"},\n {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80"},\n {file = "multidict-6.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16"},\n {file = "multidict-6.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e"},\n {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817"},\n {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc"},\n {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1"},\n {file = "multidict-6.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844"},\n {file = "multidict-6.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48"},\n {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0"},\n {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f"},\n {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de"},\n {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02"},\n {file = "multidict-6.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d"},\n {file = "multidict-6.2.0-cp313-cp313-win32.whl", hash = "sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e"},\n {file = "multidict-6.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2"},\n {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7"},\n {file = "multidict-6.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b"},\n {file = "multidict-6.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e"},\n {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025"},\n {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd"},\n {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7"},\n {file = 
"multidict-6.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af"},\n {file = "multidict-6.2.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331"},\n {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c"},\n {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b"},\n {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151"},\n {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019"},\n {file = "multidict-6.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547"},\n {file = "multidict-6.2.0-cp313-cp313t-win32.whl", hash = "sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc"},\n {file = "multidict-6.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44"},\n {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b4f3d66dd0354b79761481fc15bdafaba0b9d9076f1f42cc9ce10d7fcbda205a"},\n {file = "multidict-6.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e2a2d6749e1ff2c9c76a72c6530d5baa601205b14e441e6d98011000f47a7ac"},\n {file = "multidict-6.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cca83a629f77402cfadd58352e394d79a61c8015f1694b83ab72237ec3941f88"},\n {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:781b5dd1db18c9e9eacc419027b0acb5073bdec9de1675c0be25ceb10e2ad133"},\n {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf8d370b2fea27fb300825ec3984334f7dd54a581bde6456799ba3776915a656"},\n {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25bb96338512e2f46f615a2bb7c6012fe92a4a5ebd353e5020836a7e33120349"},\n {file = "multidict-6.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e2819b0b468174de25c0ceed766606a07cedeab132383f1e83b9a4e96ccb4f"},\n {file = "multidict-6.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aed763b6a1b28c46c055692836879328f0b334a6d61572ee4113a5d0c859872"},\n {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a1133414b771619aa3c3000701c11b2e4624a7f492f12f256aedde97c28331a2"},\n {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:639556758c36093b35e2e368ca485dada6afc2bd6a1b1207d85ea6dfc3deab27"},\n {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:163f4604e76639f728d127293d24c3e208b445b463168af3d031b92b0998bb90"},\n {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2325105e16d434749e1be8022f942876a936f9bece4ec41ae244e3d7fae42aaf"},\n {file = "multidict-6.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e4371591e621579cb6da8401e4ea405b33ff25a755874a3567c4075ca63d56e2"},\n {file = "multidict-6.2.0-cp39-cp39-win32.whl", hash = 
"sha256:d1175b0e0d6037fab207f05774a176d71210ebd40b1c51f480a04b65ec5c786d"},\n {file = "multidict-6.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad81012b24b88aad4c70b2cbc2dad84018783221b7f923e926f4690ff8569da3"},\n {file = "multidict-6.2.0-py3-none-any.whl", hash = "sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530"},\n {file = "multidict-6.2.0.tar.gz", hash = "sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8"},\n]\n\n[package.dependencies]\ntyping-extensions = {version = ">=4.1.0", markers = "python_version < \\"3.11\\""}\n\n[[package]]\nname = "nest-asyncio"\nversion = "1.6.0"\ndescription = "Patch asyncio to allow nested event loops"\noptional = false\npython-versions = ">=3.5"\ngroups = ["main"]\nfiles = [\n {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},\n {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},\n]\n\n[[package]]\nname = "prometheus-client"\nversion = "0.21.1"\ndescription = "Python client for the Prometheus monitoring system."\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"},\n {file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"},\n]\n\n[package.extras]\ntwisted = ["twisted"]\n\n[[package]]\nname = "propcache"\nversion = "0.3.0"\ndescription = "Accelerated property cache"\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d"},\n {file = "propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c"},\n {file = "propcache-0.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3e7420211f5a65a54675fd860ea04173cde60a7cc20ccfbafcccd155225f8bc"},\n {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3302c5287e504d23bb0e64d2a921d1eb4a03fb93a0a0aa3b53de059f5a5d737d"},\n {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e2e068a83552ddf7a39a99488bcba05ac13454fb205c847674da0352602082f"},\n {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d913d36bdaf368637b4f88d554fb9cb9d53d6920b9c5563846555938d5450bf"},\n {file = "propcache-0.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ee1983728964d6070ab443399c476de93d5d741f71e8f6e7880a065f878e0b9"},\n {file = "propcache-0.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36ca5e9a21822cc1746023e88f5c0af6fce3af3b85d4520efb1ce4221bed75cc"},\n {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9ecde3671e62eeb99e977f5221abcf40c208f69b5eb986b061ccec317c82ebd0"},\n {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d383bf5e045d7f9d239b38e6acadd7b7fdf6c0087259a84ae3475d18e9a2ae8b"},\n {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8cb625bcb5add899cb8ba7bf716ec1d3e8f7cdea9b0713fa99eadf73b6d4986f"},\n {file = 
"propcache-0.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5fa159dcee5dba00c1def3231c249cf261185189205073bde13797e57dd7540a"},\n {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:a7080b0159ce05f179cfac592cda1a82898ca9cd097dacf8ea20ae33474fbb25"},\n {file = "propcache-0.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ed7161bccab7696a473fe7ddb619c1d75963732b37da4618ba12e60899fefe4f"},\n {file = "propcache-0.3.0-cp310-cp310-win32.whl", hash = "sha256:bf0d9a171908f32d54f651648c7290397b8792f4303821c42a74e7805bfb813c"},\n {file = "propcache-0.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:42924dc0c9d73e49908e35bbdec87adedd651ea24c53c29cac103ede0ea1d340"},\n {file = "propcache-0.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9ddd49258610499aab83b4f5b61b32e11fce873586282a0e972e5ab3bcadee51"},\n {file = "propcache-0.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2578541776769b500bada3f8a4eeaf944530516b6e90c089aa368266ed70c49e"},\n {file = "propcache-0.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8074c5dd61c8a3e915fa8fc04754fa55cfa5978200d2daa1e2d4294c1f136aa"},\n {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b58229a844931bca61b3a20efd2be2a2acb4ad1622fc026504309a6883686fbf"},\n {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e45377d5d6fefe1677da2a2c07b024a6dac782088e37c0b1efea4cfe2b1be19b"},\n {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec5060592d83454e8063e487696ac3783cc48c9a329498bafae0d972bc7816c9"},\n {file = "propcache-0.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15010f29fbed80e711db272909a074dc79858c6d28e2915704cfc487a8ac89c6"},\n {file = "propcache-0.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a254537b9b696ede293bfdbc0a65200e8e4507bc9f37831e2a0318a9b333c85c"},\n {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2b975528998de037dfbc10144b8aed9b8dd5a99ec547f14d1cb7c5665a43f075"},\n {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:19d36bb351ad5554ff20f2ae75f88ce205b0748c38b146c75628577020351e3c"},\n {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6032231d4a5abd67c7f71168fd64a47b6b451fbcb91c8397c2f7610e67683810"},\n {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6985a593417cdbc94c7f9c3403747335e450c1599da1647a5af76539672464d3"},\n {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:6a1948df1bb1d56b5e7b0553c0fa04fd0e320997ae99689488201f19fa90d2e7"},\n {file = "propcache-0.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8319293e85feadbbfe2150a5659dbc2ebc4afdeaf7d98936fb9a2f2ba0d4c35c"},\n {file = "propcache-0.3.0-cp311-cp311-win32.whl", hash = "sha256:63f26258a163c34542c24808f03d734b338da66ba91f410a703e505c8485791d"},\n {file = "propcache-0.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:cacea77ef7a2195f04f9279297684955e3d1ae4241092ff0cfcef532bb7a1c32"},\n {file = "propcache-0.3.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e"},\n {file = "propcache-0.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af"},\n {file = 
"propcache-0.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5"},\n {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b"},\n {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667"},\n {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7"},\n {file = "propcache-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7"},\n {file = "propcache-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf"},\n {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138"},\n {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86"},\n {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d"},\n {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e"},\n {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64"},\n {file = "propcache-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c"},\n {file = "propcache-0.3.0-cp312-cp312-win32.whl", hash = "sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d"},\n {file = "propcache-0.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57"},\n {file = "propcache-0.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568"},\n {file = "propcache-0.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9"},\n {file = "propcache-0.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767"},\n {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8"},\n {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0"},\n {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d"},\n {file = "propcache-0.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05"},\n {file = "propcache-0.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe"},\n {file = 
"propcache-0.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1"},\n {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92"},\n {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787"},\n {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545"},\n {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e"},\n {file = "propcache-0.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626"},\n {file = "propcache-0.3.0-cp313-cp313-win32.whl", hash = "sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374"},\n {file = "propcache-0.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a"},\n {file = "propcache-0.3.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf"},\n {file = "propcache-0.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0"},\n {file = "propcache-0.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829"},\n {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa"},\n {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6"},\n {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db"},\n {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54"},\n {file = "propcache-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121"},\n {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e"},\n {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e"},\n {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a"},\n {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac"},\n {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e"},\n {file = "propcache-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf"},\n {file = "propcache-0.3.0-cp313-cp313t-win32.whl", hash = "sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863"},\n {file = 
"propcache-0.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46"},\n {file = "propcache-0.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:03c091bb752349402f23ee43bb2bff6bd80ccab7c9df6b88ad4322258d6960fc"},\n {file = "propcache-0.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46ed02532cb66612d42ae5c3929b5e98ae330ea0f3900bc66ec5f4862069519b"},\n {file = "propcache-0.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11ae6a8a01b8a4dc79093b5d3ca2c8a4436f5ee251a9840d7790dccbd96cb649"},\n {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df03cd88f95b1b99052b52b1bb92173229d7a674df0ab06d2b25765ee8404bce"},\n {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03acd9ff19021bd0567582ac88f821b66883e158274183b9e5586f678984f8fe"},\n {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd54895e4ae7d32f1e3dd91261df46ee7483a735017dc6f987904f194aa5fd14"},\n {file = "propcache-0.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26a67e5c04e3119594d8cfae517f4b9330c395df07ea65eab16f3d559b7068fe"},\n {file = "propcache-0.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee25f1ac091def37c4b59d192bbe3a206298feeb89132a470325bf76ad122a1e"},\n {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58e6d2a5a7cb3e5f166fd58e71e9a4ff504be9dc61b88167e75f835da5764d07"},\n {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:be90c94570840939fecedf99fa72839aed70b0ced449b415c85e01ae67422c90"},\n {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:49ea05212a529c2caffe411e25a59308b07d6e10bf2505d77da72891f9a05641"},\n {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:119e244ab40f70a98c91906d4c1f4c5f2e68bd0b14e7ab0a06922038fae8a20f"},\n {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:507c5357a8d8b4593b97fb669c50598f4e6cccbbf77e22fa9598aba78292b4d7"},\n {file = "propcache-0.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8526b0941ec5a40220fc4dfde76aed58808e2b309c03e9fa8e2260083ef7157f"},\n {file = "propcache-0.3.0-cp39-cp39-win32.whl", hash = "sha256:7cedd25e5f678f7738da38037435b340694ab34d424938041aa630d8bac42663"},\n {file = "propcache-0.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:bf4298f366ca7e1ad1d21bbb58300a6985015909964077afd37559084590c929"},\n {file = "propcache-0.3.0-py3-none-any.whl", hash = "sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043"},\n {file = "propcache-0.3.0.tar.gz", hash = "sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5"},\n]\n\n[[package]]\nname = "protobuf"\nversion = "5.29.4"\ndescription = ""\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"},\n {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"},\n {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"},\n {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"},\n {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"},\n {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"},\n {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"},\n {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"},\n {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"},\n {file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"},\n {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"},\n]\n\n[[package]]\nname = "pydantic"\nversion = "2.10.6"\ndescription = "Data validation using Python type hints"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"},\n {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"},\n]\n\n[package.dependencies]\nannotated-types = ">=0.6.0"\npydantic-core = "2.27.2"\ntyping-extensions = ">=4.12.2"\n\n[package.extras]\nemail = ["email-validator (>=2.0.0)"]\ntimezone = ["tzdata"]\n\n[[package]]\nname = "pydantic-core"\nversion = "2.27.2"\ndescription = "Core functionality for Pydantic validation and serialization"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"},\n {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"},\n {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"},\n {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"},\n {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"},\n {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"},\n {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"},\n {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"},\n {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"},\n {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"},\n {file = 
"pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"},\n {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"},\n {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"},\n {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"},\n {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"},\n {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"},\n {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"},\n {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"},\n {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"},\n {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"},\n {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"},\n {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"},\n {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"},\n {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"},\n {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"},\n {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"},\n {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"},\n {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"},\n {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"},\n {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"},\n {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"},\n {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"},\n {file = 
"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"},\n {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"},\n {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"},\n {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"},\n {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"},\n {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"},\n {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"},\n {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"},\n {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"},\n {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"},\n {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"},\n {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"},\n {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"},\n {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"},\n {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"},\n {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"},\n {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"},\n {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"},\n {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"},\n {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"},\n {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"},\n {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"},\n {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", 
hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"},\n {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"},\n {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"},\n {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"},\n {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"},\n {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"},\n {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"},\n {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"},\n {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"},\n {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"},\n {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"},\n {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"},\n {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"},\n {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"},\n {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"},\n {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"},\n {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"},\n {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"},\n {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"},\n {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"},\n {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"},\n {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"},\n {file = 
"pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"},\n {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"},\n {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"},\n {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"},\n {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"},\n {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"},\n {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"},\n {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"},\n {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"},\n {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"},\n {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"},\n {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"},\n {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"},\n {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"},\n {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"},\n {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"},\n {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"},\n {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"},\n {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"},\n {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"},\n {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"},\n {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"},\n 
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"},\n {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"},\n]\n\n[package.dependencies]\ntyping-extensions = ">=4.6.0,<4.7.0 || >4.7.0"\n\n[[package]]\nname = "pydantic-settings"\nversion = "2.8.1"\ndescription = "Settings management using Pydantic"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c"},\n {file = "pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585"},\n]\n\n[package.dependencies]\npydantic = ">=2.7.0"\npython-dotenv = ">=0.21.0"\n\n[package.extras]\nazure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"]\ntoml = ["tomli (>=2.0.1)"]\nyaml = ["pyyaml (>=6.0.1)"]\n\n[[package]]\nname = "python-dateutil"\nversion = "2.9.0.post0"\ndescription = "Extensions to the standard Python datetime module"\noptional = false\npython-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"\ngroups = ["main"]\nfiles = [\n {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},\n {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},\n]\n\n[package.dependencies]\nsix = ">=1.5"\n\n[[package]]\nname = "python-dotenv"\nversion = "1.0.1"\ndescription = "Read key-value pairs from a .env file and set them as environment variables"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"},\n {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"},\n]\n\n[package.extras]\ncli = ["click (>=5.0)"]\n\n[[package]]\nname = "pyyaml"\nversion = "6.0.2"\ndescription = "YAML parser and emitter for Python"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},\n {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},\n {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},\n {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},\n {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},\n {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},\n {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},\n {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},\n {file = 
"PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},\n {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},\n {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},\n {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},\n {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},\n {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},\n {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},\n {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},\n {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},\n {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},\n {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},\n {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},\n {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},\n {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},\n {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},\n {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},\n {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},\n {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},\n {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},\n {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},\n {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},\n {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},\n {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},\n {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},\n {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},\n {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},\n {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},\n {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},\n {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},\n {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},\n {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},\n {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},\n {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},\n {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},\n {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},\n {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},\n {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},\n {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},\n {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},\n {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},\n {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},\n {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},\n {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},\n {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},\n {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},\n]\n\n[[package]]\nname = "setuptools"\nversion = "78.0.2"\ndescription = "Easily download, build, install, upgrade, and uninstall Python packages"\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "setuptools-78.0.2-py3-none-any.whl", hash = "sha256:4a612c80e1f1d71b80e4906ce730152e8dec23df439f82731d9d0b608d7b700d"},\n {file = "setuptools-78.0.2.tar.gz", hash = 
"sha256:137525e6afb9022f019d6e884a319017f9bf879a0d8783985d32cbc8683cab93"},\n]\n\n[package.extras]\ncheck = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]\ncore = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]\ncover = ["pytest-cov"]\ndoc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]\nenabler = ["pytest-enabler (>=2.2)"]\ntest = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]\ntype = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]\n\n[[package]]\nname = "six"\nversion = "1.17.0"\ndescription = "Python 2 and 3 compatibility utilities"\noptional = false\npython-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"\ngroups = ["main"]\nfiles = [\n {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},\n {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},\n]\n\n[[package]]\nname = "tenacity"\nversion = "9.0.0"\ndescription = "Retry code until it succeeds"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},\n {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},\n]\n\n[package.extras]\ndoc = ["reno", "sphinx"]\ntest = ["pytest", "tornado (>=4.5)", "typeguard"]\n\n[[package]]\nname = "types-python-dateutil"\nversion = "2.9.0.20241206"\ndescription = "Typing stubs for python-dateutil"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"},\n {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"},\n]\n\n[[package]]\nname = "types-pyyaml"\nversion = "6.0.12.20241230"\ndescription = "Typing stubs for PyYAML"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "types_PyYAML-6.0.12.20241230-py3-none-any.whl", hash = "sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6"},\n {file = "types_pyyaml-6.0.12.20241230.tar.gz", hash = "sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c"},\n]\n\n[[package]]\nname = "typing-extensions"\nversion = "4.12.2"\ndescription = "Backported and Experimental Type Hints for Python 3.8+"\noptional = false\npython-versions = ">=3.8"\ngroups = ["main"]\nfiles = [\n {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},\n {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},\n]\n\n[[package]]\nname = "urllib3"\nversion = "2.3.0"\ndescription = "HTTP library with thread-safe connection pooling, file post, and more."\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"},\n {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"},\n]\n\n[package.extras]\nbrotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]\nh2 = ["h2 (>=4,<5)"]\nsocks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]\nzstd = ["zstandard (>=0.18.0)"]\n\n[[package]]\nname = "yarl"\nversion = "1.18.3"\ndescription = "Yet another URL library"\noptional = false\npython-versions = ">=3.9"\ngroups = ["main"]\nfiles = [\n {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"},\n {file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"},\n {file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"},\n {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"},\n {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"},\n {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"},\n {file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"},\n {file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"},\n {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"},\n {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"},\n {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"},\n {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"},\n {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"},\n {file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"},\n {file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"},\n {file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"},\n {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"},\n {file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"},\n {file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"},\n {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"},\n {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"},\n {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"},\n {file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"},\n {file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"},\n {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"},\n {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"},\n {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"},\n {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"},\n {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"},\n {file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"},\n {file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"},\n {file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"},\n {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"},\n {file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"},\n {file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"},\n {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"},\n {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"},\n {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"},\n {file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"},\n {file = 
"yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"},\n {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"},\n {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"},\n {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"},\n {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"},\n {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"},\n {file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"},\n {file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"},\n {file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"},\n {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"},\n {file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"},\n {file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"},\n {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"},\n {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"},\n {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"},\n {file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"},\n {file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"},\n {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"},\n {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"},\n {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"},\n {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"},\n {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"},\n {file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"},\n {file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = 
"sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"},\n {file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"},\n {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"},\n {file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"},\n {file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"},\n {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"},\n {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"},\n {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"},\n {file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"},\n {file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"},\n {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"},\n {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"},\n {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"},\n {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"},\n {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"},\n {file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"},\n {file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"},\n {file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"},\n {file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"},\n {file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"},\n]\n\n[package.dependencies]\nidna = ">=2.0"\nmultidict = ">=4.0"\npropcache = ">=0.2.0"\n\n[metadata]\nlock-version = "2.1"\npython-versions = "^3.10"\ncontent-hash = "74c12e499aa797ca5c8559af579f1212b0e4e3a77f068f9385db39d70ba304e0"\n', - source: 'out/python/quickstart/poetry.lock', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/pyproject.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/pyproject.ts deleted file mode 100644 index 413651e31..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/pyproject.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { 
Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - '[tool.poetry]\nname = "hatchet-python-quickstart"\nversion = "0.1.0"\ndescription = "Simple Setup to Run Hatchet Workflows"\nauthors = ["gabriel ruttner "]\nreadme = "README.md"\npackage-mode = false\n\n[tool.poetry.dependencies]\npython = "^3.10"\nhatchet-sdk = "1.0.0a1"\n\n\n[build-system]\nrequires = ["poetry-core"]\nbuild-backend = "poetry.core.masonry.api"\n\n[tool.poetry.scripts]\nsimple = "src.run:main"\nworker = "src.worker:main"\n', - source: 'out/python/quickstart/pyproject.toml', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/run.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/run.ts deleted file mode 100644 index 6266c9cf9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nfrom .workflows.first_task import SimpleInput, first_task\n\n\nasync def main() -> None:\n result = await first_task.aio_run(SimpleInput(message="Hello World!"))\n\n print(\n "Finished running task, and got the transformed message! The transformed message is:",\n result.transformed_message,\n )\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/quickstart/run.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/worker.ts deleted file mode 100644 index 499b304ae..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from .hatchet_client import hatchet\nfrom .workflows.first_task import first_task\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "first-worker",\n slots=10,\n workflows=[first_task],\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/quickstart/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/__init__.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/__init__.ts deleted file mode 100644 index 451d4f6f8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/__init__.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: '', - source: 'out/python/quickstart/workflows/__init__.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/first_task.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/first_task.ts deleted file mode 100644 index 707468091..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/first_task.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: 
- 'from pydantic import BaseModel\n\nfrom hatchet_sdk import Context\n\nfrom ..hatchet_client import hatchet\n\n\nclass SimpleInput(BaseModel):\n message: str\n\n\nclass SimpleOutput(BaseModel):\n transformed_message: str\n\n\n# Declare the task to run\n@hatchet.task(name="first-task", input_validator=SimpleInput)\ndef first_task(input: SimpleInput, ctx: Context) -> SimpleOutput:\n print("first-task task called")\n\n return SimpleOutput(transformed_message=input.message.lower())\n', - source: 'out/python/quickstart/workflows/first_task.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/index.ts deleted file mode 100644 index e645441af..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/quickstart/workflows/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import __init__ from './__init__'; -import first_task from './first_task'; - -export { __init__ }; -export { first_task }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/dynamic.ts b/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/dynamic.ts deleted file mode 100644 index a6599381d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/dynamic.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\nfrom hatchet_sdk.rate_limit import RateLimit\n\nhatchet = Hatchet(debug=True)\n\n\nclass DynamicRateLimitInput(BaseModel):\n group: str\n units: int\n limit: int\n\n\ndynamic_rate_limit_workflow = hatchet.workflow(\n name="DynamicRateLimitWorkflow", input_validator=DynamicRateLimitInput\n)\n\n\n@dynamic_rate_limit_workflow.task(\n rate_limits=[\n RateLimit(\n dynamic_key=\'"LIMIT:"+input.group\',\n units="input.units",\n limit="input.limit",\n )\n ]\n)\ndef step1(input: DynamicRateLimitInput, ctx: Context) -> None:\n print("executed step1")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "rate-limit-worker", slots=10, workflows=[dynamic_rate_limit_workflow]\n )\n worker.start()\n', - source: 'out/python/rate_limit/dynamic.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/index.ts deleted file mode 100644 index a0208eeb6..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import dynamic from './dynamic'; -import trigger from './trigger'; -import worker from './worker'; - -export { dynamic }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/trigger.ts deleted file mode 100644 index b61c8a3a9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.rate_limit.worker import rate_limit_workflow\nfrom hatchet_sdk.hatchet import Hatchet\n\nhatchet = 
Hatchet(debug=True)\n\nrate_limit_workflow.run()\nrate_limit_workflow.run()\nrate_limit_workflow.run()\n', - source: 'out/python/rate_limit/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/worker.ts deleted file mode 100644 index eaa24f481..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/rate_limit/worker.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\nfrom hatchet_sdk.rate_limit import RateLimit, RateLimitDuration\n\nhatchet = Hatchet(debug=True)\n\n\n# > Workflow\nclass RateLimitInput(BaseModel):\n user_id: str\n\n\nrate_limit_workflow = hatchet.workflow(\n name="RateLimitWorkflow", input_validator=RateLimitInput\n)\n\n\n\n# > Static\nRATE_LIMIT_KEY = "test-limit"\n\n\n@rate_limit_workflow.task(rate_limits=[RateLimit(static_key=RATE_LIMIT_KEY, units=1)])\ndef step_1(input: RateLimitInput, ctx: Context) -> None:\n print("executed step_1")\n\n\n\n# > Dynamic\n\n\n@rate_limit_workflow.task(\n rate_limits=[\n RateLimit(\n dynamic_key="input.user_id",\n units=1,\n limit=10,\n duration=RateLimitDuration.MINUTE,\n )\n ]\n)\ndef step_2(input: RateLimitInput, ctx: Context) -> None:\n print("executed step_2")\n\n\n\n\ndef main() -> None:\n hatchet.rate_limits.put(RATE_LIMIT_KEY, 2, RateLimitDuration.SECOND)\n\n worker = hatchet.worker(\n "rate-limit-worker", slots=10, workflows=[rate_limit_workflow]\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/rate_limit/worker.py', - blocks: { - workflow: { - start: 10, - stop: 17, - }, - static: { - start: 21, - stop: 28, - }, - dynamic: { - start: 31, - stop: 46, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/retries/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/retries/index.ts deleted file mode 100644 index c1b323985..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/retries/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import worker from './worker'; - -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/retries/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/retries/worker.ts deleted file mode 100644 index ba549e4b9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/retries/worker.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\nsimple_workflow = hatchet.workflow(name="SimpleRetryWorkflow")\nbackoff_workflow = hatchet.workflow(name="BackoffWorkflow")\n\n\n# > Simple Step Retries\n@simple_workflow.task(retries=3)\ndef always_fail(input: EmptyModel, ctx: Context) -> dict[str, str]:\n raise Exception("simple task failed")\n\n\n\n\n# > Retries with Count\n@simple_workflow.task(retries=3)\ndef fail_twice(input: EmptyModel, ctx: Context) -> dict[str, str]:\n if ctx.retry_count < 2:\n raise Exception("simple task failed")\n\n return {"status": "success"}\n\n\n\n\n# > Retries with Backoff\n@backoff_workflow.task(\n retries=10,\n # 👀 Maximum number 
of seconds to wait between retries\n backoff_max_seconds=10,\n # 👀 Factor to increase the wait time between retries.\n # This sequence will be 2s, 4s, 8s, 10s, 10s, 10s... due to the maxSeconds limit\n backoff_factor=2.0,\n)\ndef backoff_task(input: EmptyModel, ctx: Context) -> dict[str, str]:\n if ctx.retry_count < 3:\n raise Exception("backoff task failed")\n\n return {"status": "success"}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker("backoff-worker", slots=4, workflows=[backoff_workflow])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/retries/worker.py', - blocks: { - simple_step_retries: { - start: 10, - stop: 14, - }, - retries_with_count: { - start: 18, - stop: 25, - }, - retries_with_backoff: { - start: 29, - stop: 43, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/index.ts deleted file mode 100644 index 0d75b755c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import test_return_exceptions from './test_return_exceptions'; -import worker from './worker'; - -export { test_return_exceptions }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/test_return_exceptions.ts b/frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/test_return_exceptions.ts deleted file mode 100644 index 1aabaff71..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/test_return_exceptions.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nimport pytest\n\nfrom examples.return_exceptions.worker import Input, return_exceptions_task\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_return_exceptions_async() -> None:\n results = await return_exceptions_task.aio_run_many(\n [\n return_exceptions_task.create_bulk_run_item(input=Input(index=i))\n for i in range(10)\n ],\n return_exceptions=True,\n )\n\n for i, result in enumerate(results):\n if i % 2 == 0:\n assert isinstance(result, Exception)\n assert f"error in task with index {i}" in str(result)\n else:\n assert result == {"message": "this is a successful task."}\n\n\ndef test_return_exceptions_sync() -> None:\n results = return_exceptions_task.run_many(\n [\n return_exceptions_task.create_bulk_run_item(input=Input(index=i))\n for i in range(10)\n ],\n return_exceptions=True,\n )\n\n for i, result in enumerate(results):\n if i % 2 == 0:\n assert isinstance(result, Exception)\n assert f"error in task with index {i}" in str(result)\n else:\n assert result == {"message": "this is a successful task."}\n', - source: 'out/python/return_exceptions/test_return_exceptions.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/worker.ts deleted file mode 100644 index ca09ca5e2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/return_exceptions/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from 
pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet()\n\n\nclass Input(EmptyModel):\n index: int\n\n\n@hatchet.task(input_validator=Input)\nasync def return_exceptions_task(input: Input, ctx: Context) -> dict[str, str]:\n if input.index % 2 == 0:\n raise ValueError(f"error in task with index {input.index}")\n\n return {"message": "this is a successful task."}\n', - source: 'out/python/return_exceptions/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/scheduled/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/scheduled/index.ts deleted file mode 100644 index 563be6a90..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/scheduled/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import programatic_async from './programatic-async'; -import programatic_sync from './programatic-sync'; - -export { programatic_async }; -export { programatic_sync }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/scheduled/programatic-async.ts b/frontend/app/src/next/lib/docs/generated/snips/python/scheduled/programatic-async.ts deleted file mode 100644 index 609d1a9b5..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/scheduled/programatic-async.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from datetime import datetime, timedelta, timezone\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n\nasync def create_scheduled() -> None:\n # > Create\n scheduled_run = await hatchet.scheduled.aio_create(\n workflow_name="simple-workflow",\n trigger_at=datetime.now(tz=timezone.utc) + timedelta(seconds=10),\n input={\n "data": "simple-workflow-data",\n },\n additional_metadata={\n "customer_id": "customer-a",\n },\n )\n\n scheduled_run.metadata.id # the id of the scheduled run trigger\n\n # > Delete\n await hatchet.scheduled.aio_delete(scheduled_id=scheduled_run.metadata.id)\n\n # > List\n await hatchet.scheduled.aio_list()\n\n # > Get\n scheduled_run = await hatchet.scheduled.aio_get(\n scheduled_id=scheduled_run.metadata.id\n )\n', - source: 'out/python/scheduled/programatic-async.py', - blocks: { - create: { - start: 10, - stop: 21, - }, - delete: { - start: 24, - stop: 24, - }, - list: { - start: 27, - stop: 27, - }, - get: { - start: 30, - stop: 32, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/scheduled/programatic-sync.ts b/frontend/app/src/next/lib/docs/generated/snips/python/scheduled/programatic-sync.ts deleted file mode 100644 index ee267cbd0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/scheduled/programatic-sync.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from datetime import datetime, timedelta, timezone\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n# > Create\nscheduled_run = hatchet.scheduled.create(\n workflow_name="simple-workflow",\n trigger_at=datetime.now(tz=timezone.utc) + timedelta(seconds=10),\n input={\n "data": "simple-workflow-data",\n },\n additional_metadata={\n "customer_id": "customer-a",\n },\n)\n\nid = scheduled_run.metadata.id # the id of the scheduled run trigger\n\n# > 
Delete\nhatchet.scheduled.delete(scheduled_id=scheduled_run.metadata.id)\n\n# > List\nscheduled_runs = hatchet.scheduled.list()\n\n# > Get\nscheduled_run = hatchet.scheduled.get(scheduled_id=scheduled_run.metadata.id)\n', - source: 'out/python/scheduled/programatic-sync.py', - blocks: { - create: { - start: 8, - stop: 19, - }, - delete: { - start: 22, - stop: 22, - }, - list: { - start: 25, - stop: 25, - }, - get: { - start: 28, - stop: 28, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/simple/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/simple/index.ts deleted file mode 100644 index 20dbfdc2e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/simple/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_simple_workflow from './test_simple_workflow'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_simple_workflow }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/simple/test_simple_workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/python/simple/test_simple_workflow.ts deleted file mode 100644 index cb61d4b80..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/simple/test_simple_workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import pytest\n\nfrom examples.simple.worker import simple, simple_durable\nfrom hatchet_sdk import EmptyModel\nfrom hatchet_sdk.runnables.workflow import Standalone\n\n\n@pytest.mark.parametrize("task", [simple, simple_durable])\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_simple_workflow_running_options(\n task: Standalone[EmptyModel, dict[str, str]],\n) -> None:\n x1 = task.run()\n x2 = await task.aio_run()\n\n x3 = task.run_many([task.create_bulk_run_item()])[0]\n x4 = (await task.aio_run_many([task.create_bulk_run_item()]))[0]\n\n x5 = task.run_no_wait().result()\n x6 = (await task.aio_run_no_wait()).result()\n x7 = [x.result() for x in task.run_many_no_wait([task.create_bulk_run_item()])][0]\n x8 = [\n x.result()\n for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n\n x9 = await task.run_no_wait().aio_result()\n x10 = await (await task.aio_run_no_wait()).aio_result()\n x11 = [\n await x.aio_result()\n for x in task.run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n x12 = [\n await x.aio_result()\n for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n\n assert all(\n x == {"result": "Hello, world!"}\n for x in [x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12]\n )\n', - source: 'out/python/simple/test_simple_workflow.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/simple/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/simple/trigger.ts deleted file mode 100644 index 40a1ee5e3..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/simple/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: 'from examples.simple.worker import simple\n\nsimple.run()\n', - source: 'out/python/simple/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git 
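The deleted test_simple_workflow.py above enumerates twelve equivalent ways to run the same task. Condensed to the four basic patterns (blocking, async, fire-and-forget, bulk), and using only methods that appear verbatim in that snippet, a sketch looks like this — assuming the repo's `examples` package is importable, a valid client token is configured, and a worker for `simple` is running:

```python
import asyncio

from examples.simple.worker import simple


async def main() -> None:
    # Blocking and async flavors that wait for the result.
    result_sync = simple.run()
    result_async = await simple.aio_run()

    # Fire-and-forget: get a run reference now, await the result later.
    ref = simple.run_no_wait()
    result_later = await ref.aio_result()  # or ref.result() from sync code

    # Bulk: submit many runs in one call and collect all results.
    bulk_results = await simple.aio_run_many([simple.create_bulk_run_item()])

    assert all(
        r == {"result": "Hello, world!"}
        for r in [result_sync, result_async, result_later, *bulk_results]
    )


if __name__ == "__main__":
    asyncio.run(main())
```

The `*_no_wait` variants return references immediately, which is how the test exercises both `result()` and `aio_result()` against the same run.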
a/frontend/app/src/next/lib/docs/generated/snips/python/simple/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/simple/worker.ts deleted file mode 100644 index 94fc3d484..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/simple/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Simple\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n@hatchet.task()\ndef simple(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {"result": "Hello, world!"}\n\n\n@hatchet.durable_task()\ndef simple_durable(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {"result": "Hello, world!"}\n\n\ndef main() -> None:\n worker = hatchet.worker("test-worker", workflows=[simple, simple_durable])\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/simple/worker.py', - blocks: { - simple: { - start: 2, - stop: 22, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/event.ts b/frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/event.ts deleted file mode 100644 index 7ccec6c32..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/event.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.sticky_workers.worker import sticky_workflow\nfrom hatchet_sdk import TriggerWorkflowOptions\n\nsticky_workflow.run(\n options=TriggerWorkflowOptions(additional_metadata={"hello": "moon"}),\n)\n', - source: 'out/python/sticky_workers/event.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/index.ts deleted file mode 100644 index 5d82219b0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import event from './event'; -import worker from './worker'; - -export { event }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/worker.ts deleted file mode 100644 index 1489c2f3e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/sticky_workers/worker.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from hatchet_sdk import (\n Context,\n EmptyModel,\n Hatchet,\n StickyStrategy,\n TriggerWorkflowOptions,\n)\n\nhatchet = Hatchet(debug=True)\n\n# > StickyWorker\n\n\nsticky_workflow = hatchet.workflow(\n name="StickyWorkflow",\n # 👀 Specify a sticky strategy when declaring the workflow\n sticky=StickyStrategy.SOFT,\n)\n\n\n@sticky_workflow.task()\ndef step1a(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n return {"worker": ctx.worker.id()}\n\n\n@sticky_workflow.task()\ndef step1b(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n return {"worker": ctx.worker.id()}\n\n\n\n# > StickyChild\n\nsticky_child_workflow = hatchet.workflow(\n name="StickyChildWorkflow", 
sticky=StickyStrategy.SOFT\n)\n\n\n@sticky_workflow.task(parents=[step1a, step1b])\nasync def step2(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n ref = await sticky_child_workflow.aio_run_no_wait(\n options=TriggerWorkflowOptions(sticky=True)\n )\n\n await ref.aio_result()\n\n return {"worker": ctx.worker.id()}\n\n\n@sticky_child_workflow.task()\ndef child(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n return {"worker": ctx.worker.id()}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "sticky-worker", slots=10, workflows=[sticky_workflow, sticky_child_workflow]\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/sticky_workers/worker.py', - blocks: { - stickyworker: { - start: 12, - stop: 30, - }, - stickychild: { - start: 33, - stop: 54, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/async_stream.ts b/frontend/app/src/next/lib/docs/generated/snips/python/streaming/async_stream.ts deleted file mode 100644 index 2bbb8c145..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/async_stream.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\n\nfrom examples.streaming.worker import hatchet, stream_task\nfrom hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType\n\n\nasync def main() -> None:\n # > Consume\n ref = await stream_task.aio_run_no_wait()\n\n async for chunk in hatchet.runs.subscribe_to_stream(ref.workflow_run_id):\n print(chunk, flush=True, end="")\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n', - source: 'out/python/streaming/async_stream.py', - blocks: { - consume: { - start: 9, - stop: 12, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/fastapi_proxy.ts b/frontend/app/src/next/lib/docs/generated/snips/python/streaming/fastapi_proxy.ts deleted file mode 100644 index d194c31fb..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/fastapi_proxy.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from typing import AsyncGenerator\n\nfrom fastapi import FastAPI\nfrom fastapi.responses import StreamingResponse\n\nfrom examples.streaming.worker import stream_task\nfrom hatchet_sdk import Hatchet\n\n# > FastAPI Proxy\nhatchet = Hatchet()\napp = FastAPI()\n\n\n@app.get("/stream")\nasync def stream() -> StreamingResponse:\n ref = await stream_task.aio_run_no_wait()\n\n return StreamingResponse(\n hatchet.runs.subscribe_to_stream(ref.workflow_run_id), media_type="text/plain"\n )\n\n\n\nif __name__ == "__main__":\n import uvicorn\n\n uvicorn.run(app, host="0.0.0.0", port=8000)\n', - source: 'out/python/streaming/fastapi_proxy.py', - blocks: { - fastapi_proxy: { - start: 10, - stop: 22, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/streaming/index.ts deleted file mode 100644 index 836654a31..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import async_stream from './async_stream'; -import fastapi_proxy 
from './fastapi_proxy'; -import test_streaming from './test_streaming'; -import worker from './worker'; - -export { async_stream }; -export { fastapi_proxy }; -export { test_streaming }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/test_streaming.ts b/frontend/app/src/next/lib/docs/generated/snips/python/streaming/test_streaming.ts deleted file mode 100644 index 705041728..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/test_streaming.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from subprocess import Popen\nfrom typing import Any\n\nimport pytest\n\nfrom examples.streaming.worker import chunks, stream_task\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType\n\n\n@pytest.mark.parametrize(\n "on_demand_worker",\n [\n (\n ["poetry", "run", "python", "examples/streaming/worker.py", "--slots", "1"],\n 8008,\n )\n ],\n indirect=True,\n)\n@pytest.mark.parametrize("execution_number", range(5)) # run test multiple times\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_streaming_ordering_and_completeness(\n execution_number: int,\n hatchet: Hatchet,\n on_demand_worker: Popen[Any],\n) -> None:\n ref = await stream_task.aio_run_no_wait()\n\n ix = 0\n anna_karenina = ""\n\n async for chunk in hatchet.runs.subscribe_to_stream(ref.workflow_run_id):\n assert chunks[ix] == chunk\n ix += 1\n anna_karenina += chunk\n\n assert ix == len(chunks)\n assert anna_karenina == "".join(chunks)\n\n await ref.aio_result()\n', - source: 'out/python/streaming/test_streaming.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/streaming/worker.ts deleted file mode 100644 index d6b8d97a8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/streaming/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nfrom typing import Generator\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=False)\n\n# > Streaming\n\nanna_karenina = """\nHappy families are all alike; every unhappy family is unhappy in its own way.\n\nEverything was in confusion in the Oblonskys\' house. 
The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him.\n"""\n\n\ndef create_chunks(content: str, n: int) -> Generator[str, None, None]:\n for i in range(0, len(content), n):\n yield content[i : i + n]\n\n\nchunks = list(create_chunks(anna_karenina, 10))\n\n\n@hatchet.task()\nasync def stream_task(input: EmptyModel, ctx: Context) -> None:\n # 👀 Sleeping to avoid race conditions\n await asyncio.sleep(2)\n\n for chunk in chunks:\n await ctx.aio_put_stream(chunk)\n await asyncio.sleep(0.20)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker("test-worker", workflows=[stream_task])\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/streaming/worker.py', - blocks: { - streaming: { - start: 9, - stop: 34, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/timeout/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/timeout/index.ts deleted file mode 100644 index 08b91447f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/timeout/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_timeout from './test_timeout'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_timeout }; -export { trigger }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/timeout/test_timeout.ts b/frontend/app/src/next/lib/docs/generated/snips/python/timeout/test_timeout.ts deleted file mode 100644 index b2454801c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/timeout/test_timeout.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import pytest\n\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_execution_timeout() -> None:\n run = timeout_wf.run_no_wait()\n\n with pytest.raises(\n Exception,\n match="(Task exceeded timeout|TIMED_OUT|Workflow run .* failed with multiple errors)",\n ):\n await run.aio_result()\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_run_refresh_timeout() -> None:\n result = await refresh_timeout_wf.aio_run()\n\n assert result["refresh_task"]["status"] == "success"\n', - source: 'out/python/timeout/test_timeout.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/timeout/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/timeout/trigger.ts deleted file mode 100644 index fe206314a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/timeout/trigger.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.timeout.worker import refresh_timeout_wf, timeout_wf\n\ntimeout_wf.run()\nrefresh_timeout_wf.run()\n', - source: 'out/python/timeout/trigger.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/timeout/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/timeout/worker.ts deleted file mode 100644 index 81943e7e5..000000000 --- 
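fastapi_proxy.py above shows only the server side of proxying a task's stream over HTTP. A minimal client-side sketch for that endpoint — my own addition, not one of the generated examples — using aiohttp (already a dependency of the webhook tests below) and assuming the proxy is serving on localhost:8000:

```python
import asyncio

import aiohttp


async def consume_stream() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get("http://localhost:8000/stream") as response:
            # The proxy forwards the task's stream as text/plain chunks;
            # print each chunk as soon as it arrives.
            async for chunk in response.content.iter_any():
                print(chunk.decode(), flush=True, end="")


if __name__ == "__main__":
    asyncio.run(consume_stream())
```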
a/frontend/app/src/next/lib/docs/generated/snips/python/timeout/worker.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import time\nfrom datetime import timedelta\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet, TaskDefaults\n\nhatchet = Hatchet(debug=True)\n\n# > ScheduleTimeout\ntimeout_wf = hatchet.workflow(\n name="TimeoutWorkflow",\n task_defaults=TaskDefaults(execution_timeout=timedelta(minutes=2)),\n)\n\n\n# > ExecutionTimeout\n# 👀 Specify an execution timeout on a task\n@timeout_wf.task(\n execution_timeout=timedelta(seconds=5), schedule_timeout=timedelta(minutes=10)\n)\ndef timeout_task(input: EmptyModel, ctx: Context) -> dict[str, str]:\n time.sleep(30)\n return {"status": "success"}\n\n\n\nrefresh_timeout_wf = hatchet.workflow(name="RefreshTimeoutWorkflow")\n\n\n# > RefreshTimeout\n@refresh_timeout_wf.task(execution_timeout=timedelta(seconds=4))\ndef refresh_task(input: EmptyModel, ctx: Context) -> dict[str, str]:\n ctx.refresh_timeout(timedelta(seconds=10))\n time.sleep(5)\n\n return {"status": "success"}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "timeout-worker", slots=4, workflows=[timeout_wf, refresh_timeout_wf]\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/timeout/worker.py', - blocks: { - scheduletimeout: { - start: 9, - stop: 12, - }, - executiontimeout: { - start: 16, - stop: 24, - }, - refreshtimeout: { - start: 30, - stop: 37, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/index.ts deleted file mode 100644 index 7b1b8c337..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import test_unit from './test_unit'; -import workflows from './workflows'; - -export { test_unit }; -export { workflows }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/test_unit.ts b/frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/test_unit.ts deleted file mode 100644 index d7a34cd47..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/test_unit.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import pytest\n\nfrom examples.unit_testing.workflows import (\n Lifespan,\n UnitTestInput,\n UnitTestOutput,\n async_complex_workflow,\n async_simple_workflow,\n async_standalone,\n durable_async_complex_workflow,\n durable_async_simple_workflow,\n durable_async_standalone,\n durable_sync_complex_workflow,\n durable_sync_simple_workflow,\n durable_sync_standalone,\n start,\n sync_complex_workflow,\n sync_simple_workflow,\n sync_standalone,\n)\nfrom hatchet_sdk import Task\n\n\n@pytest.mark.parametrize(\n "func",\n [\n sync_standalone,\n durable_sync_standalone,\n sync_simple_workflow,\n durable_sync_simple_workflow,\n sync_complex_workflow,\n durable_sync_complex_workflow,\n ],\n)\ndef test_simple_unit_sync(func: Task[UnitTestInput, UnitTestOutput]) -> None:\n input = UnitTestInput(key="test_key", number=42)\n additional_metadata = {"meta_key": "meta_value"}\n lifespan = Lifespan(mock_db_url="sqlite:///:memory:")\n retry_count = 1\n\n expected_output = UnitTestOutput(\n key=input.key,\n 
number=input.number,\n additional_metadata=additional_metadata,\n retry_count=retry_count,\n mock_db_url=lifespan.mock_db_url,\n )\n\n assert (\n func.mock_run(\n input=input,\n additional_metadata=additional_metadata,\n lifespan=lifespan,\n retry_count=retry_count,\n parent_outputs={start.name: expected_output.model_dump()},\n )\n == expected_output\n )\n\n\n@pytest.mark.parametrize(\n "func",\n [\n async_standalone,\n durable_async_standalone,\n async_simple_workflow,\n durable_async_simple_workflow,\n async_complex_workflow,\n durable_async_complex_workflow,\n ],\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_simple_unit_async(func: Task[UnitTestInput, UnitTestOutput]) -> None:\n input = UnitTestInput(key="test_key", number=42)\n additional_metadata = {"meta_key": "meta_value"}\n lifespan = Lifespan(mock_db_url="sqlite:///:memory:")\n retry_count = 1\n\n expected_output = UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=additional_metadata,\n retry_count=retry_count,\n mock_db_url=lifespan.mock_db_url,\n )\n\n assert (\n await func.aio_mock_run(\n input=input,\n additional_metadata=additional_metadata,\n lifespan=lifespan,\n retry_count=retry_count,\n parent_outputs={start.name: expected_output.model_dump()},\n )\n == expected_output\n )\n', - source: 'out/python/unit_testing/test_unit.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/workflows.ts b/frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/workflows.ts deleted file mode 100644 index 6819ecfc0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/unit_testing/workflows.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from typing import cast\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, DurableContext, EmptyModel, Hatchet\n\n\nclass UnitTestInput(BaseModel):\n key: str\n number: int\n\n\nclass Lifespan(BaseModel):\n mock_db_url: str\n\n\nclass UnitTestOutput(UnitTestInput, Lifespan):\n additional_metadata: dict[str, str]\n retry_count: int\n\n\nhatchet = Hatchet()\n\n\n@hatchet.task(input_validator=UnitTestInput)\ndef sync_standalone(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@hatchet.task(input_validator=UnitTestInput)\nasync def async_standalone(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@hatchet.durable_task(input_validator=UnitTestInput)\ndef durable_sync_standalone(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@hatchet.durable_task(input_validator=UnitTestInput)\nasync def durable_async_standalone(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n 
additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\nsimple_workflow = hatchet.workflow(\n name="simple-unit-test-workflow", input_validator=UnitTestInput\n)\n\n\n@simple_workflow.task()\ndef sync_simple_workflow(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@simple_workflow.task()\nasync def async_simple_workflow(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@simple_workflow.durable_task()\ndef durable_sync_simple_workflow(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@simple_workflow.durable_task()\nasync def durable_async_simple_workflow(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\ncomplex_workflow = hatchet.workflow(\n name="complex-unit-test-workflow", input_validator=UnitTestInput\n)\n\n\n@complex_workflow.task()\nasync def start(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@complex_workflow.task(\n parents=[start],\n)\ndef sync_complex_workflow(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return ctx.task_output(start)\n\n\n@complex_workflow.task(\n parents=[start],\n)\nasync def async_complex_workflow(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return ctx.task_output(start)\n\n\n@complex_workflow.durable_task(\n parents=[start],\n)\ndef durable_sync_complex_workflow(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return ctx.task_output(start)\n\n\n@complex_workflow.durable_task(\n parents=[start],\n)\nasync def durable_async_complex_workflow(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return ctx.task_output(start)\n', - source: 'out/python/unit_testing/workflows.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/webhooks/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/webhooks/index.ts deleted file mode 100644 index 8a1e07903..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/webhooks/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import test_webhooks from './test_webhooks'; -import worker from './worker'; - -export { test_webhooks }; -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/webhooks/test_webhooks.ts b/frontend/app/src/next/lib/docs/generated/snips/python/webhooks/test_webhooks.ts deleted file mode 100644 index 50e2f64b0..000000000 --- 
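The deleted test_unit.py above drives every task variant through the same mock-run assertion. Reduced to a single case, the pattern is: build the expected output, then execute the task function locally with `mock_run`, injecting the context values a worker would normally supply. A sketch, assuming the repo's `examples` package is importable (no worker or server is needed, which is the point of `mock_run`):

```python
from examples.unit_testing.workflows import (
    Lifespan,
    UnitTestInput,
    UnitTestOutput,
    start,
    sync_complex_workflow,
)

input = UnitTestInput(key="k", number=1)
lifespan = Lifespan(mock_db_url="sqlite:///:memory:")

expected = UnitTestOutput(
    key=input.key,
    number=input.number,
    additional_metadata={"env": "test"},
    retry_count=0,
    mock_db_url=lifespan.mock_db_url,
)

# mock_run executes the task function directly; parent_outputs fakes what
# ctx.task_output(start) returns, so the `start` task never actually runs.
output = sync_complex_workflow.mock_run(
    input=input,
    additional_metadata={"env": "test"},  # surfaces as ctx.additional_metadata
    lifespan=lifespan,                    # surfaces as ctx.lifespan
    retry_count=0,                        # surfaces as ctx.retry_count
    parent_outputs={start.name: expected.model_dump()},
)
assert output == expected
```

Async tasks follow the same shape with `aio_mock_run`.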
a/frontend/app/src/next/lib/docs/generated/snips/python/webhooks/test_webhooks.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nimport base64\nimport hashlib\nimport hmac\nimport json\nfrom collections.abc import AsyncGenerator\nfrom contextlib import asynccontextmanager\nfrom datetime import datetime, timezone\nfrom typing import Any\nfrom uuid import uuid4\n\nimport aiohttp\nimport pytest\n\nfrom examples.webhooks.worker import WebhookInput\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.api.webhook_api import WebhookApi\nfrom hatchet_sdk.clients.rest.models.v1_create_webhook_request import (\n V1CreateWebhookRequest,\n)\nfrom hatchet_sdk.clients.rest.models.v1_create_webhook_request_api_key import (\n V1CreateWebhookRequestAPIKey,\n)\nfrom hatchet_sdk.clients.rest.models.v1_create_webhook_request_basic_auth import (\n V1CreateWebhookRequestBasicAuth,\n)\nfrom hatchet_sdk.clients.rest.models.v1_create_webhook_request_hmac import (\n V1CreateWebhookRequestHMAC,\n)\nfrom hatchet_sdk.clients.rest.models.v1_event import V1Event\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\nfrom hatchet_sdk.clients.rest.models.v1_webhook import V1Webhook\nfrom hatchet_sdk.clients.rest.models.v1_webhook_api_key_auth import V1WebhookAPIKeyAuth\nfrom hatchet_sdk.clients.rest.models.v1_webhook_basic_auth import V1WebhookBasicAuth\nfrom hatchet_sdk.clients.rest.models.v1_webhook_hmac_algorithm import (\n V1WebhookHMACAlgorithm,\n)\nfrom hatchet_sdk.clients.rest.models.v1_webhook_hmac_auth import V1WebhookHMACAuth\nfrom hatchet_sdk.clients.rest.models.v1_webhook_hmac_encoding import (\n V1WebhookHMACEncoding,\n)\nfrom hatchet_sdk.clients.rest.models.v1_webhook_source_name import V1WebhookSourceName\n\nTEST_BASIC_USERNAME = "test_user"\nTEST_BASIC_PASSWORD = "test_password"\nTEST_API_KEY_HEADER = "X-API-Key"\nTEST_API_KEY_VALUE = "test_api_key_123"\nTEST_HMAC_SIGNATURE_HEADER = "X-Signature"\nTEST_HMAC_SECRET = "test_hmac_secret"\n\n\n@pytest.fixture\ndef webhook_body() -> WebhookInput:\n return WebhookInput(type="test", message="Hello, world!")\n\n\n@pytest.fixture\ndef test_run_id() -> str:\n return str(uuid4())\n\n\n@pytest.fixture\ndef test_start() -> datetime:\n return datetime.now(timezone.utc)\n\n\ndef create_hmac_signature(\n payload: bytes,\n secret: str,\n algorithm: V1WebhookHMACAlgorithm = V1WebhookHMACAlgorithm.SHA256,\n encoding: V1WebhookHMACEncoding = V1WebhookHMACEncoding.HEX,\n) -> str:\n algorithm_map = {\n V1WebhookHMACAlgorithm.SHA1: hashlib.sha1,\n V1WebhookHMACAlgorithm.SHA256: hashlib.sha256,\n V1WebhookHMACAlgorithm.SHA512: hashlib.sha512,\n V1WebhookHMACAlgorithm.MD5: hashlib.md5,\n }\n\n hash_func = algorithm_map[algorithm]\n signature = hmac.new(secret.encode(), payload, hash_func).digest()\n\n if encoding == V1WebhookHMACEncoding.HEX:\n return signature.hex()\n if encoding == V1WebhookHMACEncoding.BASE64:\n return base64.b64encode(signature).decode()\n if encoding == V1WebhookHMACEncoding.BASE64URL:\n return base64.urlsafe_b64encode(signature).decode()\n\n raise ValueError(f"Unsupported encoding: {encoding}")\n\n\nasync def send_webhook_request(\n url: str,\n body: WebhookInput,\n auth_type: str,\n auth_data: dict[str, Any] | None = None,\n headers: dict[str, str] | None = None,\n) -> aiohttp.ClientResponse:\n request_headers = headers or 
{}\n auth = None\n\n if auth_type == "BASIC" and auth_data:\n auth = aiohttp.BasicAuth(auth_data["username"], auth_data["password"])\n elif auth_type == "API_KEY" and auth_data:\n request_headers[auth_data["header_name"]] = auth_data["api_key"]\n elif auth_type == "HMAC" and auth_data:\n payload = json.dumps(body.model_dump()).encode()\n signature = create_hmac_signature(\n payload,\n auth_data["secret"],\n auth_data.get("algorithm", V1WebhookHMACAlgorithm.SHA256),\n auth_data.get("encoding", V1WebhookHMACEncoding.HEX),\n )\n request_headers[auth_data["header_name"]] = signature\n\n async with aiohttp.ClientSession() as session:\n return await session.post(\n url, json=body.model_dump(), auth=auth, headers=request_headers\n )\n\n\nasync def wait_for_event(\n hatchet: Hatchet,\n webhook_name: str,\n test_start: datetime,\n) -> V1Event | None:\n await asyncio.sleep(5)\n\n events = await hatchet.event.aio_list(since=test_start)\n\n if events.rows is None:\n return None\n\n return next(\n (\n event\n for event in events.rows\n if event.triggering_webhook_name == webhook_name\n ),\n None,\n )\n\n\nasync def wait_for_workflow_run(\n hatchet: Hatchet, event_id: str, test_start: datetime\n) -> V1TaskSummary | None:\n await asyncio.sleep(5)\n\n runs = await hatchet.runs.aio_list(\n since=test_start,\n additional_metadata={\n "hatchet__event_id": event_id,\n },\n )\n\n if len(runs.rows) == 0:\n return None\n\n return runs.rows[0]\n\n\n@asynccontextmanager\nasync def basic_auth_webhook(\n hatchet: Hatchet,\n test_run_id: str,\n username: str = TEST_BASIC_USERNAME,\n password: str = TEST_BASIC_PASSWORD,\n source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC,\n) -> AsyncGenerator[V1Webhook, None]:\n ## Hack to get the API client\n client = hatchet.metrics.client()\n webhook_api = WebhookApi(client)\n\n webhook_request = V1CreateWebhookRequestBasicAuth(\n sourceName=source_name,\n name=f"test-webhook-basic-{test_run_id}",\n eventKeyExpression=f"\'{hatchet.config.apply_namespace(\'webhook\')}:\' + input.type",\n authType="BASIC",\n auth=V1WebhookBasicAuth(\n username=username,\n password=password,\n ),\n )\n\n incoming_webhook = webhook_api.v1_webhook_create(\n tenant=hatchet.tenant_id,\n v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),\n )\n\n try:\n yield incoming_webhook\n finally:\n webhook_api.v1_webhook_delete(\n tenant=hatchet.tenant_id,\n v1_webhook=incoming_webhook.name,\n )\n\n\n@asynccontextmanager\nasync def api_key_webhook(\n hatchet: Hatchet,\n test_run_id: str,\n header_name: str = TEST_API_KEY_HEADER,\n api_key: str = TEST_API_KEY_VALUE,\n source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC,\n) -> AsyncGenerator[V1Webhook, None]:\n client = hatchet.metrics.client()\n webhook_api = WebhookApi(client)\n\n webhook_request = V1CreateWebhookRequestAPIKey(\n sourceName=source_name,\n name=f"test-webhook-apikey-{test_run_id}",\n eventKeyExpression=f"\'{hatchet.config.apply_namespace(\'webhook\')}:\' + input.type",\n authType="API_KEY",\n auth=V1WebhookAPIKeyAuth(\n headerName=header_name,\n apiKey=api_key,\n ),\n )\n\n incoming_webhook = webhook_api.v1_webhook_create(\n tenant=hatchet.tenant_id,\n v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),\n )\n\n try:\n yield incoming_webhook\n finally:\n webhook_api.v1_webhook_delete(\n tenant=hatchet.tenant_id,\n v1_webhook=incoming_webhook.name,\n )\n\n\n@asynccontextmanager\nasync def hmac_webhook(\n hatchet: Hatchet,\n test_run_id: str,\n signature_header_name: str = 
TEST_HMAC_SIGNATURE_HEADER,\n signing_secret: str = TEST_HMAC_SECRET,\n algorithm: V1WebhookHMACAlgorithm = V1WebhookHMACAlgorithm.SHA256,\n encoding: V1WebhookHMACEncoding = V1WebhookHMACEncoding.HEX,\n source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC,\n) -> AsyncGenerator[V1Webhook, None]:\n client = hatchet.metrics.client()\n webhook_api = WebhookApi(client)\n\n webhook_request = V1CreateWebhookRequestHMAC(\n sourceName=source_name,\n name=f"test-webhook-hmac-{test_run_id}",\n eventKeyExpression=f"\'{hatchet.config.apply_namespace(\'webhook\')}:\' + input.type",\n authType="HMAC",\n auth=V1WebhookHMACAuth(\n algorithm=algorithm,\n encoding=encoding,\n signatureHeaderName=signature_header_name,\n signingSecret=signing_secret,\n ),\n )\n\n incoming_webhook = webhook_api.v1_webhook_create(\n tenant=hatchet.tenant_id,\n v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),\n )\n\n try:\n yield incoming_webhook\n finally:\n webhook_api.v1_webhook_delete(\n tenant=hatchet.tenant_id,\n v1_webhook=incoming_webhook.name,\n )\n\n\ndef url(tenant_id: str, webhook_name: str) -> str:\n return f"http://localhost:8080/api/v1/stable/tenants/{tenant_id}/webhooks/{webhook_name}"\n\n\nasync def assert_has_runs(\n hatchet: Hatchet,\n test_start: datetime,\n webhook_body: WebhookInput,\n incoming_webhook: V1Webhook,\n) -> None:\n triggered_event = await wait_for_event(hatchet, incoming_webhook.name, test_start)\n assert triggered_event is not None\n assert (\n triggered_event.key\n == f"{hatchet.config.apply_namespace(\'webhook\')}:{webhook_body.type}"\n )\n assert triggered_event.payload == webhook_body.model_dump()\n\n workflow_run = await wait_for_workflow_run(\n hatchet, triggered_event.metadata.id, test_start\n )\n assert workflow_run is not None\n assert workflow_run.status == V1TaskStatus.COMPLETED\n assert workflow_run.additional_metadata is not None\n\n assert (\n workflow_run.additional_metadata["hatchet__event_id"]\n == triggered_event.metadata.id\n )\n assert workflow_run.additional_metadata["hatchet__event_key"] == triggered_event.key\n assert workflow_run.status == V1TaskStatus.COMPLETED\n\n\nasync def assert_event_not_created(\n hatchet: Hatchet,\n test_start: datetime,\n incoming_webhook: V1Webhook,\n) -> None:\n triggered_event = await wait_for_event(hatchet, incoming_webhook.name, test_start)\n assert triggered_event is None\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_basic_auth_success(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with basic_auth_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n "BASIC",\n {"username": TEST_BASIC_USERNAME, "password": TEST_BASIC_PASSWORD},\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {"message": "ok"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n "username,password",\n [\n ("test_user", "incorrect_password"),\n ("incorrect_user", "test_password"),\n ("incorrect_user", "incorrect_password"),\n ("", ""),\n ],\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_basic_auth_failure(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n username: str,\n password: str,\n) -> None:\n """Test basic authentication failures."""\n async with 
basic_auth_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n "BASIC",\n {"username": username, "password": password},\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_basic_auth_missing_credentials(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with basic_auth_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name), webhook_body, "NONE"\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_api_key_success(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with api_key_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n "API_KEY",\n {"header_name": TEST_API_KEY_HEADER, "api_key": TEST_API_KEY_VALUE},\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {"message": "ok"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n "api_key",\n [\n "incorrect_api_key",\n "",\n "partial_key",\n ],\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_api_key_failure(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n api_key: str,\n) -> None:\n async with api_key_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n "API_KEY",\n {"header_name": TEST_API_KEY_HEADER, "api_key": api_key},\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_api_key_missing_header(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with api_key_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name), webhook_body, "NONE"\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_hmac_success(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with hmac_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n "HMAC",\n {\n "header_name": TEST_HMAC_SIGNATURE_HEADER,\n "secret": TEST_HMAC_SECRET,\n "algorithm": V1WebhookHMACAlgorithm.SHA256,\n "encoding": V1WebhookHMACEncoding.HEX,\n },\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {"message": "ok"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n "algorithm,encoding",\n [\n 
(V1WebhookHMACAlgorithm.SHA1, V1WebhookHMACEncoding.HEX),\n (V1WebhookHMACAlgorithm.SHA256, V1WebhookHMACEncoding.BASE64),\n (V1WebhookHMACAlgorithm.SHA512, V1WebhookHMACEncoding.BASE64URL),\n (V1WebhookHMACAlgorithm.MD5, V1WebhookHMACEncoding.HEX),\n ],\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_hmac_different_algorithms_and_encodings(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n algorithm: V1WebhookHMACAlgorithm,\n encoding: V1WebhookHMACEncoding,\n) -> None:\n async with hmac_webhook(\n hatchet, test_run_id, algorithm=algorithm, encoding=encoding\n ) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n "HMAC",\n {\n "header_name": TEST_HMAC_SIGNATURE_HEADER,\n "secret": TEST_HMAC_SECRET,\n "algorithm": algorithm,\n "encoding": encoding,\n },\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {"message": "ok"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n "secret",\n [\n "incorrect_secret",\n "",\n "partial_secret",\n ],\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_hmac_signature_failure(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n secret: str,\n) -> None:\n async with hmac_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n "HMAC",\n {\n "header_name": TEST_HMAC_SIGNATURE_HEADER,\n "secret": secret,\n "algorithm": V1WebhookHMACAlgorithm.SHA256,\n "encoding": V1WebhookHMACEncoding.HEX,\n },\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_hmac_missing_signature_header(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with hmac_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name), webhook_body, "NONE"\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n "source_name",\n [\n V1WebhookSourceName.GENERIC,\n V1WebhookSourceName.GITHUB,\n ],\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_different_source_types(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n source_name: V1WebhookSourceName,\n) -> None:\n async with basic_auth_webhook(\n hatchet, test_run_id, source_name=source_name\n ) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n "BASIC",\n {"username": TEST_BASIC_USERNAME, "password": TEST_BASIC_PASSWORD},\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {"message": "ok"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n', - source: 'out/python/webhooks/test_webhooks.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/webhooks/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/webhooks/worker.ts deleted file mode 
100644 index 9e1b7fefb..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/webhooks/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > Webhooks\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass WebhookInput(BaseModel):\n type: str\n message: str\n\n\n@hatchet.task(input_validator=WebhookInput, on_events=["webhook:test"])\ndef webhook(input: WebhookInput, ctx: Context) -> dict[str, str]:\n return input.model_dump()\n\n\ndef main() -> None:\n worker = hatchet.worker("webhook-worker", workflows=[webhook])\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/webhooks/worker.py', - blocks: { - webhooks: { - start: 2, - stop: 24, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/worker.ts deleted file mode 100644 index c908f51cc..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'from examples.affinity_workers.worker import affinity_worker_workflow\nfrom examples.bulk_fanout.worker import bulk_child_wf, bulk_parent_wf\nfrom examples.bulk_operations.worker import (\n bulk_replay_test_1,\n bulk_replay_test_2,\n bulk_replay_test_3,\n)\nfrom examples.cancellation.worker import cancellation_workflow\nfrom examples.concurrency_limit.worker import concurrency_limit_workflow\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow\nfrom examples.concurrency_workflow_level.worker import (\n concurrency_workflow_level_workflow,\n)\nfrom examples.conditions.worker import task_condition_workflow\nfrom examples.dag.worker import dag_workflow\nfrom examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf\nfrom examples.dependency_injection.worker import (\n async_task_with_dependencies,\n di_workflow,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n sync_task_with_dependencies,\n)\nfrom examples.durable.worker import durable_workflow, wait_for_sleep_twice\nfrom examples.events.worker import event_workflow\nfrom examples.fanout.worker import child_wf, parent_wf\nfrom examples.fanout_sync.worker import sync_fanout_child, sync_fanout_parent\nfrom examples.lifespans.simple import lifespan, lifespan_task\nfrom examples.logger.workflow import logging_workflow\nfrom examples.non_retryable.worker import non_retryable_workflow\nfrom examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details\nfrom examples.return_exceptions.worker import return_exceptions_task\nfrom examples.simple.worker import simple, simple_durable\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\nfrom examples.webhooks.worker import webhook\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "e2e-test-worker",\n slots=100,\n workflows=[\n affinity_worker_workflow,\n bulk_child_wf,\n bulk_parent_wf,\n concurrency_limit_workflow,\n concurrency_limit_rr_workflow,\n concurrency_multiple_keys_workflow,\n dag_workflow,\n 
dedupe_child_wf,\n dedupe_parent_wf,\n durable_workflow,\n child_wf,\n event_workflow,\n parent_wf,\n on_failure_wf,\n on_failure_wf_with_details,\n logging_workflow,\n timeout_wf,\n refresh_timeout_wf,\n task_condition_workflow,\n cancellation_workflow,\n sync_fanout_parent,\n sync_fanout_child,\n non_retryable_workflow,\n concurrency_workflow_level_workflow,\n di_workflow,\n lifespan_task,\n simple,\n simple_durable,\n bulk_replay_test_1,\n bulk_replay_test_2,\n bulk_replay_test_3,\n webhook,\n return_exceptions_task,\n wait_for_sleep_twice,\n async_task_with_dependencies,\n sync_task_with_dependencies,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n ],\n lifespan=lifespan,\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/worker_existing_loop/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/worker_existing_loop/index.ts deleted file mode 100644 index c1b323985..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/worker_existing_loop/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import worker from './worker'; - -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/worker_existing_loop/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/worker_existing_loop/worker.ts deleted file mode 100644 index 6de142995..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/worker_existing_loop/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - 'import asyncio\nfrom contextlib import suppress\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\nexisting_loop_worker = hatchet.workflow(name="WorkerExistingLoopWorkflow")\n\n\n@existing_loop_worker.task()\nasync def task(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print("started")\n await asyncio.sleep(10)\n print("finished")\n return {"result": "returned result"}\n\n\nasync def async_main() -> None:\n worker = None\n try:\n worker = hatchet.worker(\n "test-worker", slots=1, workflows=[existing_loop_worker]\n )\n worker.start()\n\n ref = existing_loop_worker.run_no_wait()\n print(await ref.aio_result())\n while True:\n await asyncio.sleep(1)\n finally:\n if worker:\n await worker.exit_gracefully()\n\n\ndef main() -> None:\n with suppress(KeyboardInterrupt):\n asyncio.run(async_main())\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/worker_existing_loop/worker.py', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/workflow_registration/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/workflow_registration/index.ts deleted file mode 100644 index c1b323985..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/workflow_registration/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import worker from './worker'; - -export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/workflow_registration/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/workflow_registration/worker.ts deleted file mode 100644 index 335376ae5..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/python/workflow_registration/worker.ts +++ /dev/null 
@@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'python', - content: - '# > WorkflowRegistration\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\nwf_one = hatchet.workflow(name="wf_one")\nwf_two = hatchet.workflow(name="wf_two")\nwf_three = hatchet.workflow(name="wf_three")\nwf_four = hatchet.workflow(name="wf_four")\nwf_five = hatchet.workflow(name="wf_five")\n\n# define tasks here\n\n\ndef main() -> None:\n # 👀 Register workflows directly when instantiating the worker\n worker = hatchet.worker("test-worker", slots=1, workflows=[wf_one, wf_two])\n\n # 👀 Register a single workflow after instantiating the worker\n worker.register_workflow(wf_three)\n\n # 👀 Register multiple workflows in bulk after instantiating the worker\n worker.register_workflows([wf_four, wf_five])\n\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n', - source: 'out/python/workflow_registration/worker.py', - blocks: { - workflowregistration: { - start: 2, - stop: 28, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/types.ts b/frontend/app/src/next/lib/docs/generated/snips/types.ts deleted file mode 100644 index b85269595..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/types.ts +++ /dev/null @@ -1,31 +0,0 @@ -export type Highlight = { - lines: number[]; - strings: string[]; -}; - -export type Block = { - start: number; - stop: number; -}; - -// Types for snippets -export type Snippet = { - content: string; - language: string; - source: string; - blocks?: { - [key: string]: Block; - }; - highlights?: { - [key: string]: Highlight; - }; -}; - -export const LANGUAGE_MAP = { - ts: 'typescript ', - py: 'python', - go: 'go', - unknown: 'unknown', -}; - -export default {}; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/run.ts deleted file mode 100644 index c5c5631ed..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/run.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Running a Task with Results\nimport sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { cancellation } from './workflow';\nimport { hatchet } from '../hatchet-client';\n// ...\nasync function main() {\n const run = await cancellation.runNoWait({});\n const run1 = await cancellation.runNoWait({});\n\n await sleep(1000);\n\n await run.cancel();\n\n const res = await run.output;\n const res1 = await run1.output;\n\n console.log('canceled', res);\n console.log('completed', res1);\n\n await sleep(1000);\n\n await run.replay();\n\n const resReplay = await run.output;\n\n console.log(resReplay);\n\n const run2 = await cancellation.runNoWait({}, { additionalMetadata: { test: 'abc' } });\n const run4 
= await cancellation.runNoWait({}, { additionalMetadata: { test: 'test' } });\n\n await sleep(1000);\n\n await hatchet.runs.cancel({\n filters: {\n since: new Date(Date.now() - 60 * 60),\n additionalMetadata: { test: 'test' },\n },\n });\n\n const res3 = await Promise.all([run2.output, run4.output]);\n console.log(res3);\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/cancellations/run.ts', - blocks: { - running_a_task_with_results: { - start: 2, - stop: 41, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/worker.ts deleted file mode 100644 index 0ef83304c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { cancellation } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('cancellation-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [cancellation],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 100,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/cancellations/worker.ts', - blocks: { - declaring_a_worker: { - start: 2, - stop: 18, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/workflow.ts deleted file mode 100644 index 483a42816..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/cancellations/workflow.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport axios from 'axios';\nimport { hatchet } from '../hatchet-client';\n\n// > Declaring a Task\nexport const cancellation = hatchet.task({\n name: 'cancellation',\n fn: async (_, ctx) => {\n await sleep(10 * 1000);\n\n if (ctx.cancelled) {\n throw new Error('Task was cancelled');\n }\n\n return {\n Completed: true,\n };\n },\n});\n\n// > Abort Signal\nexport const abortSignal = hatchet.task({\n name: 'abort-signal',\n fn: async (_, { abortController }) => {\n try {\n const response = await axios.get('https://api.example.com/data', {\n signal: abortController.signal,\n });\n // Handle the response\n } catch (error) {\n if (axios.isCancel(error)) {\n // Request was canceled\n console.log('Request canceled');\n } else {\n // Handle other errors\n }\n }\n },\n});\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - source: 'out/typescript/cancellations/workflow.ts', - blocks: { - declaring_a_task: { - start: 6, - stop: 19, - }, - abort_signal: { - start: 22, - stop: 39, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/index.ts deleted file mode 100644 
index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/run.ts deleted file mode 100644 index 18932b3bc..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { parent } from './workflow';\n\nasync function main() {\n const res = await parent.run({\n N: 10,\n });\n\n console.log(res.Result);\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - source: 'out/typescript/child_workflows/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/worker.ts deleted file mode 100644 index ef8eb8884..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { parent, child } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('child-workflow-worker', {\n workflows: [parent, child],\n slots: 100,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/child_workflows/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/workflow.ts deleted file mode 100644 index 530d8ee6e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/child_workflows/workflow.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Declaring a Child\nimport { hatchet } from '../hatchet-client';\n\ntype ChildInput = {\n N: number;\n};\n\nexport const child = hatchet.task({\n name: 'child',\n fn: (input: ChildInput) => {\n return {\n Value: input.N,\n };\n },\n});\n\n// > Declaring a Parent\n\ntype ParentInput = {\n N: number;\n};\n\nexport const parent = hatchet.task({\n name: 'parent',\n fn: async (input: ParentInput, ctx) => {\n const n = input.N;\n const promises = [];\n\n for (let i = 0; i < n; i++) {\n promises.push(child.run({ N: i }));\n }\n\n const childRes = await Promise.all(promises);\n const sum = childRes.reduce((acc, curr) => acc + curr.Value, 0);\n\n return {\n Result: sum,\n };\n },\n});\n", - source: 'out/typescript/child_workflows/workflow.ts', - blocks: { - declaring_a_child: { - start: 2, - stop: 15, - }, - declaring_a_parent: { - start: 18, - stop: 40, - }, - }, - highlights: {}, -}; - -export default snippet; diff 
--git a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/index.ts deleted file mode 100644 index 381dcdc1d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import load from './load'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { load }; -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/load.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/load.ts deleted file mode 100644 index 56e8d1616..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/load.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { simpleConcurrency } from './workflow';\n\nfunction generateRandomString(length: number): string {\n const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';\n let result = '';\n for (let i = 0; i < length; i++) {\n result += characters.charAt(Math.floor(Math.random() * characters.length));\n }\n return result;\n}\n\nasync function main() {\n const groupCount = 2;\n const runsPerGroup = 20_000;\n const BATCH_SIZE = 400;\n\n const workflowRuns = [];\n for (let i = 0; i < groupCount; i++) {\n for (let j = 0; j < runsPerGroup; j++) {\n workflowRuns.push({\n workflowName: simpleConcurrency.definition.name,\n input: {\n Message: generateRandomString(10),\n GroupKey: `group-${i}`,\n },\n });\n }\n }\n\n // Shuffle the workflow runs array\n for (let i = workflowRuns.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [workflowRuns[i], workflowRuns[j]] = [workflowRuns[j], workflowRuns[i]];\n }\n\n // Process workflows in batches\n for (let i = 0; i < workflowRuns.length; i += BATCH_SIZE) {\n const batch = workflowRuns.slice(i, i + BATCH_SIZE);\n await hatchet.admin.runWorkflows(batch);\n }\n}\n\nif (require.main === module) {\n main().then(() => process.exit(0));\n}\n", - source: 'out/typescript/concurrency-rr/load.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/run.ts deleted file mode 100644 index 55601b078..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { simpleConcurrency } from './workflow';\n\nasync function main() {\n const res = await simpleConcurrency.run([\n {\n Message: 'Hello World',\n GroupKey: 'A',\n },\n {\n Message: 'Goodbye Moon',\n GroupKey: 'A',\n },\n {\n Message: 'Hello World B',\n GroupKey: 'B',\n },\n ]);\n\n console.log(res[0]['to-lower'].TransformedMessage);\n console.log(res[1]['to-lower'].TransformedMessage);\n console.log(res[2]['to-lower'].TransformedMessage);\n}\n\nif (require.main === module) {\n main().then(() => process.exit(0));\n}\n", - source: 'out/typescript/concurrency-rr/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; 
diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/worker.ts deleted file mode 100644 index 4ce0c02a5..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { simpleConcurrency } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('simple-concurrency-worker', {\n workflows: [simpleConcurrency],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/concurrency-rr/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/workflow.ts deleted file mode 100644 index 73a3ef0b9..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/concurrency-rr/workflow.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/workflow';\nimport { hatchet } from '../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n GroupKey: string;\n};\n\ntype SimpleOutput = {\n 'to-lower': {\n TransformedMessage: string;\n };\n};\n\nconst sleep = (ms: number) =>\n new Promise((resolve) => {\n setTimeout(resolve, ms);\n });\n\n// > Concurrency Strategy With Key\nexport const simpleConcurrency = hatchet.workflow({\n name: 'simple-concurrency',\n concurrency: {\n maxRuns: 1,\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.GroupKey',\n },\n});\n\nsimpleConcurrency.task({\n name: 'to-lower',\n fn: async (input) => {\n await sleep(Math.floor(Math.random() * (1000 - 200 + 1)) + 200);\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\n// > Multiple Concurrency Keys\nexport const multipleConcurrencyKeys = hatchet.workflow({\n name: 'simple-concurrency',\n concurrency: [\n {\n maxRuns: 1,\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.Tier',\n },\n {\n maxRuns: 1,\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.Account',\n },\n ],\n});\n\nmultipleConcurrencyKeys.task({\n name: 'to-lower',\n fn: async (input) => {\n await sleep(Math.floor(Math.random() * (1000 - 200 + 1)) + 200);\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - source: 'out/typescript/concurrency-rr/workflow.ts', - blocks: { - concurrency_strategy_with_key: { - start: 21, - stop: 28, - }, - multiple_concurrency_keys: { - start: 41, - stop: 55, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/index.ts deleted file mode 100644 index b711c8f0f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import interface_workflow from './interface-workflow'; -import run from './run'; -import worker from './worker'; -import workflow 
from './workflow'; - -export { interface_workflow }; -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/interface-workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/interface-workflow.ts deleted file mode 100644 index 695e813ec..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/interface-workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { WorkflowInputType, WorkflowOutputType } from '@hatchet-dev/typescript-sdk/v1';\nimport { hatchet } from '../hatchet-client';\n\ninterface DagInput extends WorkflowInputType {\n Message: string;\n}\n\ninterface DagOutput extends WorkflowOutputType {\n reverse: {\n Original: string;\n Transformed: string;\n };\n}\n\n// > Declaring a DAG Workflow\n// First, we declare the workflow\nexport const dag = hatchet.workflow({\n name: 'simple',\n});\n\nconst reverse = dag.task({\n name: 'reverse',\n fn: (input) => {\n return {\n Original: input.Message,\n Transformed: input.Message.split('').reverse().join(''),\n };\n },\n});\n\ndag.task({\n name: 'to-lower',\n parents: [reverse],\n fn: async (input, ctx) => {\n const r = await ctx.parentOutput(reverse);\n\n return {\n reverse: {\n Original: r.Transformed,\n Transformed: r.Transformed.toLowerCase(),\n },\n };\n },\n});\n", - source: 'out/typescript/dag/interface-workflow.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/run.ts deleted file mode 100644 index 5b1224cd5..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { dag } from './workflow';\n\nasync function main() {\n const res = await dag.run({\n Message: 'hello world',\n });\n\n console.log(res.reverse.Transformed);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/dag/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/worker.ts deleted file mode 100644 index 38d226698..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { dag } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('dag-worker', {\n workflows: [dag],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/dag/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/workflow.ts deleted file mode 100644 index 014e56025..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from 
'@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\ntype DagInput = {\n Message: string;\n};\n\ntype DagOutput = {\n reverse: {\n Original: string;\n Transformed: string;\n };\n};\n\n// > Declaring a DAG Workflow\n// First, we declare the workflow\nexport const dag = hatchet.workflow({\n name: 'simple',\n});\n\n// Next, we declare the tasks bound to the workflow\nconst toLower = dag.task({\n name: 'to-lower',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\n// Next, we declare the tasks bound to the workflow\ndag.task({\n name: 'reverse',\n parents: [toLower],\n fn: async (input, ctx) => {\n const lower = await ctx.parentOutput(toLower);\n return {\n Original: input.Message,\n Transformed: lower.TransformedMessage.split('').reverse().join(''),\n };\n },\n});\n", - source: 'out/typescript/dag/workflow.ts', - blocks: { - declaring_a_dag_workflow: { - start: 15, - stop: 41, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/complex-workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/complex-workflow.ts deleted file mode 100644 index 4a9ea5760..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/complex-workflow.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Create a workflow\nimport { Or, SleepCondition, UserEventCondition } from '@hatchet-dev/typescript-sdk/v1/conditions';\nimport { ParentCondition } from '@hatchet-dev/typescript-sdk/v1/conditions/parent-condition';\nimport { Context } from '@hatchet-dev/typescript-sdk/v1/client/worker/context';\nimport { hatchet } from '../hatchet-client';\n\nexport const taskConditionWorkflow = hatchet.workflow({\n name: 'TaskConditionWorkflow',\n});\n\n// > Add base task\nconst start = taskConditionWorkflow.task({\n name: 'start',\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add wait for sleep\nconst waitForSleep = taskConditionWorkflow.task({\n name: 'waitForSleep',\n parents: [start],\n waitFor: [new SleepCondition('10s')],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add skip on event\nconst skipOnEvent = taskConditionWorkflow.task({\n name: 'skipOnEvent',\n parents: [start],\n waitFor: [new SleepCondition('10s')],\n skipIf: [new UserEventCondition('skip_on_event:skip', 'true')],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add branching\nconst leftBranch = taskConditionWorkflow.task({\n name: 'leftBranch',\n parents: [waitForSleep],\n skipIf: [new ParentCondition(waitForSleep, 'output.randomNumber > 50')],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\nconst rightBranch = taskConditionWorkflow.task({\n name: 'rightBranch',\n parents: [waitForSleep],\n skipIf: [new ParentCondition(waitForSleep, 'output.randomNumber <= 50')],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add wait for event\nconst waitForEvent = taskConditionWorkflow.task({\n name: 'waitForEvent',\n parents: [start],\n waitFor: [Or(new SleepCondition('1m'), new 
UserEventCondition('wait_for_event:start', 'true'))],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add sum\ntaskConditionWorkflow.task({\n name: 'sum',\n parents: [start, waitForSleep, waitForEvent, skipOnEvent, leftBranch, rightBranch],\n fn: async (_, ctx: Context) => {\n const one = (await ctx.parentOutput(start)).randomNumber;\n const two = (await ctx.parentOutput(waitForEvent)).randomNumber;\n const three = (await ctx.parentOutput(waitForSleep)).randomNumber;\n const four = (await ctx.parentOutput(skipOnEvent))?.randomNumber || 0;\n const five = (await ctx.parentOutput(leftBranch))?.randomNumber || 0;\n const six = (await ctx.parentOutput(rightBranch))?.randomNumber || 0;\n\n return {\n sum: one + two + three + four + five + six,\n };\n },\n});\n", - source: 'out/typescript/dag_match_condition/complex-workflow.ts', - blocks: { - create_a_workflow: { - start: 2, - stop: 9, - }, - add_base_task: { - start: 12, - stop: 19, - }, - add_wait_for_sleep: { - start: 22, - stop: 31, - }, - add_skip_on_event: { - start: 34, - stop: 44, - }, - add_branching: { - start: 47, - stop: 67, - }, - add_wait_for_event: { - start: 70, - stop: 79, - }, - add_sum: { - start: 82, - stop: 97, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/event.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/event.ts deleted file mode 100644 index bacf9cb28..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/event.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\nasync function main() {\n const event = await hatchet.events.push('user:event', {\n Data: { Hello: 'World' },\n });\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - source: 'out/typescript/dag_match_condition/event.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/index.ts deleted file mode 100644 index 08d40e554..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import complex_workflow from './complex-workflow'; -import event from './event'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { complex_workflow }; -export { event }; -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/run.ts deleted file mode 100644 index f0cd92bfe..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { dagWithConditions } from './workflow';\n\nasync function main() {\n const res = await dagWithConditions.run({});\n\n 
console.log(res['first-task'].Completed);\n console.log(res['second-task'].Completed);\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/dag_match_condition/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/worker.ts deleted file mode 100644 index 42fc4f351..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { dagWithConditions } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('dag-worker', {\n workflows: [dagWithConditions],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/dag_match_condition/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/workflow.ts deleted file mode 100644 index 92637c86b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/dag_match_condition/workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { Or } from '@hatchet-dev/typescript-sdk/v1/conditions';\nimport { hatchet } from '../hatchet-client';\n\ntype DagInput = {};\n\ntype DagOutput = {\n 'first-task': {\n Completed: boolean;\n };\n 'second-task': {\n Completed: boolean;\n };\n};\n\nexport const dagWithConditions = hatchet.workflow({\n name: 'simple',\n});\n\nconst firstTask = dagWithConditions.task({\n name: 'first-task',\n fn: async () => {\n await sleep(2000);\n return {\n Completed: true,\n };\n },\n});\n\ndagWithConditions.task({\n name: 'second-task',\n parents: [firstTask],\n waitFor: Or({ eventKey: 'user:event' }, { sleepFor: '10s' }),\n fn: async (_, ctx) => {\n console.log('triggered by condition', ctx.triggers());\n\n return {\n Completed: true,\n };\n },\n});\n", - source: 'out/typescript/dag_match_condition/workflow.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/run.ts deleted file mode 100644 index 7b3fb165f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const 
snippet: Snippet = { - language: 'typescript ', - content: - "import { parent } from './workflow';\n\nasync function main() {\n const res = await parent.run({\n Message: 'hello',\n N: 5,\n });\n\n console.log(res.parent.Sum);\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/deep/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/worker.ts deleted file mode 100644 index dc4262c6f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { parent, child1, child2, child3, child4, child5 } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('simple-worker', {\n workflows: [parent, child1, child2, child3, child4, child5],\n slots: 5000,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/deep/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/workflow.ts deleted file mode 100644 index 6f4203aea..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/deep/workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n N: number;\n};\n\ntype Output = {\n transformer: {\n Sum: number;\n };\n};\n\nexport const child1 = hatchet.workflow({\n name: 'child1',\n});\n\nchild1.task({\n name: 'transformer',\n fn: () => {\n sleep(15);\n return {\n Sum: 1,\n };\n },\n});\n\nexport const child2 = hatchet.workflow({\n name: 'child2',\n});\n\nchild2.task({\n name: 'transformer',\n fn: async (input, ctx) => {\n const count = input.N;\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child1, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n sleep(15);\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n\nexport const child3 = hatchet.workflow({\n name: 'child3',\n});\n\nchild3.task({\n name: 'transformer',\n fn: async (input, ctx) => {\n const count = input.N;\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child2, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n\nexport const child4 = hatchet.workflow({\n name: 'child4',\n});\n\nchild4.task({\n name: 'transformer',\n fn: async (input, ctx) => {\n const count = input.N;\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child3, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n\nexport const child5 = hatchet.workflow({\n name: 'child5',\n});\n\nchild5.task({\n name: 
'transformer',\n fn: async (input, ctx) => {\n const count = input.N;\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child4, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n\nexport const parent = hatchet.workflow({\n name: 'parent',\n});\n\nparent.task({\n name: 'parent',\n fn: async (input, ctx) => {\n const count = input.N; // Random number between 2-4\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child5, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n", - source: 'out/typescript/deep/workflow.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/event.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/event.ts deleted file mode 100644 index d95774daa..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/event.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\nasync function main() {\n const event = await hatchet.events.push('user:update', {\n userId: '1234',\n });\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - source: 'out/typescript/durable-event/event.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/index.ts deleted file mode 100644 index 73fb5b0af..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import event from './event'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { event }; -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/run.ts deleted file mode 100644 index 63d20a89b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { durableEvent } from './workflow';\n\nasync function main() {\n const timeStart = Date.now();\n const res = await durableEvent.run({});\n const timeEnd = Date.now();\n console.log(`Time taken: ${timeEnd - timeStart}ms`);\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - source: 'out/typescript/durable-event/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/worker.ts deleted file mode 100644 index ccd49ff08..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { durableEvent } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('durable-event-worker', {\n workflows: [durableEvent],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/durable-event/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/workflow.ts deleted file mode 100644 index 21d19fc3b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-event/workflow.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\n// > Durable Event\nexport const durableEvent = hatchet.durableTask({\n name: 'durable-event',\n executionTimeout: '10m',\n fn: async (_, ctx) => {\n const res = ctx.waitFor({\n eventKey: 'user:update',\n });\n\n console.log('res', res);\n\n return {\n Value: 'done',\n };\n },\n});\n\nexport const durableEventWithFilter = hatchet.durableTask({\n name: 'durable-event-with-filter',\n executionTimeout: '10m',\n fn: async (_, ctx) => {\n // > Durable Event With Filter\n const res = ctx.waitFor({\n eventKey: 'user:update',\n expression: \"input.userId == '1234'\",\n });\n\n console.log('res', res);\n\n return {\n Value: 'done',\n };\n },\n});\n", - source: 'out/typescript/durable-event/workflow.ts', - blocks: { - durable_event: { - start: 5, - stop: 19, - }, - durable_event_with_filter: { - start: 26, - stop: 29, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/event.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/event.ts deleted file mode 100644 index 0a34662c1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/event.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\nasync function main() {\n const event = await hatchet.events.push('user:event', {\n Data: { Hello: 'World' },\n });\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - source: 'out/typescript/durable-sleep/event.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/index.ts deleted file mode 100644 index 73fb5b0af..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import event from './event'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { 
event }; -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/run.ts deleted file mode 100644 index 9f028b761..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { durableSleep } from './workflow';\n\nasync function main() {\n const timeStart = Date.now();\n const res = await durableSleep.run({});\n const timeEnd = Date.now();\n console.log(`Time taken: ${timeEnd - timeStart}ms`);\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - source: 'out/typescript/durable-sleep/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/worker.ts deleted file mode 100644 index 6f69f36d4..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { durableSleep } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('sleep-worker', {\n workflows: [durableSleep],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/durable-sleep/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/workflow.ts deleted file mode 100644 index 207d49a9c..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/durable-sleep/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\nexport const durableSleep = hatchet.workflow({\n name: 'durable-sleep',\n});\n\n// > Durable Sleep\ndurableSleep.durableTask({\n name: 'durable-sleep',\n executionTimeout: '10m',\n fn: async (_, ctx) => {\n console.log('sleeping for 5s');\n const sleepRes = await ctx.sleepFor('5s');\n console.log('done sleeping for 5s', sleepRes);\n\n return {\n Value: 'done',\n };\n },\n});\n", - source: 'out/typescript/durable-sleep/workflow.ts', - blocks: { - durable_sleep: { - start: 9, - stop: 21, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/hatchet-client.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/hatchet-client.ts deleted file mode 100644 index c6c2a4d4b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/hatchet-client.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - 
content: - "import { HatchetClient } from '@hatchet-dev/typescript-sdk/v1';\n\nexport const hatchet = HatchetClient.init();\n", - source: 'out/typescript/hatchet-client.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/child-worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/child-worker.ts deleted file mode 100644 index fd039eae5..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/child-worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// ❓ Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { child } from './workflow-with-child';\n\nasync function main() {\n const worker = await hatchet.worker('child-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [child],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 1000,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/high-memory/child-worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/index.ts deleted file mode 100644 index 6edd844f0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import child_worker from './child-worker'; -import parent_worker from './parent-worker'; -import run from './run'; -import workflow_with_child from './workflow-with-child'; - -export { child_worker }; -export { parent_worker }; -export { run }; -export { workflow_with_child }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/parent-worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/parent-worker.ts deleted file mode 100644 index 00994c13b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/parent-worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// ❓ Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { parent } from './workflow-with-child';\n\nasync function main() {\n const worker = await hatchet.worker('parent-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [parent],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 20,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/high-memory/parent-worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/run.ts deleted file mode 100644 index e773de952..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { parent } from './workflow-with-child';\n\nasync function main() {\n // 
❓ Running a Task\n const res = await parent.run({\n Message: 'HeLlO WoRlD',\n });\n\n // 👀 Access the results of the Task\n console.log(res.TransformedMessage);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/high-memory/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/workflow-with-child.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/workflow-with-child.ts deleted file mode 100644 index 54529b9b1..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/high-memory/workflow-with-child.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// ❓ Declaring a Task\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type ChildInput = {\n Message: string;\n};\n\nexport type ParentInput = {\n Message: string;\n};\n\nexport const child = hatchet.task({\n name: 'child',\n fn: (input: ChildInput) => {\n const largePayload = new Array(1024 * 1024).fill('a').join('');\n\n return {\n TransformedMessage: largePayload,\n };\n },\n});\n\nexport const parent = hatchet.task({\n name: 'parent',\n timeout: '10m',\n fn: async (input: ParentInput, ctx) => {\n // lets generate large payload 1 mb\n const largePayload = new Array(1024 * 1024).fill('a').join('');\n\n // Send the large payload 100 times\n const num = 1000;\n\n const children = [];\n for (let i = 0; i < num; i += 1) {\n children.push({\n workflow: child,\n input: {\n Message: `Iteration ${i + 1}: ${largePayload}`,\n },\n });\n }\n\n await ctx.bulkRunNoWaitChildren(children);\n\n return {\n TransformedMessage: 'done',\n };\n },\n});\n\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - source: 'out/typescript/high-memory/workflow-with-child.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/index.ts deleted file mode 100644 index 21b216bd6..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/index.ts +++ /dev/null @@ -1,59 +0,0 @@ -import hatchet_client from './hatchet-client'; -import * as cancellations from './cancellations'; -import * as child_workflows from './child_workflows'; -import * as concurrency_rr from './concurrency-rr'; -import * as dag from './dag'; -import * as dag_match_condition from './dag_match_condition'; -import * as deep from './deep'; -import * as durable_event from './durable-event'; -import * as durable_sleep from './durable-sleep'; -import * as high_memory from './high-memory'; -import * as inferred_typing from './inferred-typing'; -import * as landing_page from './landing_page'; -import * as legacy from './legacy'; -import * as migration_guides from './migration-guides'; -import * as multiple_wf_concurrency from './multiple_wf_concurrency'; -import * as non_retryable from './non_retryable'; -import * as on_cron from './on_cron'; -import * as on_event from './on_event'; -import * as on_failure from './on_failure'; -import * as on_success from './on_success'; -import * as priority from './priority'; -import * as quickstart from './quickstart'; -import * as rate_limit from './rate_limit'; -import * as retries from './retries'; -import * as simple from './simple'; -import * as sticky 
from './sticky'; -import * as streaming from './streaming'; -import * as timeouts from './timeouts'; -import * as with_timeouts from './with_timeouts'; - -export { hatchet_client }; -export { cancellations }; -export { child_workflows }; -export { concurrency_rr }; -export { dag }; -export { dag_match_condition }; -export { deep }; -export { durable_event }; -export { durable_sleep }; -export { high_memory }; -export { inferred_typing }; -export { landing_page }; -export { legacy }; -export { migration_guides }; -export { multiple_wf_concurrency }; -export { non_retryable }; -export { on_cron }; -export { on_event }; -export { on_failure }; -export { on_success }; -export { priority }; -export { quickstart }; -export { rate_limit }; -export { retries }; -export { simple }; -export { sticky }; -export { streaming }; -export { timeouts }; -export { with_timeouts }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/run.ts deleted file mode 100644 index 27bf35f0b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { crazyWorkflow, declaredType, inferredType, inferredTypeDurable } from './workflow';\n\nasync function main() {\n const declaredTypeRun = declaredType.run({\n Message: 'hello',\n });\n\n const inferredTypeRun = inferredType.run({\n Message: 'hello',\n });\n\n const crazyWorkflowRun = crazyWorkflow.run({\n Message: 'hello',\n });\n\n const inferredTypeDurableRun = inferredTypeDurable.run({\n Message: 'Durable Task',\n });\n\n const [declaredTypeResult, inferredTypeResult, inferredTypeDurableResult, crazyWorkflowResult] =\n await Promise.all([declaredTypeRun, inferredTypeRun, inferredTypeDurableRun, crazyWorkflowRun]);\n\n console.log('declaredTypeResult', declaredTypeResult);\n console.log('inferredTypeResult', inferredTypeResult);\n console.log('inferredTypeDurableResult', inferredTypeDurableResult);\n console.log('crazyWorkflowResult', crazyWorkflowResult);\n console.log('declaredTypeResult.TransformedMessage', declaredTypeResult.TransformedMessage);\n console.log('inferredTypeResult.TransformedMessage', inferredTypeResult.TransformedMessage);\n console.log(\n 'inferredTypeDurableResult.TransformedMessage',\n inferredTypeDurableResult.TransformedMessage\n );\n console.log('crazyWorkflowResult.TransformedMessage', crazyWorkflowResult.TransformedMessage);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/inferred-typing/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/worker.ts deleted file mode 100644 index a9152eedf..000000000 
--- a/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { declaredType, inferredType, inferredTypeDurable, crazyWorkflow } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('simple-worker', {\n workflows: [declaredType, inferredType, inferredTypeDurable, crazyWorkflow],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/inferred-typing/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/workflow.ts deleted file mode 100644 index 7101a8ea8..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/inferred-typing/workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n};\n\ntype SimpleOutput = {\n TransformedMessage: string;\n};\n\nexport const declaredType = hatchet.task({\n name: 'declared-type',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nexport const inferredType = hatchet.task({\n name: 'inferred-type',\n fn: (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n\nexport const inferredTypeDurable = hatchet.durableTask({\n name: 'inferred-type-durable',\n fn: async (input: SimpleInput, ctx) => {\n // await ctx.sleepFor('5s');\n\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n\nexport const crazyWorkflow = hatchet.workflow({\n name: 'crazy-workflow',\n});\n\nconst step1 = crazyWorkflow.task(declaredType);\n// crazyWorkflow.task(inferredTypeDurable);\n\ncrazyWorkflow.task({\n parents: [step1],\n ...inferredType.taskDef,\n});\n", - source: 'out/typescript/inferred-typing/workflow.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/durable-excution.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/durable-excution.ts deleted file mode 100644 index ea35ab142..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/durable-excution.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { Or } from '@hatchet-dev/typescript-sdk/v1/conditions';\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\nasync function main() {\n // > Declaring a Durable Task\n const simple = hatchet.durableTask({\n name: 'simple',\n fn: async (input: SimpleInput, ctx) => {\n await ctx.waitFor(\n Or(\n {\n eventKey: 'user:pay',\n expression: 'input.Status == \"PAID\"',\n },\n {\n sleepFor: '24h',\n }\n )\n );\n\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n });\n\n // > Running a Task\n const result = await simple.run({ 
Message: 'Hello, World!' });\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/landing_page/durable-excution.ts', - blocks: { - declaring_a_durable_task: { - start: 10, - stop: 29, - }, - running_a_task: { - start: 32, - stop: 32, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/event-signaling.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/event-signaling.ts deleted file mode 100644 index b5791ab74..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/event-signaling.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// > Trigger on an event\nexport const simple = hatchet.task({\n name: 'simple',\n onEvents: ['user:created'],\n fn: (input: SimpleInput) => {\n // ...\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - source: 'out/typescript/landing_page/event-signaling.ts', - blocks: { - trigger_on_an_event: { - start: 9, - stop: 18, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/flow-control.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/flow-control.ts deleted file mode 100644 index b6c3d36d2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/flow-control.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/protoc/v1/workflows';\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// > Process what you can handle\nexport const simple = hatchet.task({\n name: 'simple',\n concurrency: {\n expression: 'input.user_id',\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n maxRuns: 1,\n },\n rateLimits: [\n {\n key: 'api_throttle',\n units: 1,\n },\n ],\n fn: (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - source: 'out/typescript/landing_page/flow-control.ts', - blocks: { - process_what_you_can_handle: { - start: 10, - stop: 28, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/index.ts deleted file mode 100644 index 5da73c6ef..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import durable_excution from './durable-excution'; -import event_signaling from './event-signaling'; -import flow_control from './flow-control'; -import queues from './queues'; -import scheduling from './scheduling'; -import task_routing from './task-routing'; - -export { durable_excution }; -export { event_signaling }; -export { flow_control }; -export { queues }; -export { scheduling }; -export { task_routing }; diff --git 
a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/queues.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/queues.ts deleted file mode 100644 index 9971a884b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/queues.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\nasync function main() {\n // > Declaring a Task\n const simple = hatchet.task({\n name: 'simple',\n fn: (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n });\n\n // > Running a Task\n const result = await simple.run({ Message: 'Hello, World!' });\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/landing_page/queues.ts', - blocks: { - declaring_a_task: { - start: 9, - stop: 16, - }, - running_a_task: { - start: 19, - stop: 19, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/scheduling.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/scheduling.ts deleted file mode 100644 index 6c262270b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/scheduling.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { simple } from './flow-control';\n\n// > Schedules and Crons\nconst tomorrow = new Date(Date.now() + 1000 * 60 * 60 * 24);\nconst scheduled = simple.schedule(tomorrow, {\n Message: 'Hello, World!',\n});\n\nconst cron = simple.cron('every-day', '0 0 * * *', {\n Message: 'Hello, World!',\n});\n", - source: 'out/typescript/landing_page/scheduling.ts', - blocks: { - schedules_and_crons: { - start: 4, - stop: 11, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/task-routing.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/task-routing.ts deleted file mode 100644 index 535774f17..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/landing_page/task-routing.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// > Route tasks to workers with matching labels\nexport const simple = hatchet.task({\n name: 'simple',\n desiredWorkerLabels: {\n cpu: {\n value: '2x',\n },\n },\n fn: (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nhatchet.worker('task-routing-worker', {\n workflows: [simple],\n labels: {\n cpu: process.env.CPU_LABEL,\n },\n});\n", - source: 'out/typescript/landing_page/task-routing.ts', - blocks: { - route_tasks_to_workers_with_matching_labels: { - start: 9, - stop: 28, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/index.ts 
b/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/run.ts deleted file mode 100644 index 2842fc35f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n const res = await hatchet.run<{ Message: string }, { step2: string }>(simple, {\n Message: 'hello',\n });\n\n console.log(res.step2);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/legacy/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/worker.ts deleted file mode 100644 index 769ff7ea2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('legacy-worker', {\n workflows: [simple],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/legacy/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/workflow.ts deleted file mode 100644 index 217373618..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/legacy/workflow.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { Workflow } from '@hatchet-dev/typescript-sdk/workflow';\n\nexport const simple: Workflow = {\n id: 'legacy-workflow',\n description: 'test',\n on: {\n event: 'user:create',\n },\n steps: [\n {\n name: 'step1',\n run: async (ctx) => {\n const input = ctx.workflowInput();\n\n return { step1: `original input: ${input.Message}` };\n },\n },\n {\n name: 'step2',\n parents: ['step1'],\n run: (ctx) => {\n const step1Output = ctx.stepOutput('step1');\n\n return { step2: `step1 output: ${step1Output.step1}` };\n },\n },\n ],\n};\n", - source: 'out/typescript/legacy/workflow.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/hatchet-client.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/hatchet-client.ts deleted file mode 100644 index 25081a699..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/hatchet-client.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import HatchetClient from '@hatchet-dev/typescript-sdk/sdk';\n\nexport const hatchet = HatchetClient.init();\n", - source: 'out/typescript/migration-guides/hatchet-client.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/index.ts deleted file mode 100644 index e3b7e4fb7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import hatchet_client from './hatchet-client'; -import mergent from './mergent'; - -export { hatchet_client }; -export { mergent }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/mergent.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/mergent.ts deleted file mode 100644 index 2cd4989ce..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/migration-guides/mergent.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from './hatchet-client';\n\nfunction processImage(\n imageUrl: string,\n filters: string[]\n): Promise<{ url: string; size: number; format: string }> {\n // Do some image processing\n return Promise.resolve({ url: imageUrl, size: 100, format: 'png' });\n}\n// > Before (Mergent)\nexport async function processImageTask(req: { body: { imageUrl: string; filters: string[] } }) {\n const { imageUrl, filters } = req.body;\n try {\n const result = await processImage(imageUrl, filters);\n return { success: true, processedUrl: result.url };\n } catch (error) {\n console.error('Image processing failed:', error);\n throw error;\n }\n}\n\n// > After (Hatchet)\ntype ImageProcessInput = {\n imageUrl: string;\n filters: string[];\n};\n\ntype ImageProcessOutput = {\n processedUrl: string;\n metadata: {\n size: number;\n format: string;\n appliedFilters: string[];\n };\n};\n\nexport const imageProcessor = hatchet.task({\n name: 'image-processor',\n retries: 3,\n executionTimeout: '10m',\n fn: async ({ imageUrl, filters }: ImageProcessInput): Promise<ImageProcessOutput> => {\n // Do some image processing\n const result = await processImage(imageUrl, filters);\n\n if (!result.url) throw new Error('Processing failed to generate URL');\n\n return {\n processedUrl: result.url,\n metadata: {\n size: result.size,\n format: result.format,\n appliedFilters: filters,\n },\n };\n },\n});\n\nasync function run() {\n // > Running a task (Mergent)\n const options = {\n method: 'POST',\n headers: { Authorization: 'Bearer ', 'Content-Type': 'application/json' },\n body: JSON.stringify({\n name: '4cf95241-fa19-47ef-8a67-71e483747649',\n queue: 'default',\n request: {\n url: 'https://example.com',\n headers: { Authorization: 'fake-secret-token', 'Content-Type': 'application/json' },\n body: 'Hello, world!',\n },\n }),\n };\n\n fetch('https://api.mergent.co/v2/tasks', options)\n .then((response) => response.json())\n .then((response) => console.log(response))\n .catch((err) => console.error(err));\n\n // > Running a task (Hatchet)\n const result = await 
imageProcessor.run({\n imageUrl: 'https://example.com/image.png',\n filters: ['blur'],\n });\n\n // you can await fully typed results\n console.log(result);\n}\n\nasync function schedule() {\n // > Scheduling tasks (Mergent)\n const options = {\n // same options as before\n body: JSON.stringify({\n // same body as before\n delay: '5m',\n }),\n };\n\n // > Scheduling tasks (Hatchet)\n // Schedule the task to run at a specific time\n const runAt = new Date(Date.now() + 1000 * 60 * 60 * 24);\n imageProcessor.schedule(runAt, {\n imageUrl: 'https://example.com/image.png',\n filters: ['blur'],\n });\n\n // Schedule the task to run every hour\n imageProcessor.cron('run-hourly', '0 * * * *', {\n imageUrl: 'https://example.com/image.png',\n filters: ['blur'],\n });\n}\n", - source: 'out/typescript/migration-guides/mergent.ts', - blocks: { - before_mergent: { - start: 11, - stop: 20, - }, - after_hatchet: { - start: 23, - stop: 56, - }, - running_a_task_mergent: { - start: 60, - stop: 77, - }, - running_a_task_hatchet: { - start: 80, - stop: 86, - }, - scheduling_tasks_mergent: { - start: 91, - stop: 97, - }, - scheduling_tasks_hatchet: { - start: 100, - stop: 111, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/run.ts deleted file mode 100644 index dc8698029..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { multiConcurrency } from './workflow';\n\nasync function main() {\n const res = await multiConcurrency.run([\n {\n Message: 'Hello World',\n GroupKey: 'A',\n },\n {\n Message: 'Goodbye Moon',\n GroupKey: 'A',\n },\n {\n Message: 'Hello World B',\n GroupKey: 'B',\n },\n ]);\n\n console.log(res[0]['to-lower'].TransformedMessage);\n console.log(res[1]['to-lower'].TransformedMessage);\n console.log(res[2]['to-lower'].TransformedMessage);\n}\n\nif (require.main === module) {\n main().then(() => process.exit(0));\n}\n", - source: 'out/typescript/multiple_wf_concurrency/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/worker.ts deleted file mode 100644 index 4c9ad60a3..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { multiConcurrency } from './workflow';\n\nasync function 
main() {\n const worker = await hatchet.worker('simple-concurrency-worker', {\n workflows: [multiConcurrency],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/multiple_wf_concurrency/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/workflow.ts deleted file mode 100644 index 4ef745bb5..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/multiple_wf_concurrency/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/workflow';\nimport { hatchet } from '../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n GroupKey: string;\n};\n\ntype SimpleOutput = {\n 'to-lower': {\n TransformedMessage: string;\n };\n};\n\nconst sleep = (ms: number) =>\n new Promise((resolve) => {\n setTimeout(resolve, ms);\n });\n\n// > Concurrency Strategy With Key\nexport const multiConcurrency = hatchet.workflow({\n name: 'simple-concurrency',\n concurrency: [\n {\n maxRuns: 1,\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.GroupKey',\n },\n {\n maxRuns: 1,\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.UserId',\n },\n ],\n});\n\nmultiConcurrency.task({\n name: 'to-lower',\n fn: async (input) => {\n await sleep(Math.floor(Math.random() * (1000 - 200 + 1)) + 200);\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - source: 'out/typescript/multiple_wf_concurrency/workflow.ts', - blocks: { - concurrency_strategy_with_key: { - start: 21, - stop: 35, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/run.ts deleted file mode 100644 index 44baf68ad..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { nonRetryableWorkflow } from './workflow';\n\nasync function main() {\n const res = await nonRetryableWorkflow.runNoWait({});\n\n console.log(res);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/non_retryable/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/worker.ts deleted file mode 100644 index 68c8854b5..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { nonRetryableWorkflow } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('no-retry-worker', {\n workflows: [nonRetryableWorkflow],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/non_retryable/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/workflow.ts deleted file mode 100644 index 2a3d309c3..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/non_retryable/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { NonRetryableError } from '@hatchet-dev/typescript-sdk/v1/task';\nimport { hatchet } from '../hatchet-client';\n\nexport const nonRetryableWorkflow = hatchet.workflow({\n name: 'no-retry-workflow',\n});\n\n// > Non-retrying task\nconst shouldNotRetry = nonRetryableWorkflow.task({\n name: 'should-not-retry',\n fn: () => {\n throw new NonRetryableError('This task should not retry');\n },\n retries: 1,\n});\n\n// Create a task that should retry\nconst shouldRetryWrongErrorType = nonRetryableWorkflow.task({\n name: 'should-retry-wrong-error-type',\n fn: () => {\n throw new Error('This task should not retry');\n },\n retries: 1,\n});\n\nconst shouldNotRetrySuccessfulTask = nonRetryableWorkflow.task({\n name: 'should-not-retry-successful-task',\n fn: () => {},\n});\n", - source: 'out/typescript/non_retryable/workflow.ts', - blocks: { - non_retrying_task: { - start: 9, - stop: 15, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/index.ts deleted file mode 100644 index 73d61f7ac..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import worker from './worker'; -import workflow from './workflow'; - -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/worker.ts deleted file mode 100644 index d86b2762f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { onCron } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('on-cron-worker', {\n workflows: [onCron],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/on_cron/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/workflow.ts 
b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/workflow.ts deleted file mode 100644 index 5b83050ac..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_cron/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\nexport type Input = {\n Message: string;\n};\n\ntype OnCronOutput = {\n job: {\n TransformedMessage: string;\n };\n};\n\n// > Run Workflow on Cron\nexport const onCron = hatchet.workflow({\n name: 'on-cron-workflow',\n on: {\n // 👀 add a cron expression to run the workflow every 15 minutes\n cron: '*/15 * * * *',\n },\n});\n\nonCron.task({\n name: 'job',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - source: 'out/typescript/on_cron/workflow.ts', - blocks: { - run_workflow_on_cron: { - start: 14, - stop: 20, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.e2e.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.e2e.ts deleted file mode 100644 index 344207d5d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.e2e.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import sleep from '@hatchet-dev/typescript-sdk-dev/typescript-sdk/util/sleep';\nimport { randomUUID } from 'crypto';\nimport { Event } from '@hatchet-dev/typescript-sdk-dev/typescript-sdk/protoc/events';\nimport { SIMPLE_EVENT, lower, Input } from './workflow';\nimport { hatchet } from '../hatchet-client';\nimport { Worker } from '../../client/worker/worker';\n\nxdescribe('events-e2e', () => {\n let worker: Worker;\n let testRunId: string;\n\n beforeEach(async () => {\n testRunId = randomUUID();\n\n worker = await hatchet.worker('event-worker');\n await worker.registerWorkflow(lower);\n\n void worker.start();\n });\n\n afterAll(async () => {\n await worker.stop();\n await sleep(2000);\n });\n\n async function setupEventFilter(expression?: string, payload: Record = {}) {\n const finalExpression =\n expression || `input.ShouldSkip == false && payload.testRunId == '${testRunId}'`;\n\n const workflowId = (await hatchet.workflows.get(lower.name)).metadata.id;\n\n const filter = await hatchet.filters.create({\n workflowId,\n expression: finalExpression,\n scope: testRunId,\n payload: { testRunId, ...payload },\n });\n\n return async () => {\n await hatchet.filters.delete(filter.metadata.id);\n };\n }\n\n // Helper function to wait for events to process and fetch runs\n async function waitForEventsToProcess(events: Event[]): Promise> {\n await sleep(3000);\n\n const persisted = (await hatchet.events.list({ limit: 100 })).rows || [];\n\n // Ensure all our events are persisted\n const eventIds = new Set(events.map((e) => e.eventId));\n const persistedIds = new Set(persisted.map((e) => e.metadata.id));\n expect(Array.from(eventIds).every((id) => persistedIds.has(id))).toBeTruthy();\n\n let attempts = 0;\n const maxAttempts = 15;\n const eventToRuns: Record = {};\n\n while (true) {\n console.log('Waiting for event runs to complete...');\n if (attempts > maxAttempts) {\n console.log('Timed out waiting for event runs to complete.');\n return {};\n }\n\n attempts += 1;\n\n // For each event, 
fetch its runs\n const runsPromises = events.map(async (event) => {\n const runs = await hatchet.runs.list({\n triggeringEventExternalId: event.eventId,\n });\n\n // Extract metadata from event\n const meta = event.additionalMetadata ? JSON.parse(event.additionalMetadata) : {};\n\n const payload = event.payload ? JSON.parse(event.payload) : {};\n\n return {\n event: {\n id: event.eventId,\n payload,\n meta,\n shouldHaveRuns: Boolean(meta.should_have_runs),\n testRunId: meta.test_run_id,\n },\n runs: runs.rows || [],\n };\n });\n\n const eventRuns = await Promise.all(runsPromises);\n\n // If all events have no runs yet, wait and retry\n if (eventRuns.every(({ runs }) => runs.length === 0)) {\n await sleep(1000);\n\n continue;\n }\n\n // Store runs by event ID\n for (const { event, runs } of eventRuns) {\n eventToRuns[event.id] = runs;\n }\n\n // Check if any runs are still in progress\n const anyInProgress = Object.values(eventToRuns).some((runs) =>\n runs.some((run) => run.status === 'QUEUED' || run.status === 'RUNNING')\n );\n\n if (anyInProgress) {\n await sleep(1000);\n\n continue;\n }\n\n break;\n }\n\n return eventToRuns;\n }\n\n // Helper to verify runs match expectations\n function verifyEventRuns(eventData: any, runs: any[]) {\n if (eventData.shouldHaveRuns) {\n expect(runs.length).toBeGreaterThan(0);\n } else {\n expect(runs.length).toBe(0);\n }\n }\n\n // Helper to create bulk push event objects\n function createBulkPushEvent({\n index = 1,\n ShouldSkip = false,\n shouldHaveRuns = true,\n key = SIMPLE_EVENT,\n payload = {},\n scope = null,\n }: {\n index?: number;\n ShouldSkip?: boolean;\n shouldHaveRuns?: boolean;\n key?: string;\n payload?: Record;\n scope?: string | null;\n }) {\n return {\n key,\n payload: {\n ShouldSkip,\n Message: `This is event ${index}`,\n ...payload,\n },\n additionalMetadata: {\n should_have_runs: shouldHaveRuns,\n test_run_id: testRunId,\n key,\n index,\n },\n scope: scope || undefined,\n };\n }\n\n // Helper to create payload object\n function createEventPayload(ShouldSkip: boolean): Input {\n return { ShouldSkip, Message: 'This is event 1' };\n }\n\n it('should push an event', async () => {\n const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));\n expect(event.eventId).toBeTruthy();\n }, 10000);\n\n it('should push an event asynchronously', async () => {\n const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));\n expect(event.eventId).toBeTruthy();\n }, 10000);\n\n it('should bulk push events', async () => {\n const events = [\n {\n key: SIMPLE_EVENT,\n payload: { Message: 'This is event 1', ShouldSkip: false },\n additionalMetadata: { source: 'test', user_id: 'user123' },\n },\n {\n key: SIMPLE_EVENT,\n payload: { Message: 'This is event 2', ShouldSkip: false },\n additionalMetadata: { source: 'test', user_id: 'user456' },\n },\n {\n key: SIMPLE_EVENT,\n payload: { Message: 'This is event 3', ShouldSkip: false },\n additionalMetadata: { source: 'test', user_id: 'user789' },\n },\n ];\n\n const result = await hatchet.events.bulkPush(SIMPLE_EVENT, events);\n\n expect(result.events.length).toBe(3);\n\n // Sort and verify namespacing\n const sortedEvents = [...events].sort((a, b) => a.key.localeCompare(b.key));\n const sortedResults = [...result.events].sort((a, b) => a.key.localeCompare(b.key));\n\n sortedEvents.forEach((originalEvent, index) => {\n const returnedEvent = sortedResults[index];\n expect(returnedEvent.key).toBe(originalEvent.key);\n });\n }, 15000);\n\n it('should process events 
according to event engine behavior', async () => {\n const eventPromises = [\n createBulkPushEvent({}),\n createBulkPushEvent({\n key: 'thisisafakeeventfoobarbaz',\n shouldHaveRuns: false,\n }),\n ].map((event) => convertBulkToSingle(event));\n const events = await Promise.all(eventPromises);\n\n const eventToRuns = await waitForEventsToProcess(events);\n\n // Verify each event's runs\n Object.keys(eventToRuns).forEach((eventId) => {\n const runs = eventToRuns[eventId];\n const eventInfo = events.find((e) => e.eventId === eventId);\n\n if (eventInfo) {\n const meta = JSON.parse(eventInfo.additionalMetadata || '{}');\n verifyEventRuns(\n {\n shouldHaveRuns: Boolean(meta.should_have_runs),\n },\n runs\n );\n }\n });\n }, 30000);\n\n function generateBulkEvents() {\n return [\n createBulkPushEvent({\n index: 1,\n ShouldSkip: false,\n shouldHaveRuns: true,\n }),\n createBulkPushEvent({\n index: 2,\n ShouldSkip: true,\n shouldHaveRuns: true,\n }),\n createBulkPushEvent({\n index: 3,\n ShouldSkip: false,\n shouldHaveRuns: true,\n scope: testRunId,\n }),\n createBulkPushEvent({\n index: 4,\n ShouldSkip: true,\n shouldHaveRuns: false,\n scope: testRunId,\n }),\n createBulkPushEvent({\n index: 5,\n ShouldSkip: true,\n shouldHaveRuns: false,\n scope: testRunId,\n key: 'thisisafakeeventfoobarbaz',\n }),\n createBulkPushEvent({\n index: 6,\n ShouldSkip: false,\n shouldHaveRuns: false,\n scope: testRunId,\n key: 'thisisafakeeventfoobarbaz',\n }),\n ];\n }\n\n async function convertBulkToSingle(event: any) {\n return hatchet.events.push(event.key, event.payload, {\n scope: event.scope,\n additionalMetadata: event.additionalMetadata,\n priority: event.priority,\n });\n }\n\n it('should handle event skipping and filtering without bulk push', async () => {\n const cleanup = await setupEventFilter();\n\n try {\n const rawEvents = generateBulkEvents();\n const eventPromises = rawEvents.map((event) => convertBulkToSingle(event));\n const events = await Promise.all(eventPromises);\n\n const eventToRuns = await waitForEventsToProcess(events);\n\n // Verify each event's runs\n Object.keys(eventToRuns).forEach((eventId) => {\n const runs = eventToRuns[eventId];\n const eventInfo = events.find((e) => e.eventId === eventId);\n\n if (eventInfo) {\n const meta = JSON.parse(eventInfo.additionalMetadata || '{}');\n verifyEventRuns(\n {\n shouldHaveRuns: Boolean(meta.should_have_runs),\n },\n runs\n );\n }\n });\n } finally {\n await cleanup();\n }\n }, 30000);\n\n it('should filter events by payload expression not matching', async () => {\n const cleanup = await setupEventFilter(\"input.ShouldSkip == false && payload.foobar == 'baz'\", {\n foobar: 'qux',\n });\n\n try {\n const event = await hatchet.events.push(\n SIMPLE_EVENT,\n { Message: 'This is event 1', ShouldSkip: false },\n {\n scope: testRunId,\n additionalMetadata: {\n should_have_runs: 'false',\n test_run_id: testRunId,\n key: '1',\n },\n }\n );\n\n const eventToRuns = await waitForEventsToProcess([event]);\n expect(Object.keys(eventToRuns).length).toBe(0);\n } finally {\n await cleanup();\n }\n }, 20000);\n\n it('should filter events by payload expression matching', async () => {\n const cleanup = await setupEventFilter(\"input.ShouldSkip == false && payload.foobar == 'baz'\", {\n foobar: 'baz',\n });\n\n try {\n const event = await hatchet.events.push(\n SIMPLE_EVENT,\n { Message: 'This is event 1', ShouldSkip: false },\n {\n scope: testRunId,\n additionalMetadata: {\n should_have_runs: 'true',\n test_run_id: testRunId,\n key: '1',\n },\n }\n );\n\n const 
eventToRuns = await waitForEventsToProcess([event]);\n const runs = Object.values(eventToRuns)[0] || [];\n expect(runs.length).toBeGreaterThan(0);\n } finally {\n await cleanup();\n }\n }, 20000);\n});\n", - source: 'out/typescript/on_event/event.e2e.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.ts deleted file mode 100644 index fd2bc4ddd..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { Input } from './workflow';\n\nasync function main() {\n // > Pushing an Event\n const res = await hatchet.events.push('simple-event:create', {\n Message: 'hello',\n ShouldSkip: false,\n });\n\n console.log(res.eventId);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/on_event/event.ts', - blocks: { - pushing_an_event: { - start: 6, - stop: 9, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/filter.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/filter.ts deleted file mode 100644 index 5d50a839d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/filter.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { lower, SIMPLE_EVENT } from './workflow';\n\n// > Create a filter\nhatchet.filters.create({\n workflowId: lower.id,\n expression: 'input.ShouldSkip == false',\n scope: 'foobarbaz',\n payload: {\n main_character: 'Anna',\n supporting_character: 'Stiva',\n location: 'Moscow',\n },\n});\n\n// > Skip a run\nhatchet.events.push(\n SIMPLE_EVENT,\n {\n Message: 'hello',\n ShouldSkip: true,\n },\n {\n scope: 'foobarbaz',\n }\n);\n\n// > Trigger a run\nhatchet.events.push(\n SIMPLE_EVENT,\n {\n Message: 'hello',\n ShouldSkip: false,\n },\n {\n scope: 'foobarbaz',\n }\n);\n", - source: 'out/typescript/on_event/filter.ts', - blocks: { - create_a_filter: { - start: 5, - stop: 14, - }, - skip_a_run: { - start: 17, - stop: 26, - }, - trigger_a_run: { - start: 29, - stop: 38, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/index.ts deleted file mode 100644 index dc6e1575e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import evente2e from './event.e2e'; -import event from './event'; -import filter from './filter'; -import worker from './worker'; -import workflow from './workflow'; - -export { evente2e }; -export { event }; -export { filter }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/worker.ts deleted file mode 100644 index 86dfe1001..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/worker.ts +++ 
/dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { lower, upper } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('on-event-worker', {\n workflows: [lower, upper],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/on_event/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/workflow.ts deleted file mode 100644 index f72896d83..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/workflow.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\nexport type Input = {\n Message: string;\n ShouldSkip: boolean;\n};\n\nexport const SIMPLE_EVENT = 'simple-event:create';\n\ntype LowerOutput = {\n lower: {\n TransformedMessage: string;\n };\n};\n\n// > Run workflow on event\nexport const lower = hatchet.workflow({\n name: 'lower',\n // 👀 Declare the event that will trigger the workflow\n onEvents: ['simple-event:create'],\n});\n\n// > Workflow with filter\nexport const lowerWithFilter = hatchet.workflow({\n name: 'lower',\n // 👀 Declare the event that will trigger the workflow\n onEvents: ['simple-event:create'],\n defaultFilters: [\n {\n expression: 'true',\n scope: 'example-scope',\n payload: {\n mainCharacter: 'Anna',\n supportingCharacter: 'Stiva',\n location: 'Moscow',\n },\n },\n ],\n});\n\nlower.task({\n name: 'lower',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\ntype UpperOutput = {\n upper: {\n TransformedMessage: string;\n };\n};\n\nexport const upper = hatchet.workflow({\n name: 'upper',\n on: {\n event: SIMPLE_EVENT,\n },\n});\n\nupper.task({\n name: 'upper',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n\n// > Accessing the filter payload\nlowerWithFilter.task({\n name: 'lowerWithFilter',\n fn: (input, ctx) => {\n console.log(ctx.filterPayload());\n },\n});\n", - source: 'out/typescript/on_event/workflow.ts', - blocks: { - run_workflow_on_event: { - start: 17, - stop: 21, - }, - workflow_with_filter: { - start: 24, - stop: 39, - }, - accessing_the_filter_payload: { - start: 73, - stop: 78, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/run.ts deleted file mode 100644 index 3a67b160a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/run.ts +++ /dev/null @@ -1,12 +0,0 
@@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { failureWorkflow } from './workflow';\n\nasync function main() {\n try {\n const res = await failureWorkflow.run({});\n console.log(res);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/on_failure/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/worker.ts deleted file mode 100644 index 67f4b048a..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { failureWorkflow } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('always-fail-worker', {\n workflows: [failureWorkflow],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/on_failure/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/workflow.ts deleted file mode 100644 index f6306a97e..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_failure/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\n// > On Failure Task\nexport const failureWorkflow = hatchet.workflow({\n name: 'always-fail',\n});\n\nfailureWorkflow.task({\n name: 'always-fail',\n fn: async () => {\n throw new Error('intentional failure');\n },\n});\n\nfailureWorkflow.onFailure({\n name: 'on-failure',\n fn: async (input, ctx) => {\n console.log('onFailure for run:', ctx.workflowRunId());\n return {\n 'on-failure': 'success',\n };\n },\n});\n", - source: 'out/typescript/on_failure/workflow.ts', - blocks: { - on_failure_task: { - start: 4, - stop: 23, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/run.ts deleted file mode 100644 index c97784d07..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { onSuccessDag } from 
'./workflow';\n\nasync function main() {\n try {\n const res2 = await onSuccessDag.run({});\n console.log(res2);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/on_success/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/worker.ts deleted file mode 100644 index 8bc9e3bf5..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { onSuccessDag } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('always-succeed-worker', {\n workflows: [onSuccessDag],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/on_success/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/workflow.ts deleted file mode 100644 index 7229e5fb0..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_success/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\n// > On Success DAG\nexport const onSuccessDag = hatchet.workflow({\n name: 'on-success-dag',\n});\n\nonSuccessDag.task({\n name: 'always-succeed',\n fn: async () => {\n return {\n 'always-succeed': 'success',\n };\n },\n});\nonSuccessDag.task({\n name: 'always-succeed2',\n fn: async () => {\n return {\n 'always-succeed': 'success',\n };\n },\n});\n\n// 👀 onSuccess handler will run if all tasks in the workflow succeed\nonSuccessDag.onSuccess({\n fn: (_, ctx) => {\n console.log('onSuccess for run:', ctx.workflowRunId());\n return {\n 'on-success': 'success',\n };\n },\n});\n", - source: 'out/typescript/on_success/workflow.ts', - blocks: { - on_success_dag: { - start: 4, - stop: 33, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/run.ts deleted file mode 100644 index abe5518ad..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/run.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { Priority } from 
'@hatchet-dev/typescript-sdk/v1';\nimport { priority } from './workflow';\n\nasync function main() {\n try {\n console.log('running priority workflow');\n\n // > Run a Task with a Priority\n const run = priority.run(new Date(Date.now() + 60 * 60 * 1000), { priority: Priority.HIGH });\n\n // > Schedule and cron\n const scheduled = priority.schedule(\n new Date(Date.now() + 60 * 60 * 1000),\n {},\n { priority: Priority.HIGH }\n );\n const delayed = priority.delay(60 * 60 * 1000, {}, { priority: Priority.HIGH });\n const cron = priority.cron(\n `daily-cron-${Math.random()}`,\n '0 0 * * *',\n {},\n { priority: Priority.HIGH }\n );\n\n const [scheduledResult, delayedResult] = await Promise.all([scheduled, delayed]);\n console.log('scheduledResult', scheduledResult);\n console.log('delayedResult', delayedResult);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/priority/run.ts', - blocks: { - run_a_task_with_a_priority: { - start: 9, - stop: 9, - }, - schedule_and_cron: { - start: 12, - stop: 23, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/worker.ts deleted file mode 100644 index 8b261bf97..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { priorityTasks } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('priority-worker', {\n workflows: [...priorityTasks],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/priority/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/workflow.ts deleted file mode 100644 index a8446b81f..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/priority/workflow.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { Priority } from '@hatchet-dev/typescript-sdk/v1';\nimport { hatchet } from '../hatchet-client';\n\n// > Simple Task Priority\nexport const priority = hatchet.task({\n name: 'priority',\n defaultPriority: Priority.MEDIUM,\n fn: async (_, ctx) => {\n return {\n priority: ctx.priority(),\n };\n },\n});\n\n// > Task Priority in a Workflow\nexport const priorityWf = hatchet.workflow({\n name: 'priorityWf',\n defaultPriority: Priority.LOW,\n});\n\npriorityWf.task({\n name: 'child-medium',\n fn: async (_, ctx) => {\n return {\n priority: ctx.priority(),\n };\n },\n});\n\npriorityWf.task({\n name: 'child-high',\n // will inherit the default priority from the workflow\n fn: async (_, ctx) => {\n return {\n priority: ctx.priority(),\n };\n },\n});\n\nexport const priorityTasks = [priority, priorityWf];\n", - source: 'out/typescript/priority/workflow.ts', - blocks: { - simple_task_priority: { - start: 5, - stop: 13, - }, - task_priority_in_a_workflow: { - start: 16, - stop: 19, - }, - 
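
[Editor's note: in the deleted priority/run.ts snippet above, `priority.run(...)` is passed a `Date` as its first argument, which looks like a copy-paste of the schedule call's trigger time rather than task input. A minimal sketch of the presumably intended call, assuming the same (input, options) argument order that `priority.schedule(...)` and `priority.delay(...)` use elsewhere in this patch:

import { Priority } from '@hatchet-dev/typescript-sdk/v1';
import { priority } from './workflow';

async function main() {
  // Run immediately at HIGH priority — the task input comes first,
  // then the options object, as in the other run methods in this patch.
  const result = await priority.run({}, { priority: Priority.HIGH });
  console.log(result);
}

if (require.main === module) {
  main()
    .catch(console.error)
    .finally(() => process.exit(0));
}
]
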
}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/gitignore.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/gitignore.ts deleted file mode 100644 index 57004994b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/gitignore.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'unknown', - content: - 'certs/\n\n# Environments\n.env\nenv/\n\n# TypeScript React\nnode_modules/\ndist/\nbuild/\n\n.DS_Store\n\nindex/index.json\n', - source: 'out/typescript/quickstart/.gitignore', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/hatchet-client.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/hatchet-client.ts deleted file mode 100644 index 09d8e57da..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/hatchet-client.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import Hatchet from '@hatchet-dev/typescript-sdk/sdk';\n\nexport const hatchet = Hatchet.init();\n", - source: 'out/typescript/quickstart/hatchet-client.ts', - blocks: {}, - highlights: { - client: { - lines: [3], - strings: ['Client'], - }, - }, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/index.ts deleted file mode 100644 index 05c4b45ae..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import gitignore from './gitignore'; -import hatchet_client from './hatchet-client'; -import run from './run'; -import worker from './worker'; -import * as workflows from './workflows'; - -export { gitignore }; -export { hatchet_client }; -export { run }; -export { worker }; -export { workflows }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/run.ts deleted file mode 100644 index 57637cd03..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { firstTask } from './workflows/first-task';\n\nasync function main() {\n const res = await firstTask.run({\n Message: 'Hello World!',\n });\n\n console.log(\n 'Finished running task, and got the transformed message! 
The transformed message is:',\n res.TransformedMessage\n );\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/quickstart/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/worker.ts deleted file mode 100644 index 5f84c12f5..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { firstTask } from './workflows/first-task';\n\nasync function main() {\n const worker = await hatchet.worker('first-worker', {\n workflows: [firstTask],\n slots: 10,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/quickstart/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/workflows/first-task.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/workflows/first-task.ts deleted file mode 100644 index d0466af01..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/workflows/first-task.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n};\n\ntype SimpleOutput = {\n TransformedMessage: string;\n};\n\nexport const firstTask = hatchet.task({\n name: 'first-task',\n fn: (input: SimpleInput, ctx): SimpleOutput => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - source: 'out/typescript/quickstart/workflows/first-task.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/workflows/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/workflows/index.ts deleted file mode 100644 index 90382225d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/quickstart/workflows/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import first_task from './first-task'; - -export { first_task }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/rate_limit/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/rate_limit/index.ts deleted file mode 100644 index f37d26e10..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/rate_limit/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import workflow from './workflow'; - -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/rate_limit/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/rate_limit/workflow.ts deleted file mode 100644 index 345fa100d..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/rate_limit/workflow.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { RateLimitDuration } from 
'@hatchet-dev/typescript-sdk/protoc/v1/workflows';\nimport { hatchet } from '../hatchet-client';\n\n// > Upsert Rate Limit\nhatchet.ratelimits.upsert({\n key: 'api-service-rate-limit',\n limit: 10,\n duration: RateLimitDuration.SECOND,\n});\n\n// > Static\nconst RATE_LIMIT_KEY = 'api-service-rate-limit';\n\nconst task1 = hatchet.task({\n name: 'task1',\n rateLimits: [\n {\n staticKey: RATE_LIMIT_KEY,\n units: 1,\n },\n ],\n fn: (input) => {\n console.log('executed task1');\n },\n});\n\n\n// > Dynamic\nconst task2 = hatchet.task({\n name: 'task2',\n fn: (input: { userId: string }) => {\n console.log('executed task2 for user: ', input.userId);\n },\n rateLimits: [\n {\n dynamicKey: 'input.userId',\n units: 1,\n limit: 10,\n duration: RateLimitDuration.MINUTE,\n },\n ],\n});\n", - source: 'out/typescript/rate_limit/workflow.ts', - blocks: { - upsert_rate_limit: { - start: 5, - stop: 9, - }, - static: { - start: 12, - stop: 26, - }, - dynamic: { - start: 29, - stop: 42, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/run.ts deleted file mode 100644 index 7e3914a40..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { retries } from './workflow';\n\nasync function main() {\n try {\n const res = await retries.run({});\n console.log(res);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/retries/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/worker.ts deleted file mode 100644 index 283d7e007..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { retries } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('always-fail-worker', {\n workflows: [retries],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/retries/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/workflow.ts deleted file mode 100644 index 81708e350..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/retries/workflow.ts +++ 
/dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\n\n// > Simple Step Retries\nexport const retries = hatchet.task({\n name: 'retries',\n retries: 3,\n fn: async (_, ctx) => {\n throw new Error('intentional failure');\n },\n});\n\n// > Retries with Count\nexport const retriesWithCount = hatchet.task({\n name: 'retriesWithCount',\n retries: 3,\n fn: async (_, ctx) => {\n // > Get the current retry count\n const retryCount = ctx.retryCount();\n\n console.log(`Retry count: ${retryCount}`);\n\n if (retryCount < 2) {\n throw new Error('intentional failure');\n }\n\n return {\n message: 'success',\n };\n },\n});\n\n// > Retries with Backoff\nexport const withBackoff = hatchet.task({\n name: 'withBackoff',\n retries: 10,\n backoff: {\n // 👀 Maximum number of seconds to wait between retries\n maxSeconds: 10,\n // 👀 Factor to increase the wait time between retries.\n // This sequence will be 2s, 4s, 8s, 10s, 10s, 10s... due to the maxSeconds limit\n factor: 2,\n },\n fn: async () => {\n throw new Error('intentional failure');\n },\n});\n", - source: 'out/typescript/retries/workflow.ts', - blocks: { - simple_step_retries: { - start: 4, - stop: 10, - }, - get_the_current_retry_count: { - start: 18, - stop: 30, - }, - retries_with_backoff: { - start: 33, - stop: 46, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/bulk.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/bulk.ts deleted file mode 100644 index 97425c239..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/bulk.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { simple, SimpleInput } from './workflow';\n\nasync function main() {\n // > Bulk Run a Task\n const res = await simple.run([\n {\n Message: 'HeLlO WoRlD',\n },\n {\n Message: 'Hello MoOn',\n },\n ]);\n\n // 👀 Access the results of the Task\n console.log(res[0].TransformedMessage);\n console.log(res[1].TransformedMessage);\n\n // > Bulk Run Tasks from within a Task\n const parent = hatchet.task({\n name: 'simple',\n fn: async (input: SimpleInput, ctx) => {\n // Bulk run two tasks in parallel\n const child = await ctx.bulkRunChildren([\n {\n workflow: simple,\n input: {\n Message: 'Hello, World!',\n },\n },\n {\n workflow: simple,\n input: {\n Message: 'Hello, Moon!',\n },\n },\n ]);\n\n return {\n TransformedMessage: `${child[0].TransformedMessage} ${child[1].TransformedMessage}`,\n };\n },\n });\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/simple/bulk.ts', - blocks: { - bulk_run_a_task: { - start: 6, - stop: 17, - }, - bulk_run_tasks_from_within_a_task: { - start: 20, - stop: 43, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/client-run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/client-run.ts deleted file mode 100644 index 459dcf3a4..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/client-run.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 
'typescript ', - content: - "// > Client Run Methods\nimport { hatchet } from '../hatchet-client';\n\nhatchet.run('simple', { Message: 'Hello, World!' });\n\nhatchet.runNoWait('simple', { Message: 'Hello, World!' }, {});\n\nhatchet.schedules.create('simple', {\n triggerAt: new Date(Date.now() + 1000 * 60 * 60 * 24),\n input: { Message: 'Hello, World!' },\n});\n\nhatchet.crons.create('simple', {\n name: 'my-cron',\n expression: '0 0 * * *',\n input: { Message: 'Hello, World!' },\n});\n", - source: 'out/typescript/simple/client-run.ts', - blocks: { - client_run_methods: { - start: 2, - stop: 17, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/cron.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/cron.ts deleted file mode 100644 index dd95e4620..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/cron.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n // > Create\n const cron = await simple.cron('simple-daily', '0 0 * * *', {\n Message: 'hello',\n });\n\n // it may be useful to save the cron id for later\n const cronId = cron.metadata.id;\n\n console.log(cron.metadata.id);\n\n // > Delete\n await hatchet.crons.delete(cronId);\n\n // > List\n const crons = await hatchet.crons.list({\n workflow: simple,\n });\n\n console.log(crons);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/simple/cron.ts', - blocks: { - create: { - start: 6, - stop: 11, - }, - delete: { - start: 16, - stop: 16, - }, - list: { - start: 19, - stop: 21, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/delay.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/delay.ts deleted file mode 100644 index 40d6a3df2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/delay.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n const tomorrow = 24 * 60 * 60; // 1 day\n const scheduled = await simple.delay(tomorrow, {\n Message: 'hello',\n });\n\n console.log(scheduled.metadata.id);\n\n await hatchet.schedules.delete(scheduled);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/simple/delay.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/enqueue.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/enqueue.ts deleted file mode 100644 index e24958760..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/enqueue.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { SimpleOutput } from './stub-workflow';\n// > Enqueuing a Workflow (Fire and Forget)\nimport { simple } from './workflow';\n// ...\n\nasync function main() {\n // 👀 Enqueue the 
workflow\n const run = await simple.runNoWait({\n Message: 'hello',\n });\n\n // 👀 Get the run ID of the workflow\n const runId = await run.getWorkflowRunId();\n // It may be helpful to store the run ID of the workflow\n // in a database or other persistent storage for later use\n console.log(runId);\n\n // > Subscribing to results\n // the return object of the enqueue method is a WorkflowRunRef which includes a listener for the result of the workflow\n const result = await run.result();\n console.log(result);\n\n // if you need to subscribe to the result of the workflow at a later time, you can use the runRef method and the stored runId\n const ref = hatchet.runRef(runId);\n const result2 = await ref.result();\n console.log(result2);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/simple/enqueue.ts', - blocks: { - enqueuing_a_workflow_fire_and_forget: { - start: 4, - stop: 17, - }, - subscribing_to_results: { - start: 20, - stop: 27, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/index.ts deleted file mode 100644 index 052ec4d83..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/index.ts +++ /dev/null @@ -1,23 +0,0 @@ -import bulk from './bulk'; -import client_run from './client-run'; -import cron from './cron'; -import delay from './delay'; -import enqueue from './enqueue'; -import run from './run'; -import schedule from './schedule'; -import stub_workflow from './stub-workflow'; -import worker from './worker'; -import workflow_with_child from './workflow-with-child'; -import workflow from './workflow'; - -export { bulk }; -export { client_run }; -export { cron }; -export { delay }; -export { enqueue }; -export { run }; -export { schedule }; -export { stub_workflow }; -export { worker }; -export { workflow_with_child }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/run.ts deleted file mode 100644 index d835ccce2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/run.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\nimport { parent } from './workflow-with-child';\n\nasync function main() {\n // > Running a Task\n const res = await parent.run(\n {\n Message: 'HeLlO WoRlD',\n },\n {\n additionalMetadata: {\n test: 'test',\n },\n }\n );\n\n // 👀 Access the results of the Task\n console.log(res.TransformedMessage);\n}\n\nexport async function extra() {\n // > Running Multiple Tasks\n const res1 = simple.run({\n Message: 'HeLlO WoRlD',\n });\n\n const res2 = simple.run({\n Message: 'Hello MoOn',\n });\n\n const results = await Promise.all([res1, res2]);\n\n console.log(results[0].TransformedMessage);\n console.log(results[1].TransformedMessage);\n\n // > Spawning Tasks from within a Task\n const parentTask = hatchet.task({\n name: 'parent',\n fn: async (input, ctx) => {\n // Simply the task and it will be spawned from the parent task\n const child = await simple.run({\n Message: 'HeLlO WoRlD',\n });\n\n return {\n result: child.TransformedMessage,\n };\n },\n });\n}\n\nif (require.main === module) {\n main()\n 
.catch(console.error)\n .finally(() => {\n process.exit(0);\n });\n}\n", - source: 'out/typescript/simple/run.ts', - blocks: { - running_a_task: { - start: 7, - stop: 19, - }, - running_multiple_tasks: { - start: 24, - stop: 35, - }, - spawning_tasks_from_within_a_task: { - start: 38, - stop: 50, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/schedule.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/schedule.ts deleted file mode 100644 index 1d581c3ae..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/schedule.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n // > Create a Scheduled Run\n\n const runAt = new Date(new Date().setHours(12, 0, 0, 0) + 24 * 60 * 60 * 1000);\n\n const scheduled = await simple.schedule(runAt, {\n Message: 'hello',\n });\n\n // 👀 Get the scheduled run ID of the workflow\n // it may be helpful to store the scheduled run ID of the workflow\n // in a database or other persistent storage for later use\n const scheduledRunId = scheduled.metadata.id;\n console.log(scheduledRunId);\n\n // > Delete a Scheduled Run\n await hatchet.scheduled.delete(scheduled);\n\n // > List Scheduled Runs\n const scheduledRuns = await hatchet.scheduled.list({\n workflow: simple,\n });\n console.log(scheduledRuns);\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/simple/schedule.ts', - blocks: { - create_a_scheduled_run: { - start: 6, - stop: 17, - }, - delete_a_scheduled_run: { - start: 20, - stop: 20, - }, - list_scheduled_runs: { - start: 23, - stop: 26, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/stub-workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/stub-workflow.ts deleted file mode 100644 index 470da1586..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/stub-workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Declaring an External Workflow Reference\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// (optional) Define the output type for the workflow\nexport type SimpleOutput = {\n 'to-lower': {\n TransformedMessage: string;\n };\n};\n\n// declare the workflow with the same name as the\n// workflow name on the worker\nexport const simple = hatchet.workflow({\n name: 'simple',\n});\n\n// you can use all the same run methods on the stub\n// with full type-safety\nsimple.run({ Message: 'Hello, World!' });\nsimple.runNoWait({ Message: 'Hello, World!' });\nsimple.schedule(new Date(), { Message: 'Hello, World!' });\nsimple.cron('my-cron', '0 0 * * *', { Message: 'Hello, World!' 
});\n", - source: 'out/typescript/simple/stub-workflow.ts', - blocks: { - declaring_an_external_workflow_reference: { - start: 2, - stop: 27, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/worker.ts deleted file mode 100644 index f696637db..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\nimport { parent, child } from './workflow-with-child';\n\nasync function main() {\n const worker = await hatchet.worker('simple-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [simple, parent, child],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 100,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/simple/worker.ts', - blocks: { - declaring_a_worker: { - start: 2, - stop: 19, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/workflow-with-child.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/workflow-with-child.ts deleted file mode 100644 index 642d140dc..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/workflow-with-child.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Declaring a Task\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type ChildInput = {\n Message: string;\n};\n\nexport type ParentInput = {\n Message: string;\n};\n\nexport const child = hatchet.workflow({\n name: 'child',\n});\n\nexport const child1 = child.task({\n name: 'child1',\n fn: (input: ChildInput, ctx) => {\n ctx.logger.info('hello from the child1', { hello: 'moon' });\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nexport const child2 = child.task({\n name: 'child2',\n fn: (input: ChildInput, ctx) => {\n ctx.logger.info('hello from the child2');\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nexport const child3 = child.task({\n name: 'child3',\n parents: [child1, child2],\n fn: (input: ChildInput, ctx) => {\n ctx.logger.info('hello from the child3');\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nexport const parent = hatchet.task({\n name: 'parent',\n fn: async (input: ParentInput, ctx) => {\n const c = await ctx.runChild(child, {\n Message: input.Message,\n });\n\n return {\n TransformedMessage: 'not implemented',\n };\n },\n});\n\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - source: 'out/typescript/simple/workflow-with-child.ts', - blocks: { - declaring_a_task: { - start: 2, - stop: 60, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/workflow.ts deleted file mode 100644 index 13bec3c62..000000000 --- 
a/frontend/app/src/next/lib/docs/generated/snips/typescript/simple/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Declaring a Task\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\nexport const simple = hatchet.task({\n name: 'simple',\n retries: 3,\n fn: async (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - source: 'out/typescript/simple/workflow.ts', - blocks: { - declaring_a_task: { - start: 2, - stop: 18, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/run.ts deleted file mode 100644 index 3e7ba7607..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/run.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { retries } from '../retries/workflow';\n\nasync function main() {\n try {\n const res = await retries.run({});\n console.log(res);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/sticky/run.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/worker.ts deleted file mode 100644 index f186a6082..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { retries } from '../retries/workflow';\n\nasync function main() {\n const worker = await hatchet.worker('always-fail-worker', {\n workflows: [retries],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/sticky/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/workflow.ts deleted file mode 100644 index 43cf89eac..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/sticky/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { StickyStrategy } from 
'@hatchet-dev/typescript-sdk/protoc/workflows';\nimport { hatchet } from '../hatchet-client';\nimport { child } from '../child_workflows/workflow';\n\n// > Sticky Task\nexport const sticky = hatchet.task({\n name: 'sticky',\n retries: 3,\n sticky: StickyStrategy.SOFT,\n fn: async (_, ctx) => {\n // specify a child workflow to run on the same worker\n const result = await child.run(\n {\n N: 1,\n },\n { sticky: true }\n );\n\n return {\n result,\n };\n },\n});\n", - source: 'out/typescript/sticky/workflow.ts', - blocks: { - sticky_task: { - start: 6, - stop: 23, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/index.ts deleted file mode 100644 index 9d70edb0b..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import nextjs_proxy from './nextjs-proxy'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { nextjs_proxy }; -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/nextjs-proxy.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/nextjs-proxy.ts deleted file mode 100644 index 0199eb700..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/nextjs-proxy.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { Readable } from 'stream';\nimport { hatchet } from '../hatchet-client';\nimport { streamingTask } from './workflow';\n\n// > NextJS Proxy\nexport async function GET() {\n try {\n const ref = await streamingTask.runNoWait({});\n const workflowRunId = await ref.getWorkflowRunId();\n\n const stream = Readable.from(hatchet.runs.subscribeToStream(workflowRunId));\n\n // @ts-ignore\n return new Response(Readable.toWeb(stream), {\n headers: {\n 'Content-Type': 'text/plain',\n 'Cache-Control': 'no-cache',\n Connection: 'keep-alive',\n },\n });\n } catch (error) {\n return new Response('Internal Server Error', { status: 500 });\n }\n}\n", - source: 'out/typescript/streaming/nextjs-proxy.ts', - blocks: { - nextjs_proxy: { - start: 6, - stop: 24, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/run.ts deleted file mode 100644 index 8a6755bb2..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/run.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { streamingTask } from './workflow';\nimport { hatchet } from '../hatchet-client';\n\nasync function main() {\n // > Consume\n const ref = await streamingTask.runNoWait({});\n const id = await ref.getWorkflowRunId();\n\n for await (const content of hatchet.runs.subscribeToStream(id)) {\n process.stdout.write(content);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => {\n process.exit(0);\n });\n}\n", - source: 'out/typescript/streaming/run.ts', - blocks: { - consume: { - start: 6, - stop: 11, - }, - }, - highlights: {}, -}; - -export default 
snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/worker.ts deleted file mode 100644 index d7a2b1eec..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/worker.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import { hatchet } from '../hatchet-client';\nimport { streamingTask } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('streaming-worker', {\n workflows: [streamingTask],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/streaming/worker.ts', - blocks: {}, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/workflow.ts deleted file mode 100644 index 07c190314..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/streaming/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "import sleep from '../../../util/sleep';\nimport { hatchet } from '../hatchet-client';\n\n// > Streaming\nconst annaKarenina = `\nHappy families are all alike; every unhappy family is unhappy in its own way.\n\nEverything was in confusion in the Oblonskys' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him.\n`;\n\nfunction* createChunks(content: string, n: number): Generator {\n for (let i = 0; i < content.length; i += n) {\n yield content.slice(i, i + n);\n }\n}\n\nexport const streamingTask = hatchet.task({\n name: 'stream-example',\n fn: async (_, ctx) => {\n await sleep(2000);\n\n for (const chunk of createChunks(annaKarenina, 10)) {\n ctx.putStream(chunk);\n await sleep(200);\n }\n },\n});\n\n", - source: 'out/typescript/streaming/workflow.ts', - blocks: { - streaming: { - start: 5, - stop: 28, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/index.ts deleted file mode 100644 index 9f263b9c7..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run }; -export { worker }; -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/run.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/run.ts deleted file mode 100644 index e4f633728..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/run.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Running a Task with Results\nimport { cancellation } from './workflow';\n// ...\nasync function main() {\n // 👀 Run the workflow with results\n const res = await 
cancellation.run({});\n\n // 👀 Access the results of the workflow\n console.log(res.Completed);\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - source: 'out/typescript/timeouts/run.ts', - blocks: { - running_a_task_with_results: { - start: 2, - stop: 9, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/worker.ts deleted file mode 100644 index b5d9d5a68..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/worker.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { cancellation } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('cancellation-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [cancellation],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 100,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - source: 'out/typescript/timeouts/worker.ts', - blocks: { - declaring_a_worker: { - start: 2, - stop: 18, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/workflow.ts deleted file mode 100644 index f23cf6f34..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/timeouts/workflow.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > Declaring a Task\nimport sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport const cancellation = hatchet.task({\n name: 'cancellation',\n executionTimeout: '3s',\n fn: async (_, { cancelled }) => {\n await sleep(10 * 1000);\n\n if (cancelled) {\n throw new Error('Task was cancelled');\n }\n\n return {\n Completed: true,\n };\n },\n});\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - source: 'out/typescript/timeouts/workflow.ts', - blocks: { - declaring_a_task: { - start: 2, - stop: 20, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/with_timeouts/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/with_timeouts/index.ts deleted file mode 100644 index f37d26e10..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/with_timeouts/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import workflow from './workflow'; - -export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/with_timeouts/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/with_timeouts/workflow.ts deleted file mode 100644 index ae3262a70..000000000 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/with_timeouts/workflow.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { Snippet } from '@/next/lib/docs/generated/snips/types'; - -const snippet: Snippet = { - language: 'typescript ', - content: - "// > 
Declaring a Task\nimport sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// > Execution Timeout\nexport const withTimeouts = hatchet.task({\n name: 'with-timeouts',\n // time the task can wait in the queue before it is cancelled\n scheduleTimeout: '10s',\n // time the task can run before it is cancelled\n executionTimeout: '10s',\n fn: async (input: SimpleInput, ctx) => {\n // wait 15 seconds\n await sleep(15000);\n\n // get the abort controller\n const { abortController } = ctx;\n\n // if the abort controller is aborted, throw an error\n if (abortController.signal.aborted) {\n throw new Error('cancelled');\n }\n\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\n// > Refresh Timeout\nexport const refreshTimeout = hatchet.task({\n name: 'refresh-timeout',\n executionTimeout: '10s',\n scheduleTimeout: '10s',\n fn: async (input: SimpleInput, ctx) => {\n // adds 15 seconds to the execution timeout\n ctx.refreshTimeout('15s');\n await sleep(15000);\n\n // get the abort controller\n const { abortController } = ctx;\n\n // now this condition will not be met\n // if the abort controller is aborted, throw an error\n if (abortController.signal.aborted) {\n throw new Error('cancelled');\n }\n\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - source: 'out/typescript/with_timeouts/workflow.ts', - blocks: { - execution_timeout: { - start: 11, - stop: 33, - }, - refresh_timeout: { - start: 36, - stop: 58, - }, - }, - highlights: {}, -}; - -export default snippet; diff --git a/frontend/app/src/next/lib/docs/snips.ts b/frontend/app/src/next/lib/docs/snips.ts deleted file mode 100644 index eea7fa806..000000000 --- a/frontend/app/src/next/lib/docs/snips.ts +++ /dev/null @@ -1,5 +0,0 @@ -import * as snippets from './generated/snips/index'; -import { Snippet as SnippetType } from './generated/snips/types'; - -export type Snippet = SnippetType; -export default snippets; diff --git a/frontend/app/src/next/lib/docs/sync-docs.ts b/frontend/app/src/next/lib/docs/sync-docs.ts deleted file mode 100644 index cc11f76f0..000000000 --- a/frontend/app/src/next/lib/docs/sync-docs.ts +++ /dev/null @@ -1,332 +0,0 @@ -#!/usr/bin/env node -import fs from 'fs'; -import path from 'path'; -import { fileURLToPath } from 'url'; - -// Get the directory paths -const dirname = path.dirname(fileURLToPath(import.meta.url)); -const docsDir = path.resolve(dirname, '../../../../../docs/pages'); -const generatedDir = path.resolve(dirname, 'generated'); - -// Make sure the generated directory exists -if (!fs.existsSync(generatedDir)) { - fs.mkdirSync(generatedDir, { recursive: true }); -} - -// Keep track of found meta files for generating the index -const metaFiles: { importName: string; importPath: string }[] = []; - -// Function to process a directory recursively -function processDirectory(dirPath: string, relativePath: string = '') { - const entries = fs.readdirSync(dirPath, { withFileTypes: true }); - - // First, find if there's a _meta.js file in this directory - const metaFile = entries.find( - (entry) => entry.isFile() && entry.name === '_meta.js', - ); - - if (metaFile) { - // Determine the target directory path - const targetDirPath = path.join(generatedDir, relativePath); - - // Create the target directory if it doesn't exist - if (!fs.existsSync(targetDirPath)) { - fs.mkdirSync(targetDirPath, { recursive: 
true }); - } - - // Read the meta file - const metaFilePath = path.join(dirPath, metaFile.name); - const metaContent = fs.readFileSync(metaFilePath, 'utf8'); - - try { - // Build a dictionary from the meta.js file - const metaObject = parseMetaFile(metaContent); - - // Process the object to ensure all entries have title and href - const processedObj = processMetaObject(metaObject, relativePath); - - // Convert to well-formatted string - const formattedStr = formatObject(processedObj); - const formattedPath = metaFilePath.replace(/^.*?(frontend)/i, '$1'); - - // Create a TypeScript version with the proper format - const tsContent = `// Generated from ${formattedPath} -const meta = ${formattedStr}; -export default meta; -`; - - // Write the TypeScript file - const targetFilePath = path.join(targetDirPath, '_meta.ts'); - fs.writeFileSync(targetFilePath, tsContent, 'utf8'); - - // Add to the list of meta files for the index - let importName = relativePath.replace(/\//g, '') || 'root'; - - // Handle nested paths by creating camelCase names - if (importName !== 'root') { - importName = importName - .split('/') - .map((part, index) => { - if (index === 0) { - return part; - } - return part.charAt(0).toUpperCase() + part.slice(1); - }) - .join(''); - } - - metaFiles.push({ - importName: importName.replace(/-/g, '_'), - importPath: `./${relativePath ? relativePath + '/' : ''}_meta`, - }); - } catch (err) { - console.error(`Error processing ${metaFilePath}:`, err); - const formattedPath = metaFilePath.replace(/^.*?(frontend)/i, '$1'); - - // Create a TypeScript version with the proper format - const tsContent = `// Generated from ${formattedPath} -const meta = ${metaContent.replace('export default', '')}; -export default meta; -`; - const targetFilePath = path.join(targetDirPath, '_meta.ts'); - fs.writeFileSync(targetFilePath, tsContent, 'utf8'); - } - } - - // Process subdirectories - for (const entry of entries) { - if (entry.isDirectory()) { - processDirectory( - path.join(dirPath, entry.name), - relativePath ? 
path.join(relativePath, entry.name) : entry.name, - ); - } - } -} - -// Function to parse a meta.js file into a dictionary -function parseMetaFile(content: string): Record { - // Remove the "export default" and trailing semicolon - const objectStr = content - .replace(/export\s+default\s*/, '') - .trim() - .replace(/;$/, ''); - - // Parse the object structure - const result: Record = {}; - - // Simple regex-based parser for the object structure - // Extract key-value pairs from the object - let depth = 0; - let inString = false; - let stringDelimiter = ''; - - // Skip the first opening brace and last closing brace - const objContent = objectStr - .substring(objectStr.indexOf('{') + 1, objectStr.lastIndexOf('}')) - .trim(); - - // Split by commas that are not inside nested objects or strings - const entries: string[] = []; - let currentEntry = ''; - - for (let i = 0; i < objContent.length; i++) { - const char = objContent[i]; - - if (char === '{') { - depth++; - } - if (char === '}') { - depth--; - } - - if (char === '"' || char === "'") { - if (!inString) { - inString = true; - stringDelimiter = char; - } else if (char === stringDelimiter && objContent[i - 1] !== '\\') { - inString = false; - } - } - - if (char === ',' && depth === 0 && !inString) { - entries.push(currentEntry.trim()); - currentEntry = ''; - } else { - currentEntry += char; - } - } - - if (currentEntry.trim()) { - entries.push(currentEntry.trim()); - } - - // Process each entry - for (const entry of entries) { - // Split by the first colon not in a string - let colonIndex = -1; - inString = false; - stringDelimiter = ''; - - for (let i = 0; i < entry.length; i++) { - const char = entry[i]; - - if (char === '"' || char === "'") { - if (!inString) { - inString = true; - stringDelimiter = char; - } else if (char === stringDelimiter && entry[i - 1] !== '\\') { - inString = false; - } - } - - if (char === ':' && !inString && colonIndex === -1) { - colonIndex = i; - break; - } - } - - if (colonIndex === -1) { - continue; - } - - const keyPart = entry.substring(0, colonIndex).trim(); - const valuePart = entry.substring(colonIndex + 1).trim(); - - // Extract the key (remove quotes if present) - let key = keyPart; - if ( - (key.startsWith('"') && key.endsWith('"')) || - (key.startsWith("'") && key.endsWith("'")) - ) { - key = key.substring(1, key.length - 1); - } - - // Parse the value - let value: any; - - if (valuePart === 'true') { - value = true; - } else if (valuePart === 'false') { - value = false; - } else if (valuePart === 'null') { - value = null; - } else if (valuePart === 'undefined') { - value = undefined; - } else if (valuePart.startsWith('{') && valuePart.endsWith('}')) { - // Nested object - recursively parse - value = parseMetaFile(`export default ${valuePart}`); - } else if ( - (valuePart.startsWith('"') && valuePart.endsWith('"')) || - (valuePart.startsWith("'") && valuePart.endsWith("'")) - ) { - // String value - value = valuePart.substring(1, valuePart.length - 1); - } else if (!isNaN(Number(valuePart))) { - // Number value - value = Number(valuePart); - } else { - // Unknown/complex value - keep as string - value = valuePart; - } - - result[key] = value; - } - - return result; -} - -// Function to format an object as a string -function formatObject(obj: Record, indent = 0): string { - const spaces = ' '.repeat(indent); - let result = '{\n'; - - for (const [key, value] of Object.entries(obj)) { - // Format the key - add quotes for keys with special characters - const formattedKey = /^[a-zA-Z0-9_]+$/.test(key) ? 
-
-// Function to format an object as a string
-function formatObject(obj: Record<string, any>, indent = 0): string {
-  const spaces = ' '.repeat(indent);
-  let result = '{\n';
-
-  for (const [key, value] of Object.entries(obj)) {
-    // Format the key - add quotes for keys with special characters
-    const formattedKey = /^[a-zA-Z0-9_]+$/.test(key) ? key : `'${key}'`;
-
-    // Format the value based on type
-    let formattedValue: string;
-
-    if (value === null) {
-      formattedValue = 'null';
-    } else if (typeof value === 'object') {
-      formattedValue = formatObject(value, indent + 2);
-    } else if (typeof value === 'string') {
-      formattedValue = `'${value.replace(/'/g, "\\'")}'`;
-    } else {
-      formattedValue = String(value);
-    }
-
-    result += `${spaces}  ${formattedKey}: ${formattedValue},\n`;
-  }
-
-  if (result.endsWith(',\n')) {
-    result = result.slice(0, -2) + '\n';
-  }
-
-  result += `${spaces}}`;
-  return result;
-}
-
-// Function to recursively process the meta object
-function processMetaObject(
-  obj: Record<string, any>,
-  relativePath: string,
-): Record<string, any> {
-  const result: Record<string, any> = {};
-
-  // Process each key in the object
-  for (const [key, value] of Object.entries(obj)) {
-    // Sanitize the key - replace hyphens with underscores, except for --prefixed keys
-    const sanitizedKey = key.startsWith('--') ? key : key.replace(/-/g, '_');
-
-    // Skip --prefixed entries (but preserve them)
-    if (key.startsWith('--')) {
-      result[sanitizedKey] = value;
-      continue;
-    }
-
-    if (typeof value === 'string') {
-      // Convert string values to objects with title and href
-      result[sanitizedKey] = {
-        title: value,
-        href:
-          key === 'index'
-            ? `/${relativePath ? relativePath + '/' : ''}`
-            : `/${relativePath ? relativePath + '/' : ''}${key}`, // Keep original key for href
-      };
-    } else if (typeof value === 'object' && value !== null) {
-      // Already an object, make sure it has href if it has title
-      if (value.title && !value.href && !value.type) {
-        result[sanitizedKey] = {
-          ...value,
-          href: `/${relativePath ? relativePath + '/' : ''}${key}`, // Keep original key for href
-        };
-      } else {
-        // Just keep the original object
-        result[sanitizedKey] = value;
-      }
-    } else {
-      // For any other type, just pass it through
-      result[sanitizedKey] = value;
-    }
-  }
-
-  return result;
-}
-
-// Start processing from the root docs directory
-processDirectory(docsDir);
-
-// Generate the index.ts file
-const indexContent = `// Generated index file for meta-data
-${metaFiles.map((file) => `import ${file.importName} from '${file.importPath}';`).join('\n')}
-export { ${metaFiles.map((file) => file.importName).join(', ')} };
-`;
-
-// Write the index file
-fs.writeFileSync(path.join(generatedDir, 'index.ts'), indexContent, 'utf8');
-
-// eslint-disable-next-line no-console
-console.log(`Generated ${metaFiles.length} meta files in ${generatedDir}`);
diff --git a/frontend/docs/components/code/Snippet.tsx b/frontend/docs/components/code/Snippet.tsx
index 6da95813a..7395cf4dd 100644
--- a/frontend/docs/components/code/Snippet.tsx
+++ b/frontend/docs/components/code/Snippet.tsx
@@ -4,14 +4,14 @@ import { Snippet as SnippetType } from "@/lib/snips";
 
 interface SnippetProps {
   src: SnippetType;
-  block?: keyof SnippetType['blocks'] | 'ALL';
+  block?: keyof SnippetType["blocks"] | "ALL";
 }
 
 const languageMap = {
-  typescript: 'ts',
-  python: 'py',
-  go: 'go',
-  unknown: 'txt',
+  typescript: "ts",
+  python: "py",
+  go: "go",
+  unknown: "txt",
 };
 
 // This is a server component that will be rendered at build time
@@ -25,38 +25,38 @@ export const Snippet = ({ src, block }: SnippetProps) => {
     if (normalizedLanguage && normalizedLanguage in languageMap) {
       return languageMap[normalizedLanguage as keyof typeof languageMap];
     }
-    return 'txt';
+    return "txt";
   }, [src.language]);
 
   let content = src.content;
 
-  if (block && block !== 'ALL' && src.blocks) {
+  if (block && block !== "ALL" && src.blocks) {
     if (!(block
in src.blocks)) { throw new Error( - `Block ${block} not found in ${src.source} ${JSON.stringify(src.blocks, null, 2)}`, + `Block ${block} not found in ${src.source} ${JSON.stringify(src.blocks, null, 2)}` ); } - const lines = src.content.split('\n'); + const lines = src.content.split("\n"); content = lines .slice(src.blocks[block].start - 1, src.blocks[block].stop) - .join('\n'); + .join("\n"); } - const fixedSource = src.source.replace('out/', 'examples/'); + const fixedSource = src.source.replace("out/", "examples/"); return ( <> + /> ); }; diff --git a/frontend/docs/lib/generated/snips/go/bulk-operations/index.ts b/frontend/docs/lib/generated/snips/go/bulk-operations/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/bulk-operations/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/bulk-operations/main.ts b/frontend/docs/lib/generated/snips/go/bulk-operations/main.ts deleted file mode 100644 index d7b60792c..000000000 --- a/frontend/docs/lib/generated/snips/go/bulk-operations/main.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n\t\"github.com/oapi-codegen/runtime/types\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/rest\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n)\n\nfunc main() {\n\t// > Setup\n\n\thatchet, err := v1.NewHatchetClient()\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to create hatchet client: %v\", err)\n\t}\n\n\tctx := context.Background()\n\n\tworkflows, err := hatchet.Workflows().List(ctx, nil)\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to list workflows: %v\", err)\n\t}\n\n\tif workflows == nil || workflows.Rows == nil || len(*workflows.Rows) == 0 {\n\t\tlog.Fatalf(\"no workflows found\")\n\t}\n\n\tselectedWorkflow := (*workflows.Rows)[0]\n\tselectedWorkflowUUID := uuid.MustParse(selectedWorkflow.Metadata.Id)\n\n\n\t// > List runs\n\tworkflowRuns, err := hatchet.Runs().List(ctx, rest.V1WorkflowRunListParams{\n\t\tWorkflowIds: &[]types.UUID{selectedWorkflowUUID},\n\t})\n\tif err != nil || workflowRuns == nil || workflowRuns.JSON200 == nil || workflowRuns.JSON200.Rows == nil {\n\t\tlog.Fatalf(\"failed to list workflow runs for workflow %s: %v\", selectedWorkflow.Name, err)\n\t}\n\n\tvar runIds []types.UUID\n\n\tfor _, run := range workflowRuns.JSON200.Rows {\n\t\trunIds = append(runIds, uuid.MustParse(run.Metadata.Id))\n\t}\n\n\n\t// > Cancel by run ids\n\t_, err = hatchet.Runs().Cancel(ctx, rest.V1CancelTaskRequest{\n\t\tExternalIds: &runIds,\n\t})\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to cancel runs by ids: %v\", err)\n\t}\n\n\n\t// > Cancel by filters\n\ttNow := time.Now().UTC()\n\n\t_, err = hatchet.Runs().Cancel(ctx, rest.V1CancelTaskRequest{\n\t\tFilter: &rest.V1TaskFilter{\n\t\t\tSince: tNow.Add(-24 * time.Hour),\n\t\t\tUntil: &tNow,\n\t\t\tStatuses: &[]rest.V1TaskStatus{rest.V1TaskStatusRUNNING},\n\t\t\tWorkflowIds: &[]types.UUID{selectedWorkflowUUID},\n\t\t\tAdditionalMetadata: &[]string{`{\"key\": \"value\"}`},\n\t\t},\n\t})\n\tif err != nil {\n\t\tlog.Fatalf(\"failed to cancel runs by filters: %v\", err)\n\t}\n\n\n\tfmt.Println(\"cancelled all runs for workflow\", selectedWorkflow.Name)\n}\n", - "source": "out/go/bulk-operations/main.go", - "blocks": { - "setup": { - "start": 18, - "stop": 37 - 
}, - "list_runs": { - "start": 40, - "stop": 52 - }, - "cancel_by_run_ids": { - "start": 55, - "stop": 61 - }, - "cancel_by_filters": { - "start": 64, - "stop": 78 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/index.ts b/frontend/docs/lib/generated/snips/go/index.ts deleted file mode 100644 index 8eaeda170..000000000 --- a/frontend/docs/lib/generated/snips/go/index.ts +++ /dev/null @@ -1,17 +0,0 @@ -import * as bulk_operations from './bulk-operations'; -import * as migration_guides from './migration-guides'; -import * as quickstart from './quickstart'; -import * as run from './run'; -import * as streaming from './streaming'; -import * as worker from './worker'; -import * as workflows from './workflows'; -import * as z_v0 from './z_v0'; - -export { bulk_operations }; -export { migration_guides }; -export { quickstart }; -export { run }; -export { streaming }; -export { worker }; -export { workflows }; -export { z_v0 }; diff --git a/frontend/docs/lib/generated/snips/go/migration-guides/hatchet-client.ts b/frontend/docs/lib/generated/snips/go/migration-guides/hatchet-client.ts deleted file mode 100644 index aa78a724a..000000000 --- a/frontend/docs/lib/generated/snips/go/migration-guides/hatchet-client.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package migration_guides\n\nimport (\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n)\n\nfunc HatchetClient() (v1.HatchetClient, error) {\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn hatchet, nil\n}\n", - "source": "out/go/migration-guides/hatchet-client.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/migration-guides/index.ts b/frontend/docs/lib/generated/snips/go/migration-guides/index.ts deleted file mode 100644 index d47a3d13a..000000000 --- a/frontend/docs/lib/generated/snips/go/migration-guides/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import hatchet_client from './hatchet-client'; -import mergent from './mergent'; - -export { hatchet_client } -export { mergent } diff --git a/frontend/docs/lib/generated/snips/go/migration-guides/mergent.ts b/frontend/docs/lib/generated/snips/go/migration-guides/mergent.ts deleted file mode 100644 index aa1c7dc90..000000000 --- a/frontend/docs/lib/generated/snips/go/migration-guides/mergent.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package migration_guides\n\nimport (\n\t\"bytes\"\n\t\"context\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"net/http\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\tv1worker \"github.com/hatchet-dev/hatchet/pkg/v1/worker\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\n// ProcessImage simulates image processing\nfunc ProcessImage(imageURL string, filters []string) (map[string]interface{}, error) {\n\t// Do some image processing\n\treturn map[string]interface{}{\n\t\t\"url\": imageURL,\n\t\t\"size\": 100,\n\t\t\"format\": \"png\",\n\t}, nil\n}\n\n// > Before (Mergent)\ntype MergentRequest struct {\n\tImageURL string `json:\"image_url\"`\n\tFilters []string `json:\"filters\"`\n}\n\ntype MergentResponse 
struct {\n\tSuccess bool `json:\"success\"`\n\tProcessedURL string `json:\"processed_url\"`\n}\n\nfunc ProcessImageMergent(req MergentRequest) (*MergentResponse, error) {\n\tresult, err := ProcessImage(req.ImageURL, req.Filters)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn &MergentResponse{\n\t\tSuccess: true,\n\t\tProcessedURL: result[\"url\"].(string),\n\t}, nil\n}\n\n\n// > After (Hatchet)\ntype ImageProcessInput struct {\n\tImageURL string `json:\"image_url\"`\n\tFilters []string `json:\"filters\"`\n}\n\ntype ImageProcessOutput struct {\n\tProcessedURL string `json:\"processed_url\"`\n\tMetadata struct {\n\t\tSize int `json:\"size\"`\n\t\tFormat string `json:\"format\"`\n\t\tAppliedFilters []string `json:\"applied_filters\"`\n\t} `json:\"metadata\"`\n}\n\nfunc ImageProcessor(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ImageProcessInput, ImageProcessOutput] {\n\tprocessor := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"image-processor\",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input ImageProcessInput) (*ImageProcessOutput, error) {\n\t\t\tresult, err := ProcessImage(input.ImageURL, input.Filters)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, fmt.Errorf(\"processing image: %w\", err)\n\t\t\t}\n\n\t\t\tif result[\"url\"] == \"\" {\n\t\t\t\treturn nil, fmt.Errorf(\"processing failed to generate URL\")\n\t\t\t}\n\n\t\t\toutput := &ImageProcessOutput{\n\t\t\t\tProcessedURL: result[\"url\"].(string),\n\t\t\t\tMetadata: struct {\n\t\t\t\t\tSize int `json:\"size\"`\n\t\t\t\t\tFormat string `json:\"format\"`\n\t\t\t\t\tAppliedFilters []string `json:\"applied_filters\"`\n\t\t\t\t}{\n\t\t\t\t\tSize: result[\"size\"].(int),\n\t\t\t\t\tFormat: result[\"format\"].(string),\n\t\t\t\t\tAppliedFilters: input.Filters,\n\t\t\t\t},\n\t\t\t}\n\n\t\t\treturn output, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\t// Example of running a task\n\t_ = func() error {\n\t\t// > Running a task\n\t\tresult, err := processor.Run(context.Background(), ImageProcessInput{\n\t\t\tImageURL: \"https://example.com/image.png\",\n\t\t\tFilters: []string{\"blur\"},\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Printf(\"Result: %+v\\n\", result)\n\t\treturn nil\n\t}\n\n\t// Example of registering a task on a worker\n\t_ = func() error {\n\t\t// > Declaring a Worker\n\t\tw, err := hatchet.Worker(v1worker.WorkerOpts{\n\t\t\tName: \"image-processor-worker\",\n\t\t\tWorkflows: []workflow.WorkflowBase{\n\t\t\t\tprocessor,\n\t\t\t},\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = w.StartBlocking(context.Background())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n\n\treturn processor\n}\n\nfunc RunMergentTask() error {\n\n\treturn nil\n}\n\nfunc RunningTasks(hatchet v1.HatchetClient) error {\n\t// > Running a task (Mergent)\n\ttask := struct {\n\t\tRequest struct {\n\t\t\tURL string `json:\"url\"`\n\t\t\tBody string `json:\"body\"`\n\t\t\tHeaders map[string]string `json:\"headers\"`\n\t\t} `json:\"request\"`\n\t\tName string `json:\"name\"`\n\t\tQueue string `json:\"queue\"`\n\t}{\n\t\tRequest: struct {\n\t\t\tURL string `json:\"url\"`\n\t\t\tBody string `json:\"body\"`\n\t\t\tHeaders map[string]string `json:\"headers\"`\n\t\t}{\n\t\t\tURL: \"https://example.com\",\n\t\t\tHeaders: map[string]string{\n\t\t\t\t\"Authorization\": \"fake-secret-token\",\n\t\t\t\t\"Content-Type\": \"application/json\",\n\t\t\t},\n\t\t\tBody: \"Hello, world!\",\n\t\t},\n\t\tName: \"4cf95241-fa19-47ef-8a67-71e483747649\",\n\t\tQueue: \"default\",\n\t}\n\n\ttaskJSON, err := 
json.Marshal(task)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"marshaling task: %w\", err)\n\t}\n\n\treq, err := http.NewRequest(http.MethodPost, \"https://api.mergent.co/v2/tasks\", bytes.NewBuffer(taskJSON))\n\tif err != nil {\n\t\treturn fmt.Errorf(\"creating request: %w\", err)\n\t}\n\n\treq.Header.Add(\"Authorization\", \"Bearer \")\n\treq.Header.Add(\"Content-Type\", \"application/json\")\n\n\tclient := &http.Client{}\n\tres, err := client.Do(req)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"sending request: %w\", err)\n\t}\n\tdefer res.Body.Close()\n\n\tfmt.Printf(\"Mergent task created with status: %d\\n\", res.StatusCode)\n\n\t// > Running a task (Hatchet)\n\tprocessor := ImageProcessor(hatchet)\n\n\tresult, err := processor.Run(context.Background(), ImageProcessInput{\n\t\tImageURL: \"https://example.com/image.png\",\n\t\tFilters: []string{\"blur\"},\n\t})\n\tif err != nil {\n\t\treturn err\n\t}\n\tfmt.Printf(\"Result: %+v\\n\", result)\n\n\t// > Scheduling tasks (Hatchet)\n\t// Schedule the task to run at a specific time\n\tscheduleRef, err := processor.Schedule(\n\t\tcontext.Background(),\n\t\ttime.Now().Add(time.Second*10),\n\t\tImageProcessInput{\n\t\t\tImageURL: \"https://example.com/image.png\",\n\t\t\tFilters: []string{\"blur\"},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\t// or schedule to run every hour\n\tcronRef, err := processor.Cron(\n\t\tcontext.Background(),\n\t\t\"run-hourly\",\n\t\t\"0 * * * *\",\n\t\tImageProcessInput{\n\t\t\tImageURL: \"https://example.com/image.png\",\n\t\t\tFilters: []string{\"blur\"},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn err\n\t}\n\n\tfmt.Printf(\"Scheduled tasks with refs: %+v, %+v\\n\", scheduleRef, cronRef)\n\treturn nil\n}\n", - "source": "out/go/migration-guides/mergent.go", - "blocks": { - "before_mergent": { - "start": 30, - "stop": 51 - }, - "after_hatchet": { - "start": 54, - "stop": 99 - }, - "running_a_task": { - "start": 104, - "stop": 112 - }, - "declaring_a_worker": { - "start": 118, - "stop": 131 - }, - "running_a_task_mergent": { - "start": 144, - "stop": 189 - }, - "running_a_task_hatchet": { - "start": 192, - "stop": 201 - }, - "scheduling_tasks_hatchet": { - "start": 204, - "stop": 226 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/quickstart/cmd/index.ts b/frontend/docs/lib/generated/snips/go/quickstart/cmd/index.ts deleted file mode 100644 index 4f46a5516..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/cmd/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import * as run from './run'; -import * as worker from './worker'; - -export { run }; -export { worker }; diff --git a/frontend/docs/lib/generated/snips/go/quickstart/cmd/run/index.ts b/frontend/docs/lib/generated/snips/go/quickstart/cmd/run/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/cmd/run/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/quickstart/cmd/run/main.ts b/frontend/docs/lib/generated/snips/go/quickstart/cmd/run/main.ts deleted file mode 100644 index 9e9eb2444..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/cmd/run/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\n\thatchet_client 
\"github.com/hatchet-dev/hatchet/pkg/examples/quickstart/hatchet_client\"\n\tworkflows \"github.com/hatchet-dev/hatchet/pkg/examples/quickstart/workflows\"\n)\n\nfunc main() {\n\thatchet, err := hatchet_client.HatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsimple := workflows.FirstTask(hatchet)\n\n\tresult, err := simple.Run(context.Background(), workflows.SimpleInput{\n\t\tMessage: \"Hello, World!\",\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(\n\t\t\"Finished running task, and got the transformed message! The transformed message is:\",\n\t\tresult.ToLower.TransformedMessage,\n\t)\n}\n", - "source": "out/go/quickstart/cmd/run/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/quickstart/cmd/worker/index.ts b/frontend/docs/lib/generated/snips/go/quickstart/cmd/worker/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/cmd/worker/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/quickstart/cmd/worker/main.ts b/frontend/docs/lib/generated/snips/go/quickstart/cmd/worker/main.ts deleted file mode 100644 index 822f114b7..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/cmd/worker/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\thatchet_client \"github.com/hatchet-dev/hatchet/pkg/examples/quickstart/hatchet_client\"\n\tworkflows \"github.com/hatchet-dev/hatchet/pkg/examples/quickstart/workflows\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/worker\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n)\n\nfunc main() {\n\n\thatchet, err := hatchet_client.HatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := hatchet.Worker(\n\t\tworker.WorkerOpts{\n\t\t\tName: \"first-worker\",\n\t\t\tWorkflows: []workflow.WorkflowBase{\n\t\t\t\tworkflows.FirstTask(hatchet),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// we construct an interrupt context to handle Ctrl+C\n\t// you can pass in your own context.Context here to the worker\n\tinterruptCtx, cancel := cmdutils.NewInterruptContext()\n\n\tdefer cancel()\n\n\terr = worker.StartBlocking(interruptCtx)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n", - "source": "out/go/quickstart/cmd/worker/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/quickstart/hatchet_client/hatchet_client.ts b/frontend/docs/lib/generated/snips/go/quickstart/hatchet_client/hatchet_client.ts deleted file mode 100644 index 5adbcd3f0..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/hatchet_client/hatchet_client.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package hatchet_client\n\nimport (\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/joho/godotenv\"\n)\n\nfunc HatchetClient() (v1.HatchetClient, error) {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\treturn v1.NewHatchetClient()\n}\n", - "source": "out/go/quickstart/hatchet_client/hatchet_client.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git 
a/frontend/docs/lib/generated/snips/go/quickstart/hatchet_client/index.ts b/frontend/docs/lib/generated/snips/go/quickstart/hatchet_client/index.ts deleted file mode 100644 index f04a55eaa..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/hatchet_client/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import hatchet_client from './hatchet_client'; - -export { hatchet_client } diff --git a/frontend/docs/lib/generated/snips/go/quickstart/index.ts b/frontend/docs/lib/generated/snips/go/quickstart/index.ts deleted file mode 100644 index 26684ed22..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import * as cmd from './cmd'; -import * as hatchet_client from './hatchet_client'; -import * as workflows from './workflows'; - -export { cmd }; -export { hatchet_client }; -export { workflows }; diff --git a/frontend/docs/lib/generated/snips/go/quickstart/workflows/first_task.ts b/frontend/docs/lib/generated/snips/go/quickstart/workflows/first_task.ts deleted file mode 100644 index 50c9cf936..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/workflows/first_task.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package workflows\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype SimpleInput struct {\n\tMessage string `json:\"message\"`\n}\n\ntype LowerOutput struct {\n\tTransformedMessage string `json:\"transformed_message\"`\n}\n\ntype SimpleResult struct {\n\tToLower LowerOutput\n}\n\nfunc FirstTask(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[SimpleInput, SimpleResult] {\n\tsimple := factory.NewWorkflow[SimpleInput, SimpleResult](\n\t\tcreate.WorkflowCreateOpts[SimpleInput]{\n\t\t\tName: \"first-task\",\n\t\t},\n\t\thatchet,\n\t)\n\n\tsimple.Task(\n\t\tcreate.WorkflowTask[SimpleInput, SimpleResult]{\n\t\t\tName: \"first-task\",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input SimpleInput) (any, error) {\n\t\t\tfmt.Println(\"first-task task called\")\n\t\t\treturn &LowerOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn simple\n}\n", - "source": "out/go/quickstart/workflows/first_task.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/quickstart/workflows/index.ts b/frontend/docs/lib/generated/snips/go/quickstart/workflows/index.ts deleted file mode 100644 index 5b66d3461..000000000 --- a/frontend/docs/lib/generated/snips/go/quickstart/workflows/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import first_task from './first_task'; - -export { first_task } diff --git a/frontend/docs/lib/generated/snips/go/run/all.ts b/frontend/docs/lib/generated/snips/go/run/all.ts deleted file mode 100644 index a931e3ac2..000000000 --- a/frontend/docs/lib/generated/snips/go/run/all.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"math/rand\"\n\t\"os\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n\tv1_workflows 
\"github.com/hatchet-dev/hatchet/examples/go/workflows\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/rest\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/joho/godotenv\"\n\t\"github.com/oapi-codegen/runtime/types\"\n)\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Get workflow name from command line arguments\n\tvar workflowName string\n\tif len(os.Args) > 1 {\n\t\tworkflowName = os.Args[1]\n\t\tfmt.Println(\"workflow name provided:\", workflowName)\n\t} else {\n\t\tfmt.Println(\"No workflow name provided. Defaulting to 'simple'\")\n\t\tworkflowName = \"simple\"\n\t}\n\n\tctx := context.Background()\n\n\t// Define workflow runners map\n\trunnerMap := map[string]func() error{\n\t\t\"simple\": func() error {\n\t\t\tsimple := v1_workflows.Simple(hatchet)\n\t\t\tresult, err := simple.Run(ctx, v1_workflows.SimpleInput{\n\t\t\t\tMessage: \"Hello, World!\",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(result.TransformedMessage)\n\t\t\treturn nil\n\t\t},\n\t\t\"child\": func() error {\n\t\t\tparent := v1_workflows.Parent(hatchet)\n\n\t\t\tresult, err := parent.Run(ctx, v1_workflows.ParentInput{\n\t\t\t\tN: 50,\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(\"Parent result:\", result.Result)\n\t\t\treturn nil\n\t\t},\n\t\t\"dag\": func() error {\n\t\t\tdag := v1_workflows.DagWorkflow(hatchet)\n\t\t\tresult, err := dag.Run(ctx, v1_workflows.DagInput{\n\t\t\t\tMessage: \"Hello, DAG!\",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(result.Step1.Step)\n\t\t\tfmt.Println(result.Step2.Step)\n\t\t\treturn nil\n\t\t},\n\t\t\"sleep\": func() error {\n\t\t\tsleep := v1_workflows.DurableSleep(hatchet)\n\t\t\t_, err := sleep.Run(ctx, v1_workflows.DurableSleepInput{\n\t\t\t\tMessage: \"Hello, Sleep!\",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(\"Sleep workflow completed\")\n\t\t\treturn nil\n\t\t},\n\t\t\"durable-event\": func() error {\n\t\t\tdurableEventWorkflow := v1_workflows.DurableEvent(hatchet)\n\t\t\trun, err := durableEventWorkflow.RunNoWait(ctx, v1_workflows.DurableEventInput{\n\t\t\t\tMessage: \"Hello, World!\",\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\n\t\t\t_, err = hatchet.Runs().Cancel(ctx, rest.V1CancelTaskRequest{\n\t\t\t\tExternalIds: &[]types.UUID{uuid.MustParse(run.WorkflowRunId())},\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\n\t\t\t_, err = run.Result()\n\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Received expected error:\", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println(\"Cancellation workflow completed unexpectedly\")\n\t\t\treturn nil\n\t\t},\n\t\t\"timeout\": func() error {\n\t\t\ttimeout := v1_workflows.Timeout(hatchet)\n\t\t\t_, err := timeout.Run(ctx, v1_workflows.TimeoutInput{})\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Received expected error:\", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println(\"Timeout workflow completed unexpectedly\")\n\t\t\treturn nil\n\t\t},\n\t\t\"sticky\": func() error {\n\t\t\tsticky := v1_workflows.Sticky(hatchet)\n\t\t\tresult, err := sticky.Run(ctx, v1_workflows.StickyInput{})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(\"Value from child 
workflow:\", result.Result)\n\t\t\treturn nil\n\t\t},\n\t\t\"sticky-dag\": func() error {\n\t\t\tstickyDag := v1_workflows.StickyDag(hatchet)\n\t\t\tresult, err := stickyDag.Run(ctx, v1_workflows.StickyInput{})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(\"Value from task 1:\", result.StickyTask1.Result)\n\t\t\tfmt.Println(\"Value from task 2:\", result.StickyTask2.Result)\n\t\t\treturn nil\n\t\t},\n\t\t\"retries\": func() error {\n\t\t\tretries := v1_workflows.Retries(hatchet)\n\t\t\t_, err := retries.Run(ctx, v1_workflows.RetriesInput{})\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Received expected error:\", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println(\"Retries workflow completed unexpectedly\")\n\t\t\treturn nil\n\t\t},\n\t\t\"retries-count\": func() error {\n\t\t\tretriesCount := v1_workflows.RetriesWithCount(hatchet)\n\t\t\tresult, err := retriesCount.Run(ctx, v1_workflows.RetriesWithCountInput{})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(\"Result message:\", result.Message)\n\t\t\treturn nil\n\t\t},\n\t\t\"with-backoff\": func() error {\n\t\t\twithBackoff := v1_workflows.WithBackoff(hatchet)\n\t\t\t_, err := withBackoff.Run(ctx, v1_workflows.BackoffInput{})\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Received expected error:\", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println(\"WithBackoff workflow completed unexpectedly\")\n\t\t\treturn nil\n\t\t},\n\t\t\"non-retryable\": func() error {\n\t\t\tnonRetryable := v1_workflows.NonRetryableError(hatchet)\n\t\t\t_, err := nonRetryable.Run(ctx, v1_workflows.NonRetryableInput{})\n\t\t\tif err != nil {\n\t\t\t\tfmt.Println(\"Received expected error:\", err)\n\t\t\t\treturn nil // We expect an error here\n\t\t\t}\n\t\t\tfmt.Println(\"NonRetryable workflow completed unexpectedly\")\n\t\t\treturn nil\n\t\t},\n\t\t\"on-cron\": func() error {\n\t\t\tcronTask := v1_workflows.OnCron(hatchet)\n\t\t\tresult, err := cronTask.Run(ctx, v1_workflows.OnCronInput{\n\t\t\t\tMessage: \"Hello, Cron!\",\n\t\t\t})\n\t\t\tif err != nil {\n\t\t\t\treturn err\n\t\t\t}\n\t\t\tfmt.Println(\"Cron task result:\", result.Job.TransformedMessage)\n\t\t\treturn nil\n\t\t},\n\t\t\"priority\": func() error {\n\n\t\t\tnRuns := 10\n\t\t\tpriorityWorkflow := v1_workflows.Priority(hatchet)\n\n\t\t\tfor i := 0; i < nRuns; i++ {\n\t\t\t\trandomPrio := int32(rand.Intn(3) + 1)\n\n\t\t\t\tfmt.Println(\"Random priority:\", randomPrio)\n\n\t\t\t\tpriorityWorkflow.RunNoWait(ctx, v1_workflows.PriorityInput{\n\t\t\t\t\tUserId: \"1234\",\n\t\t\t\t}, client.WithRunMetadata(map[string]int32{\"priority\": randomPrio}), client.WithPriority(randomPrio))\n\t\t\t}\n\n\t\t\ttriggerAt := time.Now().Add(time.Second + 5)\n\n\t\t\tfor i := 0; i < nRuns; i++ {\n\t\t\t\trandomPrio := int32(rand.Intn(3) + 1)\n\n\t\t\t\tfmt.Println(\"Random priority:\", randomPrio)\n\n\t\t\t\tpriorityWorkflow.Schedule(ctx, triggerAt, v1_workflows.PriorityInput{\n\t\t\t\t\tUserId: \"1234\",\n\t\t\t\t}, client.WithRunMetadata(map[string]int32{\"priority\": randomPrio}), client.WithPriority(randomPrio))\n\t\t\t}\n\n\t\t\treturn nil\n\t\t},\n\t}\n\n\t// Lookup workflow runner from map\n\trunner, ok := runnerMap[workflowName]\n\tif !ok {\n\t\tfmt.Println(\"Invalid workflow name provided. 
Usage: go run examples/v1/run/simple.go [workflow-name]\")\n\t\tfmt.Println(\"Available workflows:\", getAvailableWorkflows(runnerMap))\n\t\tos.Exit(1)\n\t}\n\n\t// Run the selected workflow\n\terr = runner()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n\n// Helper function to get available workflows as a formatted string\nfunc getAvailableWorkflows(runnerMap map[string]func() error) string {\n\tvar workflows string\n\tcount := 0\n\tfor name := range runnerMap {\n\t\tif count > 0 {\n\t\t\tworkflows += \", \"\n\t\t}\n\t\tworkflows += fmt.Sprintf(\"'%s'\", name)\n\t\tcount++\n\t}\n\treturn workflows\n}\n", - "source": "out/go/run/all.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/run/bulk.ts b/frontend/docs/lib/generated/snips/go/run/bulk.ts deleted file mode 100644 index b77a3152f..000000000 --- a/frontend/docs/lib/generated/snips/go/run/bulk.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\n\tv1_workflows \"github.com/hatchet-dev/hatchet/examples/go/workflows\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/joho/godotenv\"\n)\n\nfunc bulk() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tctx := context.Background()\n\t// > Bulk Run Tasks\n\tsimple := v1_workflows.Simple(hatchet)\n\tbulkRunIds, err := simple.RunBulkNoWait(ctx, []v1_workflows.SimpleInput{\n\t\t{\n\t\t\tMessage: \"Hello, World!\",\n\t\t},\n\t\t{\n\t\t\tMessage: \"Hello, Moon!\",\n\t\t},\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(bulkRunIds)\n}\n", - "source": "out/go/run/bulk.go", - "blocks": { - "bulk_run_tasks": { - "start": 26, - "stop": 40 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/run/cron.ts b/frontend/docs/lib/generated/snips/go/run/cron.ts deleted file mode 100644 index 6693dd2da..000000000 --- a/frontend/docs/lib/generated/snips/go/run/cron.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\n\tv1_workflows \"github.com/hatchet-dev/hatchet/examples/go/workflows\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/rest\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/joho/godotenv\"\n)\n\nfunc cron() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// > Create\n\tsimple := v1_workflows.Simple(hatchet)\n\n\tctx := context.Background()\n\n\tresult, err := simple.Cron(\n\t\tctx,\n\t\t\"daily-run\",\n\t\t\"0 0 * * *\",\n\t\tv1_workflows.SimpleInput{\n\t\t\tMessage: \"Hello, World!\",\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// it may be useful to save the cron id for later\n\tfmt.Println(result.Metadata.Id)\n\n\t// > Delete\n\thatchet.Crons().Delete(ctx, result.Metadata.Id)\n\n\t// > List\n\tcrons, err := hatchet.Crons().List(ctx, rest.CronWorkflowListParams{\n\t\tAdditionalMetadata: &[]string{\"user:daily-run\"},\n\t})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tfmt.Println(crons)\n}\n", - "source": "out/go/run/cron.go", - "blocks": { - "create": { - "start": 25, - "stop": 43 - }, - "delete": { - 
"start": 46, - "stop": 46 - }, - "list": { - "start": 49, - "stop": 51 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/run/event.ts b/frontend/docs/lib/generated/snips/go/run/event.ts deleted file mode 100644 index cf09fc274..000000000 --- a/frontend/docs/lib/generated/snips/go/run/event.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\n\t\"github.com/google/uuid\"\n\t\"github.com/joho/godotenv\"\n\n\tv1_workflows \"github.com/hatchet-dev/hatchet/examples/go/workflows\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/rest\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n)\n\nfunc event() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// > Pushing an Event\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t\"simple-event:create\",\n\t\tv1_workflows.SimpleInput{\n\t\t\tMessage: \"Hello, World!\",\n\t\t},\n\t)\n\n\t// > Create a filter\n\tpayload := map[string]interface{}{\n\t\t\"main_character\": \"Anna\",\n\t\t\"supporting_character\": \"Stiva\",\n\t\t\"location\": \"Moscow\",\n\t}\n\n\t_, err = hatchet.Filters().Create(\n\t\tcontext.Background(),\n\t\trest.V1CreateFilterRequest{\n\t\t\tWorkflowId: uuid.New(),\n\t\t\tExpression: \"input.shouldSkip == false\",\n\t\t\tScope: \"foobarbaz\",\n\t\t\tPayload: &payload,\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > Skip a run\n\tskipPayload := map[string]interface{}{\n\t\t\"shouldSkip\": true,\n\t}\n\tskipScope := \"foobarbaz\"\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t\"simple-event:create\",\n\t\tskipPayload,\n\t\tclient.WithFilterScope(&skipScope),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > Trigger a run\n\ttriggerPayload := map[string]interface{}{\n\t\t\"shouldSkip\": false,\n\t}\n\ttriggerScope := \"foobarbaz\"\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t\"simple-event:create\",\n\t\ttriggerPayload,\n\t\tclient.WithFilterScope(&triggerScope),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n", - "source": "out/go/run/event.go", - "blocks": { - "pushing_an_event": { - "start": 27, - "stop": 33 - }, - "create_a_filter": { - "start": 36, - "stop": 50 - }, - "skip_a_run": { - "start": 57, - "stop": 66 - }, - "trigger_a_run": { - "start": 73, - "stop": 82 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/run/index.ts b/frontend/docs/lib/generated/snips/go/run/index.ts deleted file mode 100644 index 7cfb2a899..000000000 --- a/frontend/docs/lib/generated/snips/go/run/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import all from './all'; -import bulk from './bulk'; -import cron from './cron'; -import event from './event'; -import priority from './priority'; -import simple from './simple'; - -export { all } -export { bulk } -export { cron } -export { event } -export { priority } -export { simple } diff --git a/frontend/docs/lib/generated/snips/go/run/priority.ts b/frontend/docs/lib/generated/snips/go/run/priority.ts deleted file mode 100644 index 3d1262728..000000000 --- a/frontend/docs/lib/generated/snips/go/run/priority.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - 
"language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\tv1_workflows \"github.com/hatchet-dev/hatchet/examples/go/workflows\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/joho/godotenv\"\n)\n\nfunc priority() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tctx := context.Background()\n\n\tpriorityWorkflow := v1_workflows.Priority(hatchet)\n\n\t// > Running a Task with Priority\n\tpriority := int32(3)\n\n\trunId, err := priorityWorkflow.RunNoWait(ctx, v1_workflows.PriorityInput{\n\t\tUserId: \"1234\",\n\t}, client.WithPriority(priority))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(runId)\n\n\t// > Schedule and cron\n\tschedulePriority := int32(3)\n\trunAt := time.Now().Add(time.Minute)\n\n\tscheduledRunId, _ := priorityWorkflow.Schedule(ctx, runAt, v1_workflows.PriorityInput{\n\t\tUserId: \"1234\",\n\t}, client.WithPriority(schedulePriority))\n\n\tcronId, _ := priorityWorkflow.Cron(ctx, \"my-cron\", \"* * * * *\", v1_workflows.PriorityInput{\n\t\tUserId: \"1234\",\n\t}, client.WithPriority(schedulePriority))\n\n\tfmt.Println(scheduledRunId)\n\tfmt.Println(cronId)\n\n}\n", - "source": "out/go/run/priority.go", - "blocks": { - "running_a_task_with_priority": { - "start": 31, - "stop": 35 - }, - "schedule_and_cron": { - "start": 44, - "stop": 53 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/run/simple.ts b/frontend/docs/lib/generated/snips/go/run/simple.ts deleted file mode 100644 index 4b2c7e015..000000000 --- a/frontend/docs/lib/generated/snips/go/run/simple.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"sync\"\n\n\tv1_workflows \"github.com/hatchet-dev/hatchet/examples/go/workflows\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/joho/godotenv\"\n)\n\nfunc simple() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tctx := context.Background()\n\t// > Running a Task\n\tsimple := v1_workflows.Simple(hatchet)\n\tresult, err := simple.Run(ctx, v1_workflows.SimpleInput{\n\t\tMessage: \"Hello, World!\",\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(result.TransformedMessage)\n\n\t// > Running Multiple Tasks\n\tvar results []string\n\tvar resultsMutex sync.Mutex\n\tvar errs []error\n\tvar errsMutex sync.Mutex\n\n\twg := sync.WaitGroup{}\n\twg.Add(2)\n\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tresult, err := simple.Run(ctx, v1_workflows.SimpleInput{\n\t\t\tMessage: \"Hello, World!\",\n\t\t})\n\n\t\tif err != nil {\n\t\t\terrsMutex.Lock()\n\t\t\terrs = append(errs, err)\n\t\t\terrsMutex.Unlock()\n\t\t\treturn\n\t\t}\n\n\t\tresultsMutex.Lock()\n\t\tresults = append(results, result.TransformedMessage)\n\t\tresultsMutex.Unlock()\n\t}()\n\n\tgo func() {\n\t\tdefer wg.Done()\n\t\tresult, err := simple.Run(ctx, v1_workflows.SimpleInput{\n\t\t\tMessage: \"Hello, Moon!\",\n\t\t})\n\n\t\tif err != nil {\n\t\t\terrsMutex.Lock()\n\t\t\terrs = append(errs, err)\n\t\t\terrsMutex.Unlock()\n\t\t\treturn\n\t\t}\n\n\t\tresultsMutex.Lock()\n\t\tresults = append(results, 
result.TransformedMessage)\n\t\tresultsMutex.Unlock()\n\t}()\n\n\twg.Wait()\n\n\t// > Running a Task Without Waiting\n\tsimple = v1_workflows.Simple(hatchet)\n\trunRef, err := simple.RunNoWait(ctx, v1_workflows.SimpleInput{\n\t\tMessage: \"Hello, World!\",\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// The Run Ref Exposes an ID that can be used to wait for the task to complete\n\t// or check on the status of the task\n\trunId := runRef.RunId()\n\tfmt.Println(runId)\n\n\t// > Subscribing to results\n\t// finally, we can wait for the task to complete and get the result\n\tfinalResult, err := runRef.Result()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfmt.Println(finalResult)\n}\n", - "source": "out/go/run/simple.go", - "blocks": { - "running_a_task": { - "start": 27, - "stop": 36 - }, - "running_multiple_tasks": { - "start": 39, - "stop": 83 - }, - "running_a_task_without_waiting": { - "start": 86, - "stop": 98 - }, - "subscribing_to_results": { - "start": 101, - "stop": 108 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/streaming/consumer/index.ts b/frontend/docs/lib/generated/snips/go/streaming/consumer/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/streaming/consumer/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/streaming/consumer/main.ts b/frontend/docs/lib/generated/snips/go/streaming/consumer/main.ts deleted file mode 100644 index 8bf59d797..000000000 --- a/frontend/docs/lib/generated/snips/go/streaming/consumer/main.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\n\t\"github.com/hatchet-dev/hatchet/examples/go/streaming/shared\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n)\n\n// > Consume\nfunc main() {\n\thatchet, err := v1.NewHatchetClient()\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to create Hatchet client: %v\", err)\n\t}\n\n\tctx := context.Background()\n\n\tstreamingWorkflow := shared.StreamingWorkflow(hatchet)\n\n\tworkflowRun, err := streamingWorkflow.RunNoWait(ctx, shared.StreamTaskInput{})\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to run workflow: %v\", err)\n\t}\n\n\tid := workflowRun.RunId()\n\tstream, err := hatchet.Runs().SubscribeToStream(ctx, id)\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to subscribe to stream: %v\", err)\n\t}\n\n\tfor content := range stream {\n\t\tfmt.Print(content)\n\t}\n\n\tfmt.Println(\"\\nStreaming completed!\")\n}\n\n", - "source": "out/go/streaming/consumer/main.go", - "blocks": { - "consume": { - "start": 13, - "stop": 40 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/streaming/index.ts b/frontend/docs/lib/generated/snips/go/streaming/index.ts deleted file mode 100644 index 94182226d..000000000 --- a/frontend/docs/lib/generated/snips/go/streaming/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import * as consumer from './consumer'; -import * as server from './server'; -import * as shared from './shared'; -import * as worker from './worker'; - -export { consumer }; -export { server }; -export { shared }; -export { worker }; diff --git a/frontend/docs/lib/generated/snips/go/streaming/server/index.ts b/frontend/docs/lib/generated/snips/go/streaming/server/index.ts deleted file mode 100644 index 
0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/streaming/server/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/streaming/server/main.ts b/frontend/docs/lib/generated/snips/go/streaming/server/main.ts deleted file mode 100644 index cba929521..000000000 --- a/frontend/docs/lib/generated/snips/go/streaming/server/main.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"net/http\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/examples/go/streaming/shared\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n)\n\n// > Server\nfunc main() {\n\thatchet, err := v1.NewHatchetClient()\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to create Hatchet client: %v\", err)\n\t}\n\n\tstreamingWorkflow := shared.StreamingWorkflow(hatchet)\n\n\thttp.HandleFunc(\"/stream\", func(w http.ResponseWriter, r *http.Request) {\n\t\tctx := context.Background()\n\n\t\tw.Header().Set(\"Content-Type\", \"text/plain\")\n\t\tw.Header().Set(\"Cache-Control\", \"no-cache\")\n\t\tw.Header().Set(\"Connection\", \"keep-alive\")\n\n\t\tworkflowRun, err := streamingWorkflow.RunNoWait(ctx, shared.StreamTaskInput{})\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tstream, err := hatchet.Runs().SubscribeToStream(ctx, workflowRun.RunId())\n\t\tif err != nil {\n\t\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\t\treturn\n\t\t}\n\n\t\tflusher, _ := w.(http.Flusher)\n\t\tfor content := range stream {\n\t\t\tfmt.Fprint(w, content)\n\t\t\tif flusher != nil {\n\t\t\t\tflusher.Flush()\n\t\t\t}\n\t\t}\n\t})\n\n\tserver := &http.Server{\n\t\tAddr: \":8000\",\n\t\tReadTimeout: 5 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t}\n\n\tif err := server.ListenAndServe(); err != nil {\n\t\tlog.Println(\"Failed to start server:\", err)\n\t}\n}\n\n", - "source": "out/go/streaming/server/main.go", - "blocks": { - "server": { - "start": 15, - "stop": 61 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/streaming/shared/index.ts b/frontend/docs/lib/generated/snips/go/streaming/shared/index.ts deleted file mode 100644 index beb5d567c..000000000 --- a/frontend/docs/lib/generated/snips/go/streaming/shared/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import task from './task'; - -export { task } diff --git a/frontend/docs/lib/generated/snips/go/streaming/shared/task.ts b/frontend/docs/lib/generated/snips/go/streaming/shared/task.ts deleted file mode 100644 index 04f55f9e9..000000000 --- a/frontend/docs/lib/generated/snips/go/streaming/shared/task.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package shared\n\nimport (\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype StreamTaskInput struct{}\n\ntype StreamTaskOutput struct {\n\tMessage string `json:\"message\"`\n}\n\n// > Streaming\nconst annaKarenina = `\nHappy families are all alike; every unhappy family is unhappy in its own way.\n\nEverything was in 
confusion in the Oblonskys' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him.\n`\n\nfunc createChunks(content string, n int) []string {\n\tvar chunks []string\n\tfor i := 0; i < len(content); i += n {\n\t\tend := i + n\n\t\tif end > len(content) {\n\t\t\tend = len(content)\n\t\t}\n\t\tchunks = append(chunks, content[i:end])\n\t}\n\treturn chunks\n}\n\nfunc StreamTask(ctx worker.HatchetContext, input StreamTaskInput) (*StreamTaskOutput, error) {\n\ttime.Sleep(2 * time.Second)\n\n\tchunks := createChunks(annaKarenina, 10)\n\n\tfor _, chunk := range chunks {\n\t\tctx.PutStream(chunk)\n\t\ttime.Sleep(200 * time.Millisecond)\n\t}\n\n\treturn &StreamTaskOutput{\n\t\tMessage: \"Streaming completed\",\n\t}, nil\n}\n\n\nfunc StreamingWorkflow(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[StreamTaskInput, StreamTaskOutput] {\n\treturn factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"stream-example\",\n\t\t},\n\t\tStreamTask,\n\t\thatchet,\n\t)\n}\n", - "source": "out/go/streaming/shared/task.go", - "blocks": { - "streaming": { - "start": 20, - "stop": 52 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/streaming/worker/index.ts b/frontend/docs/lib/generated/snips/go/streaming/worker/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/streaming/worker/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/streaming/worker/main.ts b/frontend/docs/lib/generated/snips/go/streaming/worker/main.ts deleted file mode 100644 index 990d6ae30..000000000 --- a/frontend/docs/lib/generated/snips/go/streaming/worker/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"log\"\n\n\t\"github.com/hatchet-dev/hatchet/examples/go/streaming/shared\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\tv1worker \"github.com/hatchet-dev/hatchet/pkg/v1/worker\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n)\n\nfunc main() {\n\thatchet, err := v1.NewHatchetClient()\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to create Hatchet client: %v\", err)\n\t}\n\n\tstreamingWorkflow := shared.StreamingWorkflow(hatchet)\n\n\tw, err := hatchet.Worker(v1worker.WorkerOpts{\n\t\tName: \"streaming-worker\",\n\t\tWorkflows: []workflow.WorkflowBase{\n\t\t\tstreamingWorkflow,\n\t\t},\n\t})\n\tif err != nil {\n\t\tlog.Fatalf(\"Failed to create worker: %v\", err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.NewInterruptContext()\n\tdefer cancel()\n\n\tlog.Println(\"Starting streaming worker...\")\n\n\tif err := w.StartBlocking(interruptCtx); err != nil {\n\t\tlog.Println(\"Worker failed:\", err)\n\t}\n}\n", - "source": "out/go/streaming/worker/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/worker/index.ts b/frontend/docs/lib/generated/snips/go/worker/index.ts deleted file mode 100644 index da6ef660a..000000000 --- a/frontend/docs/lib/generated/snips/go/worker/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import start from './start'; - -export { start } diff --git 
a/frontend/docs/lib/generated/snips/go/worker/start.ts b/frontend/docs/lib/generated/snips/go/worker/start.ts deleted file mode 100644 index c715dec76..000000000 --- a/frontend/docs/lib/generated/snips/go/worker/start.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"os\"\n\t\"time\"\n\n\tv1_workflows \"github.com/hatchet-dev/hatchet/examples/go/workflows\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/worker\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/joho/godotenv\"\n)\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// Get workflow name from command line arguments\n\tvar workflowName string\n\tif len(os.Args) > 1 {\n\t\tworkflowName = os.Args[1]\n\t\tfmt.Println(\"workflow name provided:\", workflowName)\n\t}\n\n\t// Define workflows map\n\tworkflowMap := map[string][]workflow.WorkflowBase{\n\t\t\"dag\": {v1_workflows.DagWorkflow(hatchet)},\n\t\t\"on-failure\": {v1_workflows.OnFailure(hatchet)},\n\t\t\"simple\": {v1_workflows.Simple(hatchet)},\n\t\t\"sleep\": {v1_workflows.DurableSleep(hatchet)},\n\t\t\"child\": {v1_workflows.Parent(hatchet), v1_workflows.Child(hatchet)},\n\t\t\"cancellation\": {v1_workflows.Cancellation(hatchet)},\n\t\t\"timeout\": {v1_workflows.Timeout(hatchet)},\n\t\t\"sticky\": {v1_workflows.Sticky(hatchet), v1_workflows.StickyDag(hatchet), v1_workflows.Child(hatchet)},\n\t\t\"retries\": {v1_workflows.Retries(hatchet), v1_workflows.RetriesWithCount(hatchet), v1_workflows.WithBackoff(hatchet)},\n\t\t\"on-cron\": {v1_workflows.OnCron(hatchet)},\n\t\t\"non-retryable\": {v1_workflows.NonRetryableError(hatchet)},\n\t\t\"priority\": {v1_workflows.Priority(hatchet)},\n\t}\n\n\t// Add an \"all\" option that registers all workflows\n\tallWorkflows := []workflow.WorkflowBase{}\n\tfor _, wfs := range workflowMap {\n\t\tallWorkflows = append(allWorkflows, wfs...)\n\t}\n\tworkflowMap[\"all\"] = allWorkflows\n\n\t// Lookup workflow from map\n\tworkflow, ok := workflowMap[workflowName]\n\tif !ok {\n\t\tfmt.Println(\"Invalid workflow name provided. 
Usage: go run examples/v1/worker/start.go [workflow-name]\")\n\t\tfmt.Println(\"Available workflows:\", getAvailableWorkflows(workflowMap))\n\t\tos.Exit(1)\n\t}\n\n\tvar slots int\n\tif workflowName == \"priority\" {\n\t\tslots = 1\n\t} else {\n\t\tslots = 100\n\t}\n\n\tworker, err := hatchet.Worker(\n\t\tworker.WorkerOpts{\n\t\t\tName: fmt.Sprintf(\"%s-worker\", workflowName),\n\t\t\tWorkflows: workflow,\n\t\t\tSlots: slots,\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.NewInterruptContext()\n\n\terr = worker.StartBlocking(interruptCtx)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tgo func() {\n\t\ttime.Sleep(10 * time.Second)\n\t\tcancel()\n\t}()\n}\n\n// Helper function to get available workflows as a formatted string\nfunc getAvailableWorkflows(workflowMap map[string][]workflow.WorkflowBase) string {\n\tvar workflows string\n\tcount := 0\n\tfor name := range workflowMap {\n\t\tif count > 0 {\n\t\t\tworkflows += \", \"\n\t\t}\n\t\tworkflows += fmt.Sprintf(\"'%s'\", name)\n\t\tcount++\n\t}\n\treturn workflows\n}\n", - "source": "out/go/worker/start.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/cancellations.ts b/frontend/docs/lib/generated/snips/go/workflows/cancellations.ts deleted file mode 100644 index 6aa76444f..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/cancellations.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"errors\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype CancellationInput struct{}\ntype CancellationResult struct {\n\tCompleted bool\n}\n\nfunc Cancellation(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[CancellationInput, CancellationResult] {\n\n\t// > Cancelled task\n\t// Create a task that sleeps for 10 seconds and checks if it was cancelled\n\tcancellation := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"cancellation-task\",\n\t\t}, func(ctx worker.HatchetContext, input CancellationInput) (*CancellationResult, error) {\n\t\t\t// Sleep for 10 seconds\n\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t// Check if the context was cancelled\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil, errors.New(\"Task was cancelled\")\n\t\t\tdefault:\n\t\t\t\t// Continue execution\n\t\t\t}\n\n\t\t\treturn &CancellationResult{\n\t\t\t\tCompleted: true,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn cancellation\n}\n", - "source": "out/go/workflows/cancellations.go", - "blocks": { - "cancelled_task": { - "start": 22, - "stop": 43 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/child-workflows.ts b/frontend/docs/lib/generated/snips/go/workflows/child-workflows.ts deleted file mode 100644 index a37327ca2..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/child-workflows.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 
\"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype ChildInput struct {\n\tN int `json:\"n\"`\n}\n\ntype ValueOutput struct {\n\tValue int `json:\"value\"`\n}\n\ntype ParentInput struct {\n\tN int `json:\"n\"`\n}\n\ntype SumOutput struct {\n\tResult int `json:\"result\"`\n}\n\nfunc Child(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ChildInput, ValueOutput] {\n\tchild := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"child\",\n\t\t}, func(ctx worker.HatchetContext, input ChildInput) (*ValueOutput, error) {\n\t\t\treturn &ValueOutput{\n\t\t\t\tValue: input.N,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn child\n}\n\nfunc Parent(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ParentInput, SumOutput] {\n\n\tchild := Child(hatchet)\n\tparent := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"parent\",\n\t\t}, func(ctx worker.HatchetContext, input ParentInput) (*SumOutput, error) {\n\n\t\t\tsum := 0\n\n\t\t\t// Launch child workflows in parallel\n\t\t\tresults := make([]*ValueOutput, 0, input.N)\n\t\t\tfor j := 0; j < input.N; j++ {\n\t\t\t\tresult, err := child.RunAsChild(ctx, ChildInput{N: j}, workflow.RunAsChildOpts{})\n\n\t\t\t\tif err != nil {\n\t\t\t\t\t// firstErr = err\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\n\t\t\t\tresults = append(results, result)\n\n\t\t\t}\n\n\t\t\t// Sum results from all children\n\t\t\tfor _, result := range results {\n\t\t\t\tsum += result.Value\n\t\t\t}\n\n\t\t\treturn &SumOutput{\n\t\t\t\tResult: sum,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn parent\n}\n", - "source": "out/go/workflows/child-workflows.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/complex-conditions.ts b/frontend/docs/lib/generated/snips/go/workflows/complex-conditions.ts deleted file mode 100644 index 661f33722..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/complex-conditions.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"math/rand\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker/condition\"\n)\n\n// StepOutput represents the output of most tasks in this workflow\ntype StepOutput struct {\n\tRandomNumber int `json:\"randomNumber\"`\n}\n\n// RandomSum represents the output of the sum task\ntype RandomSum struct {\n\tSum int `json:\"sum\"`\n}\n\n// TaskConditionWorkflowResult represents the aggregate output of all tasks\ntype TaskConditionWorkflowResult struct {\n\tStart StepOutput `json:\"start\"`\n\tWaitForSleep StepOutput `json:\"waitForSleep\"`\n\tWaitForEvent StepOutput `json:\"waitForEvent\"`\n\tSkipOnEvent StepOutput `json:\"skipOnEvent\"`\n\tLeftBranch StepOutput `json:\"leftBranch\"`\n\tRightBranch StepOutput `json:\"rightBranch\"`\n\tSum RandomSum `json:\"sum\"`\n}\n\n// taskOpts is a type alias for workflow task options\ntype taskOpts = create.WorkflowTask[struct{}, TaskConditionWorkflowResult]\n\nfunc TaskConditionWorkflow(hatchet v1.HatchetClient) 
workflow.WorkflowDeclaration[struct{}, TaskConditionWorkflowResult] {\n\t// > Create a workflow\n\twf := factory.NewWorkflow[struct{}, TaskConditionWorkflowResult](\n\t\tcreate.WorkflowCreateOpts[struct{}]{\n\t\t\tName: \"TaskConditionWorkflow\",\n\t\t},\n\t\thatchet,\n\t)\n\n\t// > Add base task\n\tstart := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: \"start\",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add wait for sleep\n\twaitForSleep := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: \"waitForSleep\",\n\t\t\tParents: []create.NamedTask{start},\n\t\t\tWaitFor: condition.SleepCondition(time.Second * 10),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add skip on event\n\tskipOnEvent := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: \"skipOnEvent\",\n\t\t\tParents: []create.NamedTask{start},\n\t\t\tWaitFor: condition.SleepCondition(time.Second * 30),\n\t\t\tSkipIf: condition.UserEventCondition(\"skip_on_event:skip\", \"true\"),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add branching\n\tleftBranch := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: \"leftBranch\",\n\t\t\tParents: []create.NamedTask{waitForSleep},\n\t\t\tSkipIf: condition.ParentCondition(waitForSleep, \"output.randomNumber > 50\"),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\trightBranch := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: \"rightBranch\",\n\t\t\tParents: []create.NamedTask{waitForSleep},\n\t\t\tSkipIf: condition.ParentCondition(waitForSleep, \"output.randomNumber <= 50\"),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add wait for event\n\twaitForEvent := wf.Task(\n\t\ttaskOpts{\n\t\t\tName: \"waitForEvent\",\n\t\t\tParents: []create.NamedTask{start},\n\t\t\tWaitFor: condition.Or(\n\t\t\t\tcondition.SleepCondition(time.Minute),\n\t\t\t\tcondition.UserEventCondition(\"wait_for_event:start\", \"true\"),\n\t\t\t),\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\treturn &StepOutput{\n\t\t\t\tRandomNumber: rand.Intn(100) + 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Add sum\n\twf.Task(\n\t\ttaskOpts{\n\t\t\tName: \"sum\",\n\t\t\tParents: []create.NamedTask{\n\t\t\t\tstart,\n\t\t\t\twaitForSleep,\n\t\t\t\twaitForEvent,\n\t\t\t\tskipOnEvent,\n\t\t\t\tleftBranch,\n\t\t\t\trightBranch,\n\t\t\t},\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ struct{}) (interface{}, error) {\n\t\t\tvar startOutput StepOutput\n\t\t\tif err := ctx.ParentOutput(start, &startOutput); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tvar waitForSleepOutput StepOutput\n\t\t\tif err := ctx.ParentOutput(waitForSleep, &waitForSleepOutput); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tvar waitForEventOutput StepOutput\n\t\t\tctx.ParentOutput(waitForEvent, &waitForEventOutput)\n\n\t\t\t// Handle potentially skipped tasks\n\t\t\tvar skipOnEventOutput StepOutput\n\t\t\tvar four int\n\n\t\t\terr := ctx.ParentOutput(skipOnEvent, 
&skipOnEventOutput)\n\n\t\t\tif err != nil {\n\t\t\t\tfour = 0\n\t\t\t} else {\n\t\t\t\tfour = skipOnEventOutput.RandomNumber\n\t\t\t}\n\n\t\t\tvar leftBranchOutput StepOutput\n\t\t\tvar five int\n\n\t\t\terr = ctx.ParentOutput(leftBranch, leftBranchOutput)\n\t\t\tif err != nil {\n\t\t\t\tfive = 0\n\t\t\t} else {\n\t\t\t\tfive = leftBranchOutput.RandomNumber\n\t\t\t}\n\n\t\t\tvar rightBranchOutput StepOutput\n\t\t\tvar six int\n\n\t\t\terr = ctx.ParentOutput(rightBranch, rightBranchOutput)\n\t\t\tif err != nil {\n\t\t\t\tsix = 0\n\t\t\t} else {\n\t\t\t\tsix = rightBranchOutput.RandomNumber\n\t\t\t}\n\n\t\t\treturn &RandomSum{\n\t\t\t\tSum: startOutput.RandomNumber + waitForEventOutput.RandomNumber +\n\t\t\t\t\twaitForSleepOutput.RandomNumber + four + five + six,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn wf\n}\n", - "source": "out/go/workflows/complex-conditions.go", - "blocks": { - "create_a_workflow": { - "start": 41, - "stop": 46 - }, - "add_base_task": { - "start": 49, - "stop": 58 - }, - "add_wait_for_sleep": { - "start": 61, - "stop": 72 - }, - "add_skip_on_event": { - "start": 75, - "stop": 87 - }, - "add_branching": { - "start": 90, - "stop": 114 - }, - "add_wait_for_event": { - "start": 117, - "stop": 131 - }, - "add_sum": { - "start": 134, - "stop": 197 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/concurrency-rr.ts b/frontend/docs/lib/generated/snips/go/workflows/concurrency-rr.ts deleted file mode 100644 index a4a136e5c..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/concurrency-rr.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"math/rand\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype ConcurrencyInput struct {\n\tMessage string\n\tTier string\n\tAccount string\n}\n\ntype TransformedOutput struct {\n\tTransformedMessage string\n}\n\nfunc ConcurrencyRoundRobin(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ConcurrencyInput, TransformedOutput] {\n\t// > Concurrency Strategy With Key\n\tvar maxRuns int32 = 1\n\tstrategy := types.GroupRoundRobin\n\n\tconcurrency := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"simple-concurrency\",\n\t\t\tConcurrency: []*types.Concurrency{\n\t\t\t\t{\n\t\t\t\t\tExpression: \"input.GroupKey\",\n\t\t\t\t\tMaxRuns: &maxRuns,\n\t\t\t\t\tLimitStrategy: &strategy,\n\t\t\t\t},\n\t\t\t},\n\t\t}, func(ctx worker.HatchetContext, input ConcurrencyInput) (*TransformedOutput, error) {\n\t\t\t// Random sleep between 200ms and 1000ms\n\t\t\ttime.Sleep(time.Duration(200+rand.Intn(800)) * time.Millisecond)\n\n\t\t\treturn &TransformedOutput{\n\t\t\t\tTransformedMessage: input.Message,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn concurrency\n}\n\nfunc MultipleConcurrencyKeys(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[ConcurrencyInput, TransformedOutput] {\n\t// > Multiple Concurrency Keys\n\tstrategy := types.GroupRoundRobin\n\tvar maxRuns int32 = 20\n\n\tconcurrency := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"simple-concurrency\",\n\t\t\tConcurrency: 
[]*types.Concurrency{\n\t\t\t\t{\n\t\t\t\t\tExpression: \"input.Tier\",\n\t\t\t\t\tMaxRuns: &maxRuns,\n\t\t\t\t\tLimitStrategy: &strategy,\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\tExpression: \"input.Account\",\n\t\t\t\t\tMaxRuns: &maxRuns,\n\t\t\t\t\tLimitStrategy: &strategy,\n\t\t\t\t},\n\t\t\t},\n\t\t}, func(ctx worker.HatchetContext, input ConcurrencyInput) (*TransformedOutput, error) {\n\t\t\t// Random sleep between 200ms and 1000ms\n\t\t\ttime.Sleep(time.Duration(200+rand.Intn(800)) * time.Millisecond)\n\n\t\t\treturn &TransformedOutput{\n\t\t\t\tTransformedMessage: input.Message,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn concurrency\n}\n", - "source": "out/go/workflows/concurrency-rr.go", - "blocks": { - "concurrency_strategy_with_key": { - "start": 27, - "stop": 49 - }, - "multiple_concurrency_keys": { - "start": 56, - "stop": 83 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/dag-with-conditions.ts b/frontend/docs/lib/generated/snips/go/workflows/dag-with-conditions.ts deleted file mode 100644 index ae9153184..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/dag-with-conditions.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"fmt\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype DagWithConditionsInput struct {\n\tMessage string\n}\n\ntype DagWithConditionsResult struct {\n\tStep1 SimpleOutput\n\tStep2 SimpleOutput\n}\n\ntype conditionOpts = create.WorkflowTask[DagWithConditionsInput, DagWithConditionsResult]\n\nfunc DagWithConditionsWorkflow(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[DagWithConditionsInput, DagWithConditionsResult] {\n\n\tsimple := factory.NewWorkflow[DagWithConditionsInput, DagWithConditionsResult](\n\t\tcreate.WorkflowCreateOpts[DagWithConditionsInput]{\n\t\t\tName: \"simple-dag\",\n\t\t},\n\t\thatchet,\n\t)\n\n\tstep1 := simple.Task(\n\t\tconditionOpts{\n\t\t\tName: \"Step1\",\n\t\t}, func(ctx worker.HatchetContext, input DagWithConditionsInput) (interface{}, error) {\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\tsimple.Task(\n\t\tconditionOpts{\n\t\t\tName: \"Step2\",\n\t\t\tParents: []create.NamedTask{\n\t\t\t\tstep1,\n\t\t\t},\n\t\t}, func(ctx worker.HatchetContext, input DagWithConditionsInput) (interface{}, error) {\n\n\t\t\tvar step1Output SimpleOutput\n\t\t\terr := ctx.ParentOutput(step1, &step1Output)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tfmt.Println(step1Output.Step)\n\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 2,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn simple\n}\n", - "source": "out/go/workflows/dag-with-conditions.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/dag.ts b/frontend/docs/lib/generated/snips/go/workflows/dag.ts deleted file mode 100644 index f5e2955e5..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/dag.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport 
(\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype DagInput struct {\n\tMessage string\n}\n\ntype SimpleOutput struct {\n\tStep int\n}\n\ntype DagResult struct {\n\tStep1 SimpleOutput\n\tStep2 SimpleOutput\n}\n\nfunc DagWorkflow(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[DagInput, DagResult] {\n\t// > Declaring a Workflow\n\tsimple := factory.NewWorkflow[DagInput, DagResult](\n\t\tcreate.WorkflowCreateOpts[DagInput]{\n\t\t\tName: \"simple-dag\",\n\n\t\t},\n\t\thatchet,\n\t)\n\n\t// > Defining a Task\n\tsimple.Task(\n\t\tcreate.WorkflowTask[DagInput, DagResult]{\n\t\t\tName: \"step\",\n\t\t}, func(ctx worker.HatchetContext, input DagInput) (interface{}, error) {\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\t// > Adding a Task with a parent\n\tstep1 := simple.Task(\n\t\tcreate.WorkflowTask[DagInput, DagResult]{\n\t\t\tName: \"step-1\",\n\t\t}, func(ctx worker.HatchetContext, input DagInput) (interface{}, error) {\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 1,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\tsimple.Task(\n\t\tcreate.WorkflowTask[DagInput, DagResult]{\n\t\t\tName: \"step-2\",\n\t\t\tParents: []create.NamedTask{\n\t\t\t\tstep1,\n\t\t\t},\n\t\t}, func(ctx worker.HatchetContext, input DagInput) (interface{}, error) {\n\t\t\t// Get the output of the parent task\n\t\t\tvar step1Output SimpleOutput\n\t\t\terr := ctx.ParentOutput(step1, &step1Output)\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\treturn &SimpleOutput{\n\t\t\t\tStep: 2,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn simple\n}\n", - "source": "out/go/workflows/dag.go", - "blocks": { - "declaring_a_workflow": { - "start": 26, - "stop": 32 - }, - "defining_a_task": { - "start": 35, - "stop": 43 - }, - "adding_a_task_with_a_parent": { - "start": 46, - "stop": 74 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/durable-event.ts b/frontend/docs/lib/generated/snips/go/workflows/durable-event.ts deleted file mode 100644 index 63f1a6700..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/durable-event.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype DurableEventInput struct {\n\tMessage string\n}\n\ntype EventData struct {\n\tMessage string\n}\n\ntype DurableEventOutput struct {\n\tData EventData\n}\n\nfunc DurableEvent(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[DurableEventInput, DurableEventOutput] {\n\t// > Durable Event\n\tdurableEventTask := factory.NewDurableTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"durable-event\",\n\t\t},\n\t\tfunc(ctx worker.DurableHatchetContext, input DurableEventInput) (*DurableEventOutput, error) {\n\t\t\teventData, err := ctx.WaitForEvent(\"user:update\", \"\")\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tv := EventData{}\n\t\t\terr = eventData.Unmarshal(&v)\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, 
err\n\t\t\t}\n\n\t\t\treturn &DurableEventOutput{\n\t\t\t\tData: v,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\tfactory.NewDurableTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"durable-event\",\n\t\t},\n\t\tfunc(ctx worker.DurableHatchetContext, input DurableEventInput) (*DurableEventOutput, error) {\n\t\t\t// > Durable Event With Filter\n\t\t\teventData, err := ctx.WaitForEvent(\"user:update\", \"input.user_id == '1234'\")\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\tv := EventData{}\n\t\t\terr = eventData.Unmarshal(&v)\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\treturn &DurableEventOutput{\n\t\t\t\tData: v,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn durableEventTask\n}\n", - "source": "out/go/workflows/durable-event.go", - "blocks": { - "durable_event": { - "start": 25, - "stop": 48 - }, - "durable_event_with_filter": { - "start": 56, - "stop": 56 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/durable-sleep.ts b/frontend/docs/lib/generated/snips/go/workflows/durable-sleep.ts deleted file mode 100644 index 0f85528e0..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/durable-sleep.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"strings\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype DurableSleepInput struct {\n\tMessage string\n}\n\ntype DurableSleepOutput struct {\n\tTransformedMessage string\n}\n\nfunc DurableSleep(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[DurableSleepInput, DurableSleepOutput] {\n\t// > Durable Sleep\n\tsimple := factory.NewDurableTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"durable-sleep\",\n\t\t},\n\t\tfunc(ctx worker.DurableHatchetContext, input DurableSleepInput) (*DurableSleepOutput, error) {\n\t\t\t_, err := ctx.SleepFor(10 * time.Second)\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\treturn &DurableSleepOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn simple\n}\n", - "source": "out/go/workflows/durable-sleep.go", - "blocks": { - "durable_sleep": { - "start": 24, - "stop": 40 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/index.ts b/frontend/docs/lib/generated/snips/go/workflows/index.ts deleted file mode 100644 index dc4ad8ab3..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/index.ts +++ /dev/null @@ -1,37 +0,0 @@ -import cancellations from './cancellations'; -import child_workflows from './child-workflows'; -import complex_conditions from './complex-conditions'; -import concurrency_rr from './concurrency-rr'; -import dag_with_conditions from './dag-with-conditions'; -import dag from './dag'; -import durable_event from './durable-event'; -import durable_sleep from './durable-sleep'; -import non_retryable_error from './non-retryable-error'; -import on_cron from './on-cron'; -import on_event from './on-event'; -import on_failure from './on-failure'; -import priority from './priority'; -import ratelimit from './ratelimit'; -import retries from 
'./retries'; -import simple from './simple'; -import sticky from './sticky'; -import timeouts from './timeouts'; - -export { cancellations } -export { child_workflows } -export { complex_conditions } -export { concurrency_rr } -export { dag_with_conditions } -export { dag } -export { durable_event } -export { durable_sleep } -export { non_retryable_error } -export { on_cron } -export { on_event } -export { on_failure } -export { priority } -export { ratelimit } -export { retries } -export { simple } -export { sticky } -export { timeouts } diff --git a/frontend/docs/lib/generated/snips/go/workflows/non-retryable-error.ts b/frontend/docs/lib/generated/snips/go/workflows/non-retryable-error.ts deleted file mode 100644 index 2d5556592..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/non-retryable-error.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"errors\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype NonRetryableInput struct{}\ntype NonRetryableResult struct{}\n\n// NonRetryableError returns a workflow which throws a non-retryable error\nfunc NonRetryableError(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[NonRetryableInput, NonRetryableResult] {\n\t// > Non Retryable Error\n\tretries := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"non-retryable-task\",\n\t\t\tRetries: 3,\n\t\t}, func(ctx worker.HatchetContext, input NonRetryableInput) (*NonRetryableResult, error) {\n\t\t\treturn nil, worker.NewNonRetryableError(errors.New(\"intentional failure\"))\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn retries\n}\n", - "source": "out/go/workflows/non-retryable-error.go", - "blocks": { - "non_retryable_error": { - "start": 19, - "stop": 27 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/on-cron.ts b/frontend/docs/lib/generated/snips/go/workflows/on-cron.ts deleted file mode 100644 index fde5b33f6..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/on-cron.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"strings\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype OnCronInput struct {\n\tMessage string `json:\"Message\"`\n}\n\ntype JobResult struct {\n\tTransformedMessage string `json:\"TransformedMessage\"`\n}\n\ntype OnCronOutput struct {\n\tJob JobResult `json:\"job\"`\n}\n\n// > Workflow Definition Cron Trigger\nfunc OnCron(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[OnCronInput, OnCronOutput] {\n\t// Create a standalone task that transforms a message\n\tcronTask := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"on-cron-task\",\n\t\t\t// 👀 add a cron expression\n\t\t\tOnCron: []string{\"0 0 * * *\"}, // Run every day at midnight\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input OnCronInput) (*OnCronOutput, error) {\n\t\t\treturn 
&OnCronOutput{\n\t\t\t\tJob: JobResult{\n\t\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t\t},\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn cronTask\n}\n\n", - "source": "out/go/workflows/on-cron.go", - "blocks": { - "workflow_definition_cron_trigger": { - "start": 26, - "stop": 46 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/on-event.ts b/frontend/docs/lib/generated/snips/go/workflows/on-event.ts deleted file mode 100644 index 94911605d..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/on-event.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"fmt\"\n\t\"strings\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype EventInput struct {\n\tMessage string\n}\n\ntype LowerTaskOutput struct {\n\tTransformedMessage string\n}\n\ntype UpperTaskOutput struct {\n\tTransformedMessage string\n}\n\n// > Run workflow on event\nconst SimpleEvent = \"simple-event:create\"\n\nfunc Lower(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[EventInput, LowerTaskOutput] {\n\treturn factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"lower\",\n\t\t\t// 👀 Declare the event that will trigger the workflow\n\t\t\tOnEvents: []string{SimpleEvent},\n\t\t}, func(ctx worker.HatchetContext, input EventInput) (*LowerTaskOutput, error) {\n\t\t\t// Transform the input message to lowercase\n\t\t\treturn &LowerTaskOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n}\n\n\n// > Accessing the filter payload\nfunc accessFilterPayload(ctx worker.HatchetContext, input EventInput) (*LowerTaskOutput, error) {\n\tfmt.Println(ctx.FilterPayload())\n\treturn &LowerTaskOutput{\n\t\tTransformedMessage: strings.ToLower(input.Message),\n\t}, nil\n}\n\n\n// > Declare with filter\nfunc LowerWithFilter(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[EventInput, LowerTaskOutput] {\n\treturn factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"lower\",\n\t\t\t// 👀 Declare the event that will trigger the workflow\n\t\t\tOnEvents: []string{SimpleEvent},\n\t\t\tDefaultFilters: []types.DefaultFilter{{\n\t\t\t\tExpression: \"true\",\n\t\t\t\tScope: \"example-scope\",\n\t\t\t\tPayload: map[string]interface{}{\n\t\t\t\t\t\"main_character\": \"Anna\",\n\t\t\t\t\t\"supporting_character\": \"Stiva\",\n\t\t\t\t\t\"location\": \"Moscow\"},\n\t\t\t}},\n\t\t}, accessFilterPayload,\n\t\thatchet,\n\t)\n}\n\n\nfunc Upper(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[EventInput, UpperTaskOutput] {\n\treturn factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"upper\",\n\t\t\tOnEvents: []string{SimpleEvent},\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input EventInput) (*UpperTaskOutput, error) {\n\t\t\treturn &UpperTaskOutput{\n\t\t\t\tTransformedMessage: strings.ToUpper(input.Message),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n}\n", - "source": "out/go/workflows/on-event.go", - "blocks": { - "run_workflow_on_event": { - "start": 28, - "stop": 45 - }, - "accessing_the_filter_payload": { - "start": 48, - "stop": 54 - }, - 
"declare_with_filter": { - "start": 57, - "stop": 75 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/on-failure.ts b/frontend/docs/lib/generated/snips/go/workflows/on-failure.ts deleted file mode 100644 index 7dbf0bbd4..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/on-failure.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"errors\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype AlwaysFailsOutput struct {\n\tTransformedMessage string\n}\n\ntype OnFailureOutput struct {\n\tFailureRan bool\n}\n\ntype OnFailureSuccessResult struct {\n\tAlwaysFails AlwaysFailsOutput\n}\n\nfunc OnFailure(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[any, OnFailureSuccessResult] {\n\n\tsimple := factory.NewWorkflow[any, OnFailureSuccessResult](\n\t\tcreate.WorkflowCreateOpts[any]{\n\t\t\tName: \"on-failure\",\n\t\t},\n\t\thatchet,\n\t)\n\n\tsimple.Task(\n\t\tcreate.WorkflowTask[any, OnFailureSuccessResult]{\n\t\t\tName: \"AlwaysFails\",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, _ any) (interface{}, error) {\n\t\t\treturn &AlwaysFailsOutput{\n\t\t\t\tTransformedMessage: \"always fails\",\n\t\t\t}, errors.New(\"always fails\")\n\t\t},\n\t)\n\n\tsimple.OnFailure(\n\t\tcreate.WorkflowOnFailureTask[any, OnFailureSuccessResult]{},\n\t\tfunc(ctx worker.HatchetContext, _ any) (interface{}, error) {\n\t\t\treturn &OnFailureOutput{\n\t\t\t\tFailureRan: true,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn simple\n}\n", - "source": "out/go/workflows/on-failure.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/priority.ts b/frontend/docs/lib/generated/snips/go/workflows/priority.ts deleted file mode 100644 index 7b81d0781..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/priority.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype PriorityInput struct {\n\tUserId string `json:\"userId\"`\n}\n\ntype PriorityOutput struct {\n\tTransformedMessage string `json:\"TransformedMessage\"`\n}\n\ntype Result struct {\n\tStep PriorityOutput\n}\n\nfunc Priority(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[PriorityInput, Result] {\n\t// Create a standalone task that transforms a message\n\n\t// > Default priority\n\tdefaultPriority := int32(1)\n\n\tworkflow := factory.NewWorkflow[PriorityInput, Result](\n\t\tcreate.WorkflowCreateOpts[PriorityInput]{\n\t\t\tName: \"priority\",\n\t\t\tDefaultPriority: &defaultPriority,\n\t\t},\n\t\thatchet,\n\t)\n\n\t// > Defining a Task\n\tworkflow.Task(\n\t\tcreate.WorkflowTask[PriorityInput, Result]{\n\t\t\tName: \"step\",\n\t\t}, func(ctx worker.HatchetContext, input PriorityInput) (interface{}, error) {\n\t\t\ttime.Sleep(time.Second * 
5)\n\t\t\treturn &PriorityOutput{\n\t\t\t\tTransformedMessage: input.UserId,\n\t\t\t}, nil\n\t\t},\n\t)\n\treturn workflow\n}\n\n", - "source": "out/go/workflows/priority.go", - "blocks": { - "default_priority": { - "start": 29, - "stop": 37 - }, - "defining_a_task": { - "start": 40, - "stop": 49 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/ratelimit.ts b/frontend/docs/lib/generated/snips/go/workflows/ratelimit.ts deleted file mode 100644 index 88a8cbd1a..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/ratelimit.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"strings\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/features\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype RateLimitInput struct {\n\tUserId string `json:\"userId\"`\n}\n\ntype RateLimitOutput struct {\n\tTransformedMessage string `json:\"TransformedMessage\"`\n}\n\nfunc upsertRateLimit(hatchet v1.HatchetClient) {\n\t// > Upsert Rate Limit\n\thatchet.RateLimits().Upsert(\n\t\tfeatures.CreateRatelimitOpts{\n\t\t\tKey: \"api-service-rate-limit\",\n\t\t\tLimit: 10,\n\t\t\tDuration: types.Second,\n\t\t},\n\t)\n}\n\n// > Static Rate Limit\nfunc StaticRateLimit(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[RateLimitInput, RateLimitOutput] {\n\t// Create a standalone task that transforms a message\n\n\t// define the parameters for the rate limit\n\trateLimitKey := \"api-service-rate-limit\"\n\tunits := 1\n\n\trateLimitTask := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"rate-limit-task\",\n\t\t\t// 👀 add a static rate limit\n\t\t\tRateLimits: []*types.RateLimit{\n\t\t\t\t{\n\t\t\t\t\tKey: rateLimitKey,\n\t\t\t\t\tUnits: &units,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input RateLimitInput) (*RateLimitOutput, error) {\n\t\t\treturn &RateLimitOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.UserId),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn rateLimitTask\n}\n\n\n// > Dynamic Rate Limit\nfunc RateLimit(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[RateLimitInput, RateLimitOutput] {\n\t// Create a standalone task that transforms a message\n\n\t// define the parameters for the rate limit\n\texpression := \"input.userId\"\n\tunits := 1\n\tduration := types.Second\n\n\trateLimitTask := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"rate-limit-task\",\n\t\t\t// 👀 add a dynamic rate limit\n\t\t\tRateLimits: []*types.RateLimit{\n\t\t\t\t{\n\t\t\t\t\tKeyExpr: &expression,\n\t\t\t\t\tUnits: &units,\n\t\t\t\t\tDuration: &duration,\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input RateLimitInput) (*RateLimitOutput, error) {\n\t\t\treturn &RateLimitOutput{\n\t\t\t\tTransformedMessage: strings.ToLower(input.UserId),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn rateLimitTask\n}\n\n", - "source": "out/go/workflows/ratelimit.go", - "blocks": { - "upsert_rate_limit": { - "start": 25, - "stop": 31 - }, - "static_rate_limit": { - "start": 35, - "stop": 63 - }, - "dynamic_rate_limit": { - "start": 66, - "stop": 96 - } - }, - 
"highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/retries.ts b/frontend/docs/lib/generated/snips/go/workflows/retries.ts deleted file mode 100644 index 648083151..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/retries.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"errors\"\n\t\"fmt\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype RetriesInput struct{}\ntype RetriesResult struct{}\n\n// Simple retries example that always fails\nfunc Retries(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[RetriesInput, RetriesResult] {\n\t// > Simple Step Retries\n\tretries := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"retries-task\",\n\t\t\tRetries: 3,\n\t\t}, func(ctx worker.HatchetContext, input RetriesInput) (*RetriesResult, error) {\n\t\t\treturn nil, errors.New(\"intentional failure\")\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn retries\n}\n\ntype RetriesWithCountInput struct{}\ntype RetriesWithCountResult struct {\n\tMessage string `json:\"message\"`\n}\n\n// Retries example that succeeds after a certain number of retries\nfunc RetriesWithCount(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[RetriesWithCountInput, RetriesWithCountResult] {\n\t// > Retries with Count\n\tretriesWithCount := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"fail-twice-task\",\n\t\t\tRetries: 3,\n\t\t}, func(ctx worker.HatchetContext, input RetriesWithCountInput) (*RetriesWithCountResult, error) {\n\t\t\t// Get the current retry count\n\t\t\tretryCount := ctx.RetryCount()\n\n\t\t\tfmt.Printf(\"Retry count: %d\\n\", retryCount)\n\n\t\t\tif retryCount < 2 {\n\t\t\t\treturn nil, errors.New(\"intentional failure\")\n\t\t\t}\n\n\t\t\treturn &RetriesWithCountResult{\n\t\t\t\tMessage: \"success\",\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn retriesWithCount\n}\n\ntype BackoffInput struct{}\ntype BackoffResult struct{}\n\n// Retries example with simple backoff (no configuration in this API version)\nfunc WithBackoff(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[BackoffInput, BackoffResult] {\n\t// > Retries with Backoff\n\twithBackoff := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"with-backoff-task\",\n\t\t\t// 👀 Maximum number of seconds to wait between retries\n\t\t\tRetries: 3,\n\t\t\t// 👀 Factor to increase the wait time between retries.\n\t\t\tRetryBackoffFactor: 2,\n\t\t\t// 👀 Maximum number of seconds to wait between retries\n\t\t\t// This sequence will be 2s, 4s, 8s, 10s, 10s, 10s... 
due to the maxSeconds limit\n\t\t\tRetryMaxBackoffSeconds: 10,\n\t\t}, func(ctx worker.HatchetContext, input BackoffInput) (*BackoffResult, error) {\n\t\t\treturn nil, errors.New(\"intentional failure\")\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn withBackoff\n}\n", - "source": "out/go/workflows/retries.go", - "blocks": { - "simple_step_retries": { - "start": 20, - "stop": 28 - }, - "retries_with_count": { - "start": 41, - "stop": 60 - }, - "retries_with_backoff": { - "start": 71, - "stop": 85 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/simple.ts b/frontend/docs/lib/generated/snips/go/workflows/simple.ts deleted file mode 100644 index e3204edb0..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/simple.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"strings\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\tv1worker \"github.com/hatchet-dev/hatchet/pkg/v1/worker\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype SimpleInput struct {\n\tMessage string\n}\ntype SimpleResult struct {\n\tTransformedMessage string\n}\n\nfunc Simple(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[SimpleInput, SimpleResult] {\n\n\t// Create a simple standalone task using the task factory\n\t// Note the use of typed generics for both input and output\n\n\t// > Declaring a Task\n\tsimple := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"simple-task\",\n\t\t}, func(ctx worker.HatchetContext, input SimpleInput) (*SimpleResult, error) {\n\t\t\t// Transform the input message to lowercase\n\t\t\treturn &SimpleResult{\n\t\t\t\tTransformedMessage: strings.ToLower(input.Message),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\t// Example of running a task\n\t_ = func() error {\n\t\t// > Running a Task\n\t\tresult, err := simple.Run(context.Background(), SimpleInput{Message: \"Hello, World!\"})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\tfmt.Println(result.TransformedMessage)\n\t\treturn nil\n\t}\n\n\t// Example of registering a task on a worker\n\t_ = func() error {\n\t\t// > Declaring a Worker\n\t\tw, err := hatchet.Worker(v1worker.WorkerOpts{\n\t\t\tName: \"simple-worker\",\n\t\t\tWorkflows: []workflow.WorkflowBase{\n\t\t\t\tsimple,\n\t\t\t},\n\t\t})\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\terr = w.StartBlocking(context.Background())\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t\treturn nil\n\t}\n\n\treturn simple\n}\n\nfunc ParentTask(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[SimpleInput, SimpleResult] {\n\n\t// > Spawning Tasks from within a Task\n\tsimple := Simple(hatchet)\n\n\tparent := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"parent-task\",\n\t\t}, func(ctx worker.HatchetContext, input SimpleInput) (*SimpleResult, error) {\n\n\t\t\t// Run the child task\n\t\t\tchild, err := workflow.RunChildWorkflow(ctx, simple, SimpleInput{Message: input.Message})\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\t// Transform the input message to lowercase\n\t\t\treturn &SimpleResult{\n\t\t\t\tTransformedMessage: child.TransformedMessage,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn parent\n}\n", - "source": 
"out/go/workflows/simple.go", - "blocks": { - "declaring_a_task": { - "start": 29, - "stop": 39 - }, - "running_a_task": { - "start": 44, - "stop": 48 - }, - "declaring_a_worker": { - "start": 55, - "stop": 67 - }, - "spawning_tasks_from_within_a_task": { - "start": 77, - "stop": 96 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/sticky.ts b/frontend/docs/lib/generated/snips/go/workflows/sticky.ts deleted file mode 100644 index d26654f83..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/sticky.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport (\n\t\"fmt\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype StickyInput struct{}\n\ntype StickyResult struct {\n\tResult string `json:\"result\"`\n}\n\ntype StickyDagResult struct {\n\tStickyTask1 StickyResult `json:\"sticky-task-1\"`\n\tStickyTask2 StickyResult `json:\"sticky-task-2\"`\n}\n\nfunc StickyDag(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[StickyInput, StickyDagResult] {\n\tstickyDag := factory.NewWorkflow[StickyInput, StickyDagResult](\n\t\tcreate.WorkflowCreateOpts[StickyInput]{\n\t\t\tName: \"sticky-dag\",\n\t\t},\n\t\thatchet,\n\t)\n\n\tstickyDag.Task(\n\t\tcreate.WorkflowTask[StickyInput, StickyDagResult]{\n\t\t\tName: \"sticky-task\",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input StickyInput) (interface{}, error) {\n\t\t\tworkerId := ctx.Worker().ID()\n\n\t\t\treturn &StickyResult{\n\t\t\t\tResult: workerId,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\tstickyDag.Task(\n\t\tcreate.WorkflowTask[StickyInput, StickyDagResult]{\n\t\t\tName: \"sticky-task-2\",\n\t\t},\n\t\tfunc(ctx worker.HatchetContext, input StickyInput) (interface{}, error) {\n\t\t\tworkerId := ctx.Worker().ID()\n\n\t\t\treturn &StickyResult{\n\t\t\t\tResult: workerId,\n\t\t\t}, nil\n\t\t},\n\t)\n\n\treturn stickyDag\n}\n\nfunc Sticky(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[StickyInput, StickyResult] {\n\tsticky := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"sticky-task\",\n\t\t\tRetries: 3,\n\t\t}, func(ctx worker.HatchetContext, input StickyInput) (*StickyResult, error) {\n\t\t\t// Run a child workflow on the same worker\n\t\t\tchildWorkflow := Child(hatchet)\n\t\t\tsticky := true\n\t\t\tchildResult, err := childWorkflow.RunAsChild(ctx, ChildInput{N: 1}, workflow.RunAsChildOpts{\n\t\t\t\tSticky: &sticky,\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\n\t\t\treturn &StickyResult{\n\t\t\t\tResult: fmt.Sprintf(\"child-result-%d\", childResult.Value),\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn sticky\n}\n", - "source": "out/go/workflows/sticky.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/workflows/timeouts.ts b/frontend/docs/lib/generated/snips/go/workflows/timeouts.ts deleted file mode 100644 index 9d99ae419..000000000 --- a/frontend/docs/lib/generated/snips/go/workflows/timeouts.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package v1_workflows\n\nimport 
(\n\t\"errors\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client/create\"\n\tv1 \"github.com/hatchet-dev/hatchet/pkg/v1\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/factory\"\n\t\"github.com/hatchet-dev/hatchet/pkg/v1/workflow\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype TimeoutInput struct{}\ntype TimeoutResult struct {\n\tCompleted bool\n}\n\nfunc Timeout(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[TimeoutInput, TimeoutResult] {\n\n\t// > Execution Timeout\n\t// Create a task with a timeout of 3 seconds that tries to sleep for 10 seconds\n\ttimeout := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"timeout-task\",\n\t\t\tExecutionTimeout: 3 * time.Second, // Task will timeout after 3 seconds\n\t\t}, func(ctx worker.HatchetContext, input TimeoutInput) (*TimeoutResult, error) {\n\t\t\t// Sleep for 10 seconds\n\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t// Check if the context was cancelled due to timeout\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil, errors.New(\"TASK TIMED OUT\")\n\t\t\tdefault:\n\t\t\t\t// Continue execution\n\t\t\t}\n\n\t\t\treturn &TimeoutResult{\n\t\t\t\tCompleted: true,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn timeout\n}\n\nfunc RefreshTimeout(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[TimeoutInput, TimeoutResult] {\n\n\t// > Refresh Timeout\n\ttimeout := factory.NewTask(\n\t\tcreate.StandaloneTask{\n\t\t\tName: \"timeout-task\",\n\t\t\tExecutionTimeout: 3 * time.Second, // Task will timeout after 3 seconds\n\t\t}, func(ctx worker.HatchetContext, input TimeoutInput) (*TimeoutResult, error) {\n\n\t\t\t// Refresh the timeout by 10 seconds (new timeout will be 13 seconds)\n\t\t\tctx.RefreshTimeout(\"10s\")\n\n\t\t\t// Sleep for 10 seconds\n\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t// Check if the context was cancelled due to timeout\n\t\t\tselect {\n\t\t\tcase <-ctx.Done():\n\t\t\t\treturn nil, errors.New(\"TASK TIMED OUT\")\n\t\t\tdefault:\n\t\t\t\t// Continue execution\n\t\t\t}\n\n\t\t\treturn &TimeoutResult{\n\t\t\t\tCompleted: true,\n\t\t\t}, nil\n\t\t},\n\t\thatchet,\n\t)\n\n\treturn timeout\n}\n", - "source": "out/go/workflows/timeouts.go", - "blocks": { - "execution_timeout": { - "start": 22, - "stop": 44 - }, - "refresh_timeout": { - "start": 52, - "stop": 77 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/index.ts deleted file mode 100644 index a265089e4..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main } -export { run } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/main.ts deleted file mode 100644 index 970bdbfdd..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string 
`json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\tcleanup, err := run()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"cleanup() error = %v\", err))\n\t}\n}\n", - "source": "out/go/z_v0/assignment-affinity/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/run.ts deleted file mode 100644 index f8b8af1eb..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithLabels(map[string]interface{}{\n\t\t\t\"model\": \"fancy-ai-model-v2\",\n\t\t\t\"memory\": 1024,\n\t\t}),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\"user:create:affinity\"),\n\t\t\tName: \"affinity\",\n\t\t\tDescription: \"affinity\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tmodel := ctx.Worker().GetLabels()[\"model\"]\n\n\t\t\t\t\tif model != \"fancy-ai-model-v3\" {\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t\"model\": nil,\n\t\t\t\t\t\t})\n\t\t\t\t\t\t// Do something to load the model\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t\"model\": \"fancy-ai-model-v3\",\n\t\t\t\t\t\t})\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).\n\t\t\t\t\tSetName(\"step-one\").\n\t\t\t\t\tSetDesiredLabels(map[string]*types.DesiredWorkerLabel{\n\t\t\t\t\t\t\"model\": {\n\t\t\t\t\t\t\tValue: \"fancy-ai-model-v3\",\n\t\t\t\t\t\t\tWeight: 10,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t\"memory\": {\n\t\t\t\t\t\t\tValue: 512,\n\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\tComparator: types.ComparatorPtr(types.WorkerLabelComparator_GREATER_THAN),\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\"pushing event\")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"user:create:affinity\",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": 
"out/go/z_v0/assignment-affinity/run.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/index.ts deleted file mode 100644 index a265089e4..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main } -export { run } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/main.ts deleted file mode 100644 index adf3f178e..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\tcleanup, err := run()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"cleanup() error = %v\", err))\n\t}\n}\n", - "source": "out/go/z_v0/assignment-sticky/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/run.ts deleted file mode 100644 index 34720499e..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/run.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\t// > StickyWorker\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\"user:create:sticky\"),\n\t\t\tName: \"sticky\",\n\t\t\tDescription: \"sticky\",\n\t\t\t// 👀 Specify a sticky strategy when declaring the workflow\n\t\t\tStickyStrategy: types.StickyStrategyPtr(types.StickyStrategy_HARD),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tsticky := true\n\n\t\t\t\t\t_, err = ctx.SpawnWorkflow(\"sticky-child\", nil, &worker.SpawnWorkflowOpts{\n\t\t\t\t\t\tSticky: &sticky,\n\t\t\t\t\t})\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"error spawning workflow: %w\", err)\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, 
nil\n\t\t\t\t}).SetName(\"step-one\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-two\").AddParents(\"step-one\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-three\").AddParents(\"step-two\"),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\t// > StickyChild\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: \"sticky-child\",\n\t\t\tDescription: \"sticky\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\"pushing event\")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"user:create:sticky\",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/assignment-sticky/run.go", - "blocks": { - "stickyworker": { - "start": 30, - "stop": 68 - }, - "stickychild": { - "start": 75, - "stop": 90 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/bulk_imports/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/bulk_imports/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/bulk_imports/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/bulk_imports/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/bulk_imports/main.ts deleted file mode 100644 index 9841795e0..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/bulk_imports/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t_, err = run()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n}\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := 
worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\terr = testSvc.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\"user:create:bulk\"),\n\t\t\tName: \"bulk\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-one\")\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Username is: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tvar events []client.EventWithAdditionalMetadata\n\n\t// 20000 times to test the bulk push\n\n\tfor i := 0; i < 20000; i++ {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234 \" + fmt.Sprint(i),\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test \" + fmt.Sprint(i),\n\t\t\t},\n\t\t}\n\t\tevents = append(events, client.EventWithAdditionalMetadata{\n\t\t\tEvent: testEvent,\n\t\t\tAdditionalMetadata: map[string]string{\"hello\": \"world \" + fmt.Sprint(i)},\n\t\t\tKey: \"user:create:bulk\",\n\t\t})\n\t}\n\n\tlog.Printf(\"pushing event user:create:bulk\")\n\n\terr = c.Event().BulkPush(\n\t\tcontext.Background(),\n\t\tevents,\n\t)\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t}\n\n\treturn nil, nil\n\n}\n", - "source": "out/go/z_v0/bulk_imports/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/index.ts deleted file mode 100644 index a265089e4..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main } -export { run } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/main.ts deleted file mode 100644 index b65bffe15..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\tworkflowName := \"simple-bulk-workflow\"\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error creating client: %w\", err))\n\t}\n\n\t_, err = 
registerWorkflow(c, workflowName)\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error registering workflow: %w\", err))\n\t}\n\n\tquantity := 999\n\n\toverallStart := time.Now()\n\titerations := 10\n\tfor i := 0; i < iterations; i++ {\n\t\tstartTime := time.Now()\n\n\t\tfmt.Printf(\"Running the %dth bulk workflow \\n\", i)\n\n\t\terr = runBulk(workflowName, quantity)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t\tfmt.Printf(\"Time taken to queue %dth bulk workflow: %v\\n\", i, time.Since(startTime))\n\t}\n\tfmt.Println(\"Overall time taken: \", time.Since(overallStart))\n\tfmt.Printf(\"That is %d workflows per second\\n\", int(float64(quantity*iterations)/time.Since(overallStart).Seconds()))\n\tfmt.Println(\"Starting the worker\")\n\n\t// err = runSingles(workflowName, quantity)\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error creating client: %w\", err))\n\t}\n\n\t// I want to start the workflow worker here\n\n\tw, err := registerWorkflow(c, workflowName)\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error creating worker: %w\", err))\n\t}\n\n\tcleanup, err := w.Start()\n\tfmt.Println(\"Starting the worker\")\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error starting worker: %w\", err))\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \"my-key\", nil\n}\n\nfunc registerWorkflow(c client.Client, workflowName string) (w *worker.Worker, err error) {\n\n\tw, err = worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\"user:create:bulk-simple\"),\n\t\t\tName: workflowName,\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(200).LimitStrategy(types.GroupRoundRobin),\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-one\")\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Username is: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\"step-one\", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-two\")\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Above message is: \" + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-two\").AddParents(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\treturn w, nil\n}\n", - "source": "out/go/z_v0/bulk_workflows/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/run.ts deleted file mode 100644 index 40747ae7e..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/bulk_workflows/run.ts +++ /dev/null @@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"log\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n)\n\nfunc runBulk(workflowName string, quantity int) error {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tlog.Printf(\"pushing %d workflows in bulk\", quantity)\n\n\tvar workflows []*client.WorkflowRun\n\tfor i := 0; i < quantity; i++ {\n\t\tdata := map[string]interface{}{\n\t\t\t\"username\": fmt.Sprintf(\"echo-test-%d\", i),\n\t\t\t\"user_id\": fmt.Sprintf(\"1234-%d\", i),\n\t\t}\n\t\tworkflows = append(workflows, &client.WorkflowRun{\n\t\t\tName: workflowName,\n\t\t\tInput: data,\n\t\t\tOptions: []client.RunOptFunc{\n\t\t\t\t// setting a dedupe key so these shouldn't all run\n\t\t\t\tclient.WithRunMetadata(map[string]interface{}{\n\t\t\t\t\t// \"dedupe\": \"dedupe1\",\n\t\t\t\t}),\n\t\t\t},\n\t\t})\n\n\t}\n\n\touts, err := c.Admin().BulkRunWorkflow(workflows)\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t}\n\n\tfor _, out := range outs {\n\t\tlog.Printf(\"workflow run id: %v\", out)\n\t}\n\n\treturn nil\n\n}\n\nfunc runSingles(workflowName string, quantity int) error {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tlog.Printf(\"pushing %d single workflows\", quantity)\n\n\tvar workflows []*client.WorkflowRun\n\tfor i := 0; i < quantity; i++ {\n\t\tdata := map[string]interface{}{\n\t\t\t\"username\": fmt.Sprintf(\"echo-test-%d\", i),\n\t\t\t\"user_id\": fmt.Sprintf(\"1234-%d\", i),\n\t\t}\n\t\tworkflows = append(workflows, &client.WorkflowRun{\n\t\t\tName: workflowName,\n\t\t\tInput: data,\n\t\t\tOptions: []client.RunOptFunc{\n\t\t\t\tclient.WithRunMetadata(map[string]interface{}{\n\t\t\t\t\t// \"dedupe\": \"dedupe1\",\n\t\t\t\t}),\n\t\t\t},\n\t\t})\n\t}\n\n\tfor _, wf := range workflows {\n\n\t\tgo func() {\n\t\t\tout, err := c.Admin().RunWorkflow(wf.Name, wf.Input, wf.Options...)\n\t\t\tif err != nil {\n\t\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t\t}\n\n\t\t\tlog.Printf(\"workflow run id: %v\", out)\n\t\t}()\n\n\t}\n\n\treturn nil\n}\n", - "source": "out/go/z_v0/bulk_workflows/run.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/cancellation/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/cancellation/index.ts deleted file mode 100644 index a265089e4..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/cancellation/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main } -export { run } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/cancellation/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/cancellation/main.ts deleted file mode 100644 index ff0554e8e..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/cancellation/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc 
main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tch := cmdutils.InterruptChan()\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"cleanup() error = %v\", err))\n\t}\n}\n", - "source": "out/go/z_v0/cancellation/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/cancellation/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/cancellation/run.ts deleted file mode 100644 index 2e5d1646a..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/cancellation/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com/google/uuid\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/rest\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\"user:create:cancellation\"),\n\t\t\tName: \"cancellation\",\n\t\t\tDescription: \"cancellation\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tselect {\n\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\tevents <- \"done\"\n\t\t\t\t\t\tlog.Printf(\"context cancelled\")\n\t\t\t\t\t\treturn nil, nil\n\t\t\t\t\tcase <-time.After(30 * time.Second):\n\t\t\t\t\t\tlog.Printf(\"workflow never cancelled\")\n\t\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\t\tMessage: \"done\",\n\t\t\t\t\t\t}, nil\n\t\t\t\t\t}\n\t\t\t\t}).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\"pushing event\")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"user:create:cancellation\",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\n\t\tworkflowName := \"cancellation\"\n\n\t\tworkflows, err := c.API().WorkflowListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowListParams{\n\t\t\tName: &workflowName,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error listing workflows: %w\", err))\n\t\t}\n\n\t\tif workflows.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf(\"no workflows found\"))\n\t\t}\n\n\t\trows := *workflows.JSON200.Rows\n\n\t\tif len(rows) == 0 {\n\t\t\tpanic(fmt.Errorf(\"no workflows found\"))\n\t\t}\n\n\t\tworkflowId := uuid.MustParse(rows[0].Metadata.Id)\n\n\t\tworkflowRuns, err := c.API().WorkflowRunListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowRunListParams{\n\t\t\tWorkflowId: &workflowId,\n\t\t})\n\n\t\tif err != nil 
{\n\t\t\tpanic(fmt.Errorf(\"error listing workflow runs: %w\", err))\n\t\t}\n\n\t\tif workflowRuns.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf(\"no workflow runs found\"))\n\t\t}\n\n\t\tworkflowRunsRows := *workflowRuns.JSON200.Rows\n\n\t\t_, err = c.API().WorkflowRunCancelWithResponse(context.Background(), uuid.MustParse(c.TenantId()), rest.WorkflowRunsCancelRequest{\n\t\t\tWorkflowRunIds: []uuid.UUID{uuid.MustParse(workflowRunsRows[0].Metadata.Id)},\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error cancelling workflow run: %w\", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/cancellation/run.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/compute/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/compute/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/compute/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/compute/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/compute/main.ts deleted file mode 100644 index d1bb3211c..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/compute/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/compute\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\tpool := \"test-pool\"\n\tbasicCompute := compute.Compute{\n\t\tPool: &pool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 1,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindSharedCPU,\n\t\tRegions: []compute.Region{compute.Region(\"ewr\")},\n\t}\n\n\tperformancePool := \"performance-pool\"\n\tperformanceCompute := compute.Compute{\n\t\tPool: &performancePool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 2,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindPerformanceCPU,\n\t\tRegions: []compute.Region{compute.Region(\"ewr\")},\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\"user:create:simple\"),\n\t\t\tName: \"simple\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tConcurrency: 
worker.Expression(\"input.user_id\"),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-one\")\n\t\t\t\t\tevents <- \"step-one\"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Username is: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\").SetCompute(&basicCompute),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\"step-one\", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-two\")\n\t\t\t\t\tevents <- \"step-two\"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Above message is: \" + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-two\").AddParents(\"step-one\").SetCompute(&performanceCompute),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\"pushing event user:create:simple\")\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"user:create:simple\",\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t\"hello\": \"world\",\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/compute/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/concurrency/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/concurrency/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/concurrency/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/concurrency/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/concurrency/main.ts deleted file mode 100644 index 926023583..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/concurrency/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != 
nil {\n\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\"user:create:concurrency\"),\n\t\t\tName: \"simple-concurrency\",\n\t\t\tDescription: \"This runs to test concurrency.\",\n\t\t\tConcurrency: worker.Expression(\"'concurrency'\").MaxRuns(1).LimitStrategy(types.GroupRoundRobin),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\t// we sleep to simulate a long running task\n\t\t\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t\t\tif err != nil {\n\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tif ctx.Err() != nil {\n\t\t\t\t\t\treturn nil, ctx.Err()\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-one\")\n\t\t\t\t\tevents <- \"step-one\"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Username is: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\"step-one\", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tif ctx.Err() != nil {\n\t\t\t\t\t\treturn nil, ctx.Err()\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-two\")\n\t\t\t\t\tevents <- \"step-two\"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Above message is: \" + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-two\").AddParents(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\ttestEvent := userCreateEvent{\n\t\tUsername: \"echo-test\",\n\t\tUserID: \"1234\",\n\t\tData: map[string]string{\n\t\t\t\"test\": \"test\",\n\t\t},\n\t}\n\tgo func() {\n\t\t// do this 10 times to test concurrency\n\t\tfor i := 0; i < 10; i++ {\n\n\t\t\twfr_id, err := c.Admin().RunWorkflow(\"simple-concurrency\", testEvent)\n\n\t\t\tlog.Println(\"Starting workflow run id: \", wfr_id)\n\n\t\t\tif err != nil {\n\t\t\t\tpanic(fmt.Errorf(\"error running workflow: %w\", err))\n\t\t\t}\n\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/concurrency/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/cron-programmatic/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/cron-programmatic/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/cron-programmatic/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/cron-programmatic/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/cron-programmatic/main.ts deleted file mode 100644 index 591c469b2..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/cron-programmatic/main.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - 
-const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\n// > Create\n// ... normal workflow definition\ntype printOutput struct{}\n\nfunc print(ctx context.Context) (result *printOutput, err error) {\n\tfmt.Println(\"called print:print\")\n\n\treturn &printOutput{}, nil\n}\n\n// ,\nfunc main() {\n\t// ... initialize client, worker and workflow\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: \"cron-workflow\",\n\t\t\tDescription: \"Demonstrates a simple cron workflow\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(print),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ,\n\n\tgo func() {\n\t\t// 👀 define the cron expression to run every minute\n\t\tcron, err := c.Cron().Create(\n\t\t\tcontext.Background(),\n\t\t\t\"cron-workflow\",\n\t\t\t&client.CronOpts{\n\t\t\t\tName: \"every-minute\",\n\t\t\t\tExpression: \"* * * * *\",\n\t\t\t\tInput: map[string]interface{}{\n\t\t\t\t\t\"message\": \"Hello, world!\",\n\t\t\t\t},\n\t\t\t\tAdditionalMetadata: map[string]string{},\n\t\t\t},\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tfmt.Println(*cron.Name, cron.Cron)\n\t}()\n\n\t// ... 
wait for interrupt signal\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n\n\t// ,\n}\n\n\nfunc ListCrons() {\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > List\n\tcrons, err := c.Cron().List(context.Background())\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor _, cron := range *crons.Rows {\n\t\tfmt.Println(cron.Cron, *cron.Name)\n\t}\n}\n\nfunc DeleteCron(id string) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > Delete\n\t// 👀 id is the cron's metadata id, can get it via cron.Metadata.Id\n\terr = c.Cron().Delete(context.Background(), id)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n}\n", - "source": "out/go/z_v0/cron-programmatic/main.go", - "blocks": { - "create": { - "start": 15, - "stop": 106 - }, - "list": { - "start": 117, - "stop": 117 - }, - "delete": { - "start": 136, - "stop": 137 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/cron/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/cron/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/cron/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/cron/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/cron/main.ts deleted file mode 100644 index ec541010c..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/cron/main.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\n// > Workflow Definition Cron Trigger\n// ... normal workflow definition\ntype printOutput struct{}\n\nfunc print(ctx context.Context) (result *printOutput, err error) {\n\tfmt.Println(\"called print:print\")\n\n\treturn &printOutput{}, nil\n}\n\n// ,\nfunc main() {\n\t// ... initialize client and worker\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ,\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\t// 👀 define the cron expression to run every minute\n\t\t\tOn: worker.Cron(\"* * * * *\"),\n\t\t\tName: \"cron-workflow\",\n\t\t\tDescription: \"Demonstrates a simple cron workflow\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(print),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ... 
start worker\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n\n\t// ,\n}\n\n", - "source": "out/go/z_v0/cron/main.go", - "blocks": { - "workflow_definition_cron_trigger": { - "start": 15, - "stop": 84 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/dag/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/dag/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/dag/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/dag/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/dag/main.ts deleted file mode 100644 index c477d5469..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/dag/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\terr = testSvc.On(\n\t\tworker.Events(\"user:create:simple\"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"post-user-update\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(1 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Step 1 got username: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(2 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Step 2 got username: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-two\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-one\", step1Out)\n\n\t\t\t\t\tstep2Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-two\", 
step2Out)\n\n\t\t\t\t\ttime.Sleep(3 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Username was: \" + input.Username + \", Step 3: has parents 1 and 2\" + step1Out.Message + \", \" + step2Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-three\").AddParents(\"step-one\", \"step-two\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-one\", step1Out)\n\n\t\t\t\t\tstep3Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-three\", step3Out)\n\n\t\t\t\t\ttime.Sleep(4 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Step 4: has parents 1 and 3\" + step1Out.Message + \", \" + step3Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-four\").AddParents(\"step-one\", \"step-three\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep4Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-four\", step4Out)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Step 5: has parent 4\" + step4Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-five\").AddParents(\"step-four\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \"echo-test\",\n\t\tUserID: \"1234\",\n\t\tData: map[string]string{\n\t\t\t\"test\": \"test\",\n\t\t},\n\t}\n\n\tlog.Printf(\"pushing event user:create:simple\")\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t\"user:create:simple\",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error pushing event: %w\", err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn cleanup()\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", - "source": "out/go/z_v0/dag/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/index.ts deleted file mode 100644 index c9ede1896..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import * as requeue from './requeue'; -import * as schedule_timeout from './schedule-timeout'; -import * as timeout from './timeout'; -import * as yaml from './yaml'; - -export { requeue }; -export { schedule_timeout }; -export { timeout }; -export { yaml }; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/.hatchet/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/.hatchet/index.ts deleted file mode 100644 index 08926d442..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/.hatchet/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import job_requeue_workflow from './job-requeue-workflow'; - -export { job_requeue_workflow } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.ts deleted file mode 100644 index 25c8fbc96..000000000 --- 
a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "name: \"test-step-requeue\"\nversion: v0.2.0\ntriggers:\n events:\n - example:event\njobs:\n requeue-job:\n steps:\n - id: requeue\n action: requeue:requeue\n timeout: 10s\n", - "source": "out/go/z_v0/deprecated/requeue/.hatchet/job-requeue-workflow.yaml", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/index.ts deleted file mode 100644 index 58d44f5cd..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import * as hatchet from './.hatchet'; - -export { main } -export { hatchet }; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/main.ts deleted file mode 100644 index e6b9fb303..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype sampleEvent struct{}\n\ntype requeueInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction(\"requeue:requeue\", func(ctx context.Context, input *requeueInput) (result any, err error) {\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\"example:event\",\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// wait to register the worker for 10 seconds, to let the requeuer kick in\n\ttime.Sleep(10 * time.Second)\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", - "source": "out/go/z_v0/deprecated/requeue/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/index.ts deleted file mode 100644 index 5eef132d5..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import schedule_timeout_workflow from 
'./schedule-timeout-workflow'; - -export { schedule_timeout_workflow } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.ts deleted file mode 100644 index 30dafdea8..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "name: \"test-schedule-timeout\"\nversion: v0.1.0\ntriggers:\n events:\n - user:create\njobs:\n timeout-job:\n steps:\n - id: timeout\n action: timeout:timeout\n", - "source": "out/go/z_v0/deprecated/schedule-timeout/.hatchet/schedule-timeout-workflow.yaml", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/index.ts deleted file mode 100644 index 58d44f5cd..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import * as hatchet from './.hatchet'; - -export { main } -export { hatchet }; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts deleted file mode 100644 index 8266fa3f5..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/joho/godotenv\"\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\"user:create\",\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttime.Sleep(35 * time.Second)\n\n\tfmt.Println(\"step should have timed out\")\n}\n", - "source": "out/go/z_v0/deprecated/schedule-timeout/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/index.ts deleted file mode 100644 index a60eef383..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import job_timeout_workflow from './job-timeout-workflow'; -import step_timeout_workflow from './step-timeout-workflow'; - -export { job_timeout_workflow } -export { step_timeout_workflow } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.ts deleted file mode 100644 index 778bf69b0..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.ts +++ 
/dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "name: \"test-job-timeout\"\nversion: v0.1.0\ntriggers:\n events:\n - user:create\njobs:\n timeout-job:\n timeout: 3s\n steps:\n - id: timeout\n action: timeout:timeout\n timeout: 10s\n", - "source": "out/go/z_v0/deprecated/timeout/.hatchet/job-timeout-workflow.yaml", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.ts deleted file mode 100644 index 5e212f028..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "name: \"test-step-timeout\"\nversion: v0.1.0\ntriggers:\n events:\n - user:create\njobs:\n timeout-job:\n steps:\n - id: timeout\n action: timeout:timeout\n timeout: 5s\n # This step should not be reached\n - id: later-step\n action: timeout:timeout\n timeout: 5s\n", - "source": "out/go/z_v0/deprecated/timeout/.hatchet/step-timeout-workflow.yaml", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/index.ts deleted file mode 100644 index 58d44f5cd..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import * as hatchet from './.hatchet'; - -export { main } -export { hatchet }; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/main.ts deleted file mode 100644 index 080172a63..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction(\"timeout:timeout\", func(ctx context.Context, input *timeoutInput) (result any, err error) {\n\t\t// wait for context done signal\n\t\ttimeStart := time.Now().UTC()\n\t\t<-ctx.Done()\n\t\tfmt.Println(\"context cancelled in \", time.Since(timeStart).Seconds(), \" seconds\")\n\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error starting worker: %w\", err))\n\t}\n\n\tevent := 
sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\"user:create\",\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", - "source": "out/go/z_v0/deprecated/timeout/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/.hatchet/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/.hatchet/index.ts deleted file mode 100644 index 33bdccc15..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/.hatchet/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import sample_workflow from './sample-workflow'; - -export { sample_workflow } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.ts deleted file mode 100644 index 99019dcc5..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "name: \"post-user-sign-up\"\nversion: v0.2.0\ntriggers:\n events:\n - user:create\njobs:\n print-user:\n steps:\n - id: echo1\n action: echo:echo\n timeout: 60s\n with:\n message: \"Username is {{ .input.username }}\"\n - id: echo2\n action: echo:echo\n timeout: 60s\n with:\n message: \"Above message is: {{ .steps.echo1.message }}\"\n - id: echo3\n action: echo:echo\n timeout: 60s\n with:\n message: \"Above message is: {{ .steps.echo2.message }}\"\n - id: testObject\n action: echo:object\n timeout: 60s\n with:\n object: \"{{ .steps.echo3.json }}\"\n", - "source": "out/go/z_v0/deprecated/yaml/.hatchet/sample-workflow.yaml", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/README.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/README.ts deleted file mode 100644 index eebbfd731..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/README.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "## YAML Workflow Example\n\nThis example shows how you can create a YAML file in your repository to define the structure of a workflow. This example runs the [sample-workflow.yaml](./.hatchet/sample-workflow.yaml).\n\n## Explanation\n\nThis folder contains a demo example of a workflow that simply echoes the input message as an output. The workflow file showcases the following features:\n\n- Running a simple job with a set of dependent steps\n- Variable references within step arguments -- each subsequent step in a workflow can call `.steps..` to access output arguments\n\n## How to run\n\nNavigate to this directory and run the following steps:\n\n1. Make sure you have a Hatchet server running (see the instructions [here](../../README.md)). After running `task seed`, grab the tenant ID which is output to the console.\n2. 
Set your environment variables -- if you're using the bundled Hatchet server, this will look like:\n\n```sh\ncat > .env <<EOF\nHATCHET_CLIENT_TENANT_ID=<tenant-id>\nHATCHET_CLIENT_TLS_ROOT_CA_FILE=../../hack/dev/certs/ca.cert\nHATCHET_CLIENT_TLS_CERT_FILE=../../hack/dev/certs/client-worker.pem\nHATCHET_CLIENT_TLS_KEY_FILE=../../hack/dev/certs/client-worker.key\nHATCHET_CLIENT_TLS_SERVER_NAME=cluster\nEOF\n```\n\n3. Run the following within this directory:\n\n```sh\ngo run main.go\n```\n", - "source": "out/go/z_v0/deprecated/yaml/README.md", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/index.ts deleted file mode 100644 index d93173ae8..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import readme from './README'; -import main from './main'; -import * as hatchet from './.hatchet'; - -export { readme } -export { main } -export { hatchet }; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/main.ts deleted file mode 100644 index 03a398d60..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserId string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype actionInput struct {\n\tMessage string `json:\"message\"`\n}\n\ntype actionOut struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc echo(ctx context.Context, input *actionInput) (result *actionOut, err error) {\n\treturn &actionOut{\n\t\tMessage: input.Message,\n\t}, nil\n}\n\nfunc object(ctx context.Context, input *userCreateEvent) error {\n\treturn nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\techoSvc := worker.NewService(\"echo\")\n\n\terr = echoSvc.RegisterAction(echo)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = echoSvc.RegisterAction(object)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error starting worker: %w\", err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \"echo-test\",\n\t\tUserId: \"1234\",\n\t\tData: map[string]string{\n\t\t\t\"test\": \"test\",\n\t\t},\n\t}\n\n\ttime.Sleep(1 * time.Second)\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\"user:create\",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up worker: %w\", err))\n\t}\n}\n", - "source": "out/go/z_v0/deprecated/yaml/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git
a/frontend/docs/lib/generated/snips/go/z_v0/errors-test/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/errors-test/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/errors-test/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/errors-test/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/errors-test/main.ts deleted file mode 100644 index f0fbba11a..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/errors-test/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"os\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/errors/sentry\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserId string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc StepOne(ctx context.Context) (result *stepOneOutput, err error) {\n\treturn nil, fmt.Errorf(\"this is an error\")\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsentryAlerter, err := sentry.NewSentryAlerter(&sentry.SentryAlerterOpts{\n\t\tDSN: os.Getenv(\"SENTRY_DSN\"),\n\t\tEnvironment: os.Getenv(\"SENTRY_ENVIRONMENT\"),\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t\tworker.WithErrorAlerter(sentryAlerter),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.On(worker.Event(\"user:create\"), &worker.WorkflowJob{\n\t\tName: \"failing-workflow\",\n\t\tDescription: \"This is a failing workflow.\",\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\t{\n\t\t\t\tFunction: StepOne,\n\t\t\t},\n\t\t},\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction(\"echo:echo\", func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t\"message\": input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction(\"echo:object\", func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error starting worker: %w\", err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \"echo-test\",\n\t\tUserId: \"1234\",\n\t\tData: map[string]string{\n\t\t\t\"test\": \"test\",\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\"user:create\",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n}\n", - "source": "out/go/z_v0/errors-test/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/index.ts deleted file mode 
100644 index 1ec6b8b90..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/index.ts +++ /dev/null @@ -1,61 +0,0 @@ -import * as assignment_affinity from './assignment-affinity'; -import * as assignment_sticky from './assignment-sticky'; -import * as bulk_imports from './bulk_imports'; -import * as bulk_workflows from './bulk_workflows'; -import * as cancellation from './cancellation'; -import * as compute from './compute'; -import * as concurrency from './concurrency'; -import * as cron from './cron'; -import * as cron_programmatic from './cron-programmatic'; -import * as dag from './dag'; -import * as deprecated from './deprecated'; -import * as errors_test from './errors-test'; -import * as limit_concurrency from './limit-concurrency'; -import * as logging from './logging'; -import * as manual_trigger from './manual-trigger'; -import * as middleware from './middleware'; -import * as namespaced from './namespaced'; -import * as no_tls from './no-tls'; -import * as on_failure from './on-failure'; -import * as procedural from './procedural'; -import * as rate_limit from './rate-limit'; -import * as register_action from './register-action'; -import * as retries from './retries'; -import * as retries_with_backoff from './retries-with-backoff'; -import * as scheduled from './scheduled'; -import * as simple from './simple'; -import * as stream_event from './stream-event'; -import * as stream_event_by_meta from './stream-event-by-meta'; -import * as timeout from './timeout'; -import * as webhook from './webhook'; - -export { assignment_affinity }; -export { assignment_sticky }; -export { bulk_imports }; -export { bulk_workflows }; -export { cancellation }; -export { compute }; -export { concurrency }; -export { cron }; -export { cron_programmatic }; -export { dag }; -export { deprecated }; -export { errors_test }; -export { limit_concurrency }; -export { logging }; -export { manual_trigger }; -export { middleware }; -export { namespaced }; -export { no_tls }; -export { on_failure }; -export { procedural }; -export { rate_limit }; -export { register_action }; -export { retries }; -export { retries_with_backoff }; -export { scheduled }; -export { simple }; -export { stream_event }; -export { stream_event_by_meta }; -export { timeout }; -export { webhook }; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts deleted file mode 100644 index e421cb725..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype concurrencyLimitEvent struct {\n\tIndex int `json:\"index\"`\n}\n\ntype stepOneOutput struct 
{\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \"user-create\", nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\terr = testSvc.On(\n\t\tworker.Events(\"concurrency-test-event\"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"concurrency-limit\",\n\t\t\tDescription: \"This limits concurrency to 1 run at a time.\",\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(1),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\t<-ctx.Done()\n\t\t\t\t\tfmt.Println(\"context done, returning\")\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\"interrupted\")\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t\t\t}\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfirstEvent := concurrencyLimitEvent{\n\t\t\tIndex: 0,\n\t\t}\n\n\t\t// push an event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"concurrency-test-event\",\n\t\t\tfirstEvent,\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\"interrupted\")\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): // timeout\n\t\t}\n\n\t\t// push a second event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"concurrency-test-event\",\n\t\t\tconcurrencyLimitEvent{\n\t\t\t\tIndex: 1,\n\t\t\t},\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", - "source": "out/go/z_v0/limit-concurrency/cancel-in-progress/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/main.ts deleted file mode 
100644 index c3e05db97..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin-advanced/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype concurrencyLimitEvent struct {\n\tConcurrencyKey string `json:\"concurrency_key\"`\n\tUserId int `json:\"user_id\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n\tConcurrencyWhenFinished int `json:\"concurrency_when_finished\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tctx, cancel := cmdutils.NewInterruptContext()\n\tdefer cancel()\n\n\tif err := run(ctx); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \"concurrency\", nil\n}\n\nvar done = make(chan struct{})\nvar errChan = make(chan error)\n\nvar workflowCount int\nvar countMux sync.Mutex\n\nfunc run(ctx context.Context) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\t// runningCount := 0\n\n\tcountMux := sync.Mutex{}\n\n\tvar countMap = make(map[string]int)\n\tmaxConcurrent := 2\n\n\terr = w.RegisterWorkflow(\n\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"concurrency-limit-round-robin-existing-workflows\",\n\t\t\tDescription: \"This limits concurrency to maxConcurrent runs at a time.\",\n\t\t\tOn: worker.Events(\"test:concurrency-limit-round-robin-existing-workflows\"),\n\t\t\tConcurrency: worker.Expression(\"input.concurrency_key\").MaxRuns(int32(maxConcurrent)).LimitStrategy(types.GroupRoundRobin),\n\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &concurrencyLimitEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"error getting input: %w\", err)\n\t\t\t\t\t}\n\t\t\t\t\tconcurrencyKey := input.ConcurrencyKey\n\t\t\t\t\tcountMux.Lock()\n\n\t\t\t\t\tif countMap[concurrencyKey]+1 > maxConcurrent {\n\t\t\t\t\t\tcountMux.Unlock()\n\t\t\t\t\t\te := fmt.Errorf(\"concurrency limit exceeded for %d we have %d workers running\", input.UserId, countMap[concurrencyKey])\n\t\t\t\t\t\terrChan <- e\n\t\t\t\t\t\treturn nil, e\n\t\t\t\t\t}\n\t\t\t\t\tcountMap[concurrencyKey]++\n\n\t\t\t\t\tcountMux.Unlock()\n\n\t\t\t\t\tfmt.Println(\"received event\", input.UserId)\n\n\t\t\t\t\ttime.Sleep(10 * time.Second)\n\n\t\t\t\t\tfmt.Println(\"processed event\", input.UserId)\n\n\t\t\t\t\tcountMux.Lock()\n\t\t\t\t\tcountMap[concurrencyKey]--\n\t\t\t\t\tcountMux.Unlock()\n\n\t\t\t\t\tdone <- struct{}{}\n\n\t\t\t\t\treturn &stepOneOutput{}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\tvar workflowRuns []*client.WorkflowRun\n\n\t\tfor i := 0; i < 1; i++ {\n\t\t\tworkflowCount++\n\t\t\tevent := 
concurrencyLimitEvent{\n\t\t\t\tConcurrencyKey: \"key\",\n\t\t\t\tUserId: i,\n\t\t\t}\n\t\t\tworkflowRuns = append(workflowRuns, &client.WorkflowRun{\n\t\t\t\tName: \"concurrency-limit-round-robin-existing-workflows\",\n\t\t\t\tInput: event,\n\t\t\t})\n\n\t\t}\n\n\t\t// create a second one with a different key\n\n\t\t// so the bug we are testing here is that total concurrency for any one group should be 2\n\t\t// but if we have more than one group we end up with 4 running when only 2 + 1 are eligible to run\n\n\t\tfor i := 0; i < 3; i++ {\n\t\t\tworkflowCount++\n\n\t\t\tevent := concurrencyLimitEvent{\n\t\t\t\tConcurrencyKey: \"secondKey\",\n\t\t\t\tUserId: i,\n\t\t\t}\n\t\t\tworkflowRuns = append(workflowRuns, &client.WorkflowRun{\n\t\t\t\tName: \"concurrency-limit-round-robin-existing-workflows\",\n\t\t\t\tInput: event,\n\t\t\t})\n\n\t\t}\n\n\t\t_, err := c.Admin().BulkRunWorkflow(workflowRuns)\n\t\tif err != nil {\n\t\t\tfmt.Println(\"error running workflow\", err)\n\t\t}\n\n\t\tfmt.Println(\"ran workflows\")\n\n\t}()\n\n\ttime.Sleep(2 * time.Second)\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\tdefer cleanup()\n\n\tfor {\n\t\tselect {\n\t\tcase <-ctx.Done():\n\t\t\treturn nil\n\t\tcase <-time.After(20 * time.Second):\n\t\t\treturn fmt.Errorf(\"timeout\")\n\t\tcase err := <-errChan:\n\t\t\treturn err\n\t\tcase <-done:\n\t\t\tcountMux.Lock()\n\t\t\tworkflowCount--\n\t\t\tcountMux.Unlock()\n\t\t\tif workflowCount == 0 {\n\t\t\t\ttime.Sleep(1 * time.Second)\n\t\t\t\treturn nil\n\t\t\t}\n\n\t\t}\n\t}\n}\n", - "source": "out/go/z_v0/limit-concurrency/group-round-robin-advanced/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts deleted file mode 100644 index 51e2eccda..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype concurrencyLimitEvent struct {\n\tUserId int `json:\"user_id\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\tinput := &concurrencyLimitEvent{}\n\terr := ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn \"\", fmt.Errorf(\"error getting input: %w\", err)\n\t}\n\n\treturn 
fmt.Sprintf(\"%d\", input.UserId), nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\terr = testSvc.On(\n\t\tworker.Events(\"concurrency-test-event-rr\"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"concurrency-limit-round-robin\",\n\t\t\tDescription: \"This limits concurrency to 2 runs at a time.\",\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(2).LimitStrategy(types.GroupRoundRobin),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &concurrencyLimitEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"error getting input: %w\", err)\n\t\t\t\t\t}\n\n\t\t\t\t\tfmt.Println(\"received event\", input.UserId)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\tfmt.Println(\"processed event\", input.UserId)\n\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\"interrupted\")\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfor i := 0; i < 20; i++ {\n\t\t\tvar event concurrencyLimitEvent\n\n\t\t\tif i < 10 {\n\t\t\t\tevent = concurrencyLimitEvent{0}\n\t\t\t} else {\n\t\t\t\tevent = concurrencyLimitEvent{1}\n\t\t\t}\n\n\t\t\tc.Event().Push(context.Background(), \"concurrency-test-event-rr\", event)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\"interrupted\")\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): //timeout\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\treturn fmt.Errorf(\"error cleaning up: %w\", err)\n\t\t\t}\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", - "source": "out/go/z_v0/limit-concurrency/group-round-robin/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/index.ts deleted file mode 100644 index 3d31439c8..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import * as cancel_in_progress from './cancel-in-progress'; -import * as group_round_robin from './group-round-robin'; -import * as group_round_robin_advanced from './group-round-robin-advanced'; - -export { cancel_in_progress }; -export { group_round_robin }; -export { group_round_robin_advanced }; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/logging/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/logging/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- 
a/frontend/docs/lib/generated/snips/go/z_v0/logging/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/logging/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/logging/main.ts deleted file mode 100644 index fddeec0e9..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/logging/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\"user:log:simple\"),\n\t\t\tName: \"simple\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tConcurrency: worker.Expression(\"input.user_id\"),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-one\")\n\t\t\t\t\tevents <- \"step-one\"\n\n\t\t\t\t\tfor i := 0; i < 1000; i++ {\n\t\t\t\t\t\tctx.Log(fmt.Sprintf(\"step-one: %d\", i))\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Username is: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\"pushing event user:create:simple\")\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"user:log:simple\",\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t\"hello\": \"world\",\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/logging/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/index.ts 
b/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/index.ts deleted file mode 100644 index 897a74a46..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import * as trigger from './trigger'; -import * as worker from './worker'; - -export { trigger }; -export { worker }; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/trigger/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/trigger/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/trigger/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/trigger/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/trigger/main.ts deleted file mode 100644 index 3ffc57910..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/trigger/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\ttime.Sleep(1 * time.Second)\n\n\t// trigger workflow\n\tworkflow, err := c.Admin().RunWorkflow(\n\t\t\"post-user-update\",\n\t\t&userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t},\n\t\tclient.WithRunMetadata(map[string]interface{}{\n\t\t\t\"hello\": \"world\",\n\t\t}),\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error running workflow: %w\", err)\n\t}\n\n\tfmt.Println(\"workflow run id:\", workflow.WorkflowRunId())\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\terr = c.Subscribe().On(interruptCtx, workflow.WorkflowRunId(), func(event client.WorkflowEvent) error {\n\t\tfmt.Println(event.EventPayload)\n\n\t\treturn nil\n\t})\n\n\treturn err\n}\n", - "source": "out/go/z_v0/manual-trigger/trigger/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/worker/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/worker/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/worker/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/worker/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/worker/main.ts deleted file mode 100644 index b35494e8c..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/manual-trigger/worker/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from 
'@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\terr = testSvc.On(\n\t\tworker.Events(\"user:create:simple\"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"post-user-update\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(1 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Step 1 got username: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(2 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Step 2 got username: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-two\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-one\", step1Out)\n\n\t\t\t\t\tstep2Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-two\", step2Out)\n\n\t\t\t\t\ttime.Sleep(3 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Step 3: has parents 1 and 2:\" + step1Out.Message + \", \" + step2Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-three\").AddParents(\"step-one\", \"step-two\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-one\", step1Out)\n\n\t\t\t\t\tstep3Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-three\", step3Out)\n\n\t\t\t\t\ttime.Sleep(4 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Step 4: has parents 1 and 3\" + step1Out.Message + \", \" + step3Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-four\").AddParents(\"step-one\", \"step-three\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep4Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\"step-four\", step4Out)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \"Step 5: has parent 4\" + step4Out.Message,\n\t\t\t\t\t}, 
nil\n\t\t\t\t}).SetName(\"step-five\").AddParents(\"step-four\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\treturn fmt.Errorf(\"error cleaning up: %w\", err)\n\t}\n\n\treturn nil\n}\n", - "source": "out/go/z_v0/manual-trigger/worker/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/middleware/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/middleware/index.ts deleted file mode 100644 index a265089e4..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/middleware/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main } -export { run } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/middleware/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/middleware/main.ts deleted file mode 100644 index 459f35f55..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/middleware/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tch := cmdutils.InterruptChan()\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"cleanup() error = %v\", err))\n\t}\n}\n", - "source": "out/go/z_v0/middleware/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/middleware/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/middleware/run.ts deleted file mode 100644 index 4ecbccb4a..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/middleware/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf(\"1st-middleware\")\n\t\tevents <- \"1st-middleware\"\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), \"testkey\", \"testvalue\"))\n\t\treturn next(ctx)\n\t})\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf(\"2nd-middleware\")\n\t\tevents <- \"2nd-middleware\"\n\n\t\t// time the function duration\n\t\tstart := 
time.Now()\n\t\terr := next(ctx)\n\t\tduration := time.Since(start)\n\t\tfmt.Printf(\"step function took %s\\n\", duration)\n\t\treturn err\n\t})\n\n\ttestSvc := w.NewService(\"test\")\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tevents <- \"svc-middleware\"\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), \"svckey\", \"svcvalue\"))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.On(\n\t\tworker.Events(\"user:create:middleware\"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"middleware\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-one\")\n\t\t\t\t\tevents <- \"step-one\"\n\n\t\t\t\t\ttestVal := ctx.Value(\"testkey\").(string)\n\t\t\t\t\tevents <- testVal\n\t\t\t\t\tsvcVal := ctx.Value(\"svckey\").(string)\n\t\t\t\t\tevents <- svcVal\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Username is: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\"step-one\", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-two\")\n\t\t\t\t\tevents <- \"step-two\"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Above message is: \" + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-two\").AddParents(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\"pushing event user:create:middleware\")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"user:create:middleware\",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/middleware/run.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/namespaced/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/namespaced/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/namespaced/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/namespaced/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/namespaced/main.ts deleted file mode 100644 index 95b0178dd..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/namespaced/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport 
(\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \"user-create\", nil\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New(\n\t\tclient.WithNamespace(\"sample\"),\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\terr = testSvc.On(\n\t\tworker.Events(\"user:create:simple\"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"simple\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-one\")\n\t\t\t\t\tevents <- \"step-one\"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Username is: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\"step-one\", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-two\")\n\t\t\t\t\tevents <- \"step-two\"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Above message is: \" + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-two\").AddParents(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\"pushing event user:create:simple\")\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"user:create:simple\",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/namespaced/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/no-tls/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/no-tls/index.ts deleted file mode 100644 index 
0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/no-tls/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/no-tls/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/no-tls/main.ts deleted file mode 100644 index 02089d025..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/no-tls/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype stepOutput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"error creating client: %v\", err))\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"error creating worker: %v\", err))\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\terr = testSvc.On(\n\t\tworker.Events(\"simple\"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"simple-workflow\",\n\t\t\tDescription: \"Simple one-step workflow.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tfmt.Println(\"executed step 1\")\n\n\t\t\t\t\treturn &stepOutput{}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"error registering workflow: %v\", err))\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Sprintf(\"error starting worker: %v\", err))\n\t}\n\n\t<-interruptCtx.Done()\n\tif err := cleanup(); err != nil {\n\t\tpanic(err)\n\t}\n}\n", - "source": "out/go/z_v0/no-tls/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/on-failure/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/on-failure/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/on-failure/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/on-failure/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/on-failure/main.ts deleted file mode 100644 index 9e7c65df3..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/on-failure/main.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\n// > OnFailure Step\n// This workflow will fail because the step will throw an error\n// we define an onFailure step to handle this case\n\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t// 👀 this step will always raise an exception\n\treturn nil, fmt.Errorf(\"test on 
failure\")\n}\n\nfunc OnFailure(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t// run cleanup code or notifications here\n\n\t// 👀 you can access the error from the failed step(s) like this\n\tfmt.Println(ctx.StepRunErrors())\n\n\treturn &stepOneOutput{\n\t\tMessage: \"Failure!\",\n\t}, nil\n}\n\nfunc main() {\n\t// ...\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// 👀 we define an onFailure step to handle this case\n\terr = w.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"on-failure-workflow\",\n\t\t\tDescription: \"This runs at a scheduled time.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName(\"step-one\"),\n\t\t\t},\n\t\t\tOnFailure: &worker.WorkflowJob{\n\t\t\t\tName: \"scheduled-workflow-failure\",\n\t\t\t\tDescription: \"This runs when the scheduled workflow fails.\",\n\t\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\t\tworker.Fn(OnFailure).SetName(\"on-failure\"),\n\t\t\t\t},\n\t\t\t},\n\t\t},\n\t)\n\n\t// ...\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n\t// ,\n}\n\n", - "source": "out/go/z_v0/on-failure/main.go", - "blocks": { - "onfailure_step": { - "start": 19, - "stop": 108 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/procedural/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/procedural/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/procedural/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/procedural/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/procedural/main.ts deleted file mode 100644 index 27e5839df..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/procedural/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"sync\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\t\"golang.org/x/sync/errgroup\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\nconst NUM_CHILDREN = 50\n\ntype proceduralChildInput struct {\n\tIndex int `json:\"index\"`\n}\n\ntype proceduralChildOutput struct {\n\tIndex int `json:\"index\"`\n}\n\ntype proceduralParentOutput struct {\n\tChildSum int `json:\"child_sum\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 5*NUM_CHILDREN)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n}\n\nfunc 
run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\terr = testSvc.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"procedural-parent-workflow\",\n\t\t\tDescription: \"This is a test of procedural workflows.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(\n\t\t\t\t\tfunc(ctx worker.HatchetContext) (result *proceduralParentOutput, err error) {\n\t\t\t\t\t\tchildWorkflows := make([]*client.Workflow, NUM_CHILDREN)\n\n\t\t\t\t\t\tfor i := 0; i < NUM_CHILDREN; i++ {\n\t\t\t\t\t\t\tchildInput := proceduralChildInput{\n\t\t\t\t\t\t\t\tIndex: i,\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tchildWorkflow, err := ctx.SpawnWorkflow(\"procedural-child-workflow\", childInput, &worker.SpawnWorkflowOpts{\n\t\t\t\t\t\t\t\tAdditionalMetadata: &map[string]string{\n\t\t\t\t\t\t\t\t\t\"childKey\": \"childValue\",\n\t\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t})\n\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tchildWorkflows[i] = childWorkflow\n\n\t\t\t\t\t\t\tevents <- fmt.Sprintf(\"child-%d-started\", i)\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\teg := errgroup.Group{}\n\n\t\t\t\t\t\teg.SetLimit(NUM_CHILDREN)\n\n\t\t\t\t\t\tchildOutputs := make([]int, 0)\n\t\t\t\t\t\tchildOutputsMu := sync.Mutex{}\n\n\t\t\t\t\t\tfor i, childWorkflow := range childWorkflows {\n\t\t\t\t\t\t\teg.Go(func(i int, childWorkflow *client.Workflow) func() error {\n\t\t\t\t\t\t\t\treturn func() error {\n\t\t\t\t\t\t\t\t\tchildResult, err := childWorkflow.Result()\n\n\t\t\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tchildOutput := proceduralChildOutput{}\n\n\t\t\t\t\t\t\t\t\terr = childResult.StepOutput(\"step-one\", &childOutput)\n\n\t\t\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\t\t\treturn err\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tchildOutputsMu.Lock()\n\t\t\t\t\t\t\t\t\tchildOutputs = append(childOutputs, childOutput.Index)\n\t\t\t\t\t\t\t\t\tchildOutputsMu.Unlock()\n\n\t\t\t\t\t\t\t\t\tevents <- fmt.Sprintf(\"child-%d-completed\", childOutput.Index)\n\n\t\t\t\t\t\t\t\t\treturn nil\n\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}(i, childWorkflow))\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tfinishedCh := make(chan struct{})\n\n\t\t\t\t\t\tgo func() {\n\t\t\t\t\t\t\tdefer close(finishedCh)\n\t\t\t\t\t\t\terr = eg.Wait()\n\t\t\t\t\t\t}()\n\n\t\t\t\t\t\ttimer := time.NewTimer(60 * time.Second)\n\n\t\t\t\t\t\tselect {\n\t\t\t\t\t\tcase <-finishedCh:\n\t\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\tcase <-timer.C:\n\t\t\t\t\t\t\tincomplete := make([]int, 0)\n\t\t\t\t\t\t\t// print non-complete children\n\t\t\t\t\t\t\tfor i := range childWorkflows {\n\t\t\t\t\t\t\t\tcompleted := false\n\t\t\t\t\t\t\t\tfor _, childOutput := range childOutputs {\n\t\t\t\t\t\t\t\t\tif childOutput == i {\n\t\t\t\t\t\t\t\t\t\tcompleted = true\n\t\t\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tif !completed {\n\t\t\t\t\t\t\t\t\tincomplete = append(incomplete, i)\n\t\t\t\t\t\t\t\t}\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\treturn nil, fmt.Errorf(\"timed out waiting for the following child workflows to complete: %v\", incomplete)\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tsum := 
0\n\n\t\t\t\t\t\tfor _, childOutput := range childOutputs {\n\t\t\t\t\t\t\tsum += childOutput\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn &proceduralParentOutput{\n\t\t\t\t\t\t\tChildSum: sum,\n\t\t\t\t\t\t}, nil\n\t\t\t\t\t},\n\t\t\t\t).SetTimeout(\"10m\"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\terr = testSvc.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"procedural-child-workflow\",\n\t\t\tDescription: \"This is a test of procedural workflows.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(\n\t\t\t\t\tfunc(ctx worker.HatchetContext) (result *proceduralChildOutput, err error) {\n\t\t\t\t\t\tinput := proceduralChildInput{}\n\n\t\t\t\t\t\terr = ctx.WorkflowInput(&input)\n\n\t\t\t\t\t\tif err != nil {\n\t\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\treturn &proceduralChildOutput{\n\t\t\t\t\t\t\tIndex: input.Index,\n\t\t\t\t\t\t}, nil\n\t\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\ttime.Sleep(1 * time.Second)\n\n\t\t_, err := c.Admin().RunWorkflow(\"procedural-parent-workflow\", nil)\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error running workflow: %w\", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/procedural/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/rate-limit/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/rate-limit/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/rate-limit/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/rate-limit/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/rate-limit/main.ts deleted file mode 100644 index c5bdcac8b..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/rate-limit/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/client/types\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype rateLimitInput struct {\n\tIndex int `json:\"index\"`\n\tUserId string `json:\"user_id\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &rateLimitInput{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tctx.StreamEvent([]byte(fmt.Sprintf(\"This is a stream event %d\", input.Index)))\n\n\treturn &stepOneOutput{\n\t\tMessage: fmt.Sprintf(\"This ran at %s\", time.Now().String()),\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = c.Admin().PutRateLimit(\"api1\", &types.RateLimitOpts{\n\t\tMax: 12,\n\t\tDuration: types.Minute,\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := 
worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tunitExpr := \"int(input.index) + 1\"\n\tkeyExpr := \"input.user_id\"\n\tlimitValueExpr := \"3\"\n\n\terr = w.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"rate-limit-workflow\",\n\t\t\tDescription: \"This illustrates rate limiting.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName(\"step-one\").SetRateLimit(\n\t\t\t\t\tworker.RateLimit{\n\t\t\t\t\t\tKey: \"per-user-rate-limit\",\n\t\t\t\t\t\tKeyExpr: &keyExpr,\n\t\t\t\t\t\tUnitsExpr: &unitExpr,\n\t\t\t\t\t\tLimitValueExpr: &limitValueExpr,\n\t\t\t\t\t},\n\t\t\t\t),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor i := 0; i < 12; i++ {\n\t\tfor j := 0; j < 3; j++ {\n\t\t\t_, err = c.Admin().RunWorkflow(\"rate-limit-workflow\", &rateLimitInput{\n\t\t\t\tIndex: j,\n\t\t\t\tUserId: fmt.Sprintf(\"user-%d\", i),\n\t\t\t})\n\n\t\t\tif err != nil {\n\t\t\t\tpanic(err)\n\t\t\t}\n\t\t}\n\t}\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n}\n", - "source": "out/go/z_v0/rate-limit/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/register-action/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/register-action/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/register-action/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/register-action/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/register-action/main.ts deleted file mode 100644 index f8b368237..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/register-action/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserId string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc StepOne(ctx context.Context, input *userCreateEvent) (result *stepOneOutput, err error) {\n\t// could get from context\n\t// testVal := ctx.Value(\"testkey\").(string)\n\t// svcVal := ctx.Value(\"svckey\").(string)\n\n\treturn &stepOneOutput{\n\t\tMessage: \"Username is: \" + input.Username,\n\t}, nil\n}\n\nfunc StepTwo(ctx context.Context, input *stepOneOutput) (result *stepOneOutput, err error) {\n\treturn &stepOneOutput{\n\t\tMessage: \"Above message is: \" + input.Message,\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error 
{\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), \"testkey\", \"testvalue\"))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.RegisterAction(StepOne, worker.WithActionName(\"step-one\"))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.RegisterAction(StepTwo, worker.WithActionName(\"step-two\"))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.On(\n\t\tworker.Events(\"user:create\", \"user:update\"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"post-user-update\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\t// example of calling a registered action from the worker (includes service name)\n\t\t\t\tw.Call(\"test:step-one\"),\n\t\t\t\t// example of calling a registered action from a service\n\t\t\t\ttestSvc.Call(\"step-two\"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction(\"echo:echo\", func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t\"message\": input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction(\"echo:object\", func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \"echo-test\",\n\t\tUserId: \"1234\",\n\t\tData: map[string]string{\n\t\t\t\"test\": \"test\",\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\"user:create\",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interrupt:\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t\t\t}\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", - "source": "out/go/z_v0/register-action/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/retries-with-backoff/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/retries-with-backoff/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/retries-with-backoff/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/retries-with-backoff/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/retries-with-backoff/main.ts deleted file mode 100644 index 9956b885b..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/retries-with-backoff/main.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\n// > Backoff\n\n// ... 
normal function definition\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tif ctx.RetryCount() < 3 {\n\t\treturn nil, fmt.Errorf(\"failure\")\n\t}\n\n\treturn &stepOneOutput{\n\t\tMessage: \"Success!\",\n\t}, nil\n}\n\n// ,\n\nfunc main() {\n\t// ...\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ,\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"retry-with-backoff-workflow\",\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tDescription: \"Demonstrates retry with exponential backoff.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName(\"with-backoff\").\n\t\t\t\t\tSetRetries(10).\n\t\t\t\t\t// 👀 Backoff configuration\n\t\t\t\t\t// 👀 Factor to increase the wait time between retries.\n\t\t\t\t\tSetRetryBackoffFactor(2.0).\n\t\t\t\t\t// 👀 Maximum number of seconds to wait between retries\n\t\t\t\t\t// This sequence will be 2s, 4s, 8s, 16s, 32s, 60s... due to the maxSeconds limit\n\t\t\t\t\tSetRetryMaxBackoffSeconds(60),\n\t\t\t},\n\t\t},\n\t)\n\n\t// ...\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error starting worker: %w\", err))\n\t}\n\n\t<-interruptCtx.Done()\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n\n\t// ,\n}\n\n", - "source": "out/go/z_v0/retries-with-backoff/main.go", - "blocks": { - "backoff": { - "start": 18, - "stop": 98 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/retries/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/retries/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/retries/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/retries/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/retries/main.ts deleted file mode 100644 index f5578f659..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/retries/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \"user-create\", nil\n}\n\ntype retryWorkflow struct {\n\tretries int\n}\n\nfunc (r *retryWorkflow) StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &userCreateEvent{}\n\n\terr = 
ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif r.retries < 2 {\n\t\tr.retries++\n\t\treturn nil, fmt.Errorf(\"error\")\n\t}\n\n\tlog.Printf(\"finished step-one\")\n\treturn &stepOneOutput{\n\t\tMessage: \"Username is: \" + input.Username,\n\t}, nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\ttestSvc := w.NewService(\"test\")\n\n\twk := &retryWorkflow{}\n\n\terr = testSvc.On(\n\t\tworker.Events(\"user:create:simple\"),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"simple\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(wk.StepOne).SetName(\"step-one\").SetRetries(4),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \"echo-test\",\n\t\tUserID: \"1234\",\n\t\tData: map[string]string{\n\t\t\t\"test\": \"test\",\n\t\t},\n\t}\n\n\tlog.Printf(\"pushing event user:create:simple\")\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t\"user:create:simple\",\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\"error pushing event: %w\", err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\treturn fmt.Errorf(\"error cleaning up worker: %w\", err)\n\t}\n\n\treturn nil\n}\n", - "source": "out/go/z_v0/retries/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/scheduled/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/scheduled/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/scheduled/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/scheduled/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/scheduled/main.ts deleted file mode 100644 index cacd2edc6..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/scheduled/main.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\n// > Create\n// ... normal workflow definition\ntype printOutput struct{}\n\nfunc print(ctx context.Context) (result *printOutput, err error) {\n\tfmt.Println(\"called print:print\")\n\n\treturn &printOutput{}, nil\n}\n\n// ,\nfunc main() {\n\t// ... 
initialize client, worker and workflow\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: \"schedule-workflow\",\n\t\t\tDescription: \"Demonstrates a simple scheduled workflow\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(print),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// ,\n\n\tgo func() {\n\t\t// 👀 define the scheduled workflow to run in a minute\n\t\tschedule, err := c.Schedule().Create(\n\t\t\tcontext.Background(),\n\t\t\t\"schedule-workflow\",\n\t\t\t&client.ScheduleOpts{\n\t\t\t\t// 👀 define the time to run the scheduled workflow, in UTC\n\t\t\t\tTriggerAt: time.Now().UTC().Add(time.Minute),\n\t\t\t\tInput: map[string]interface{}{\n\t\t\t\t\t\"message\": \"Hello, world!\",\n\t\t\t\t},\n\t\t\t\tAdditionalMetadata: map[string]string{},\n\t\t\t},\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tfmt.Println(schedule.TriggerAt, schedule.WorkflowName)\n\t}()\n\n\t// ... wait for interrupt signal\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n\n\t// ,\n}\n\n\nfunc ListScheduledWorkflows() {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > List\n\tschedules, err := c.Schedule().List(context.Background())\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor _, schedule := range *schedules.Rows {\n\t\tfmt.Println(schedule.TriggerAt, schedule.WorkflowName)\n\t}\n}\n\nfunc DeleteScheduledWorkflow(id string) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// > Delete\n\t// 👀 id is the schedule's metadata id, can get it via schedule.Metadata.Id\n\terr = c.Schedule().Delete(context.Background(), id)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n", - "source": "out/go/z_v0/scheduled/main.go", - "blocks": { - "create": { - "start": 16, - "stop": 107 - }, - "list": { - "start": 117, - "stop": 117 - }, - "delete": { - "start": 136, - "stop": 137 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/simple/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/simple/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/simple/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/simple/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/simple/main.ts deleted file mode 100644 index 3d1dd920f..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/simple/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string 
`json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\"user:create:simple\"),\n\t\t\tName: \"simple\",\n\t\t\tDescription: \"This runs after an update to the user model.\",\n\t\t\tConcurrency: worker.Expression(\"input.user_id\"),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-one\")\n\t\t\t\t\tevents <- \"step-one\"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Username is: \" + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\"step-one\"),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\"step-one\", input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\"step-two\")\n\t\t\t\t\tevents <- \"step-two\"\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \"Above message is: \" + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\"step-two\").AddParents(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\"pushing event user:create:simple\")\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"user:create:simple\",\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t\"hello\": \"world\",\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/simple/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/stream-event-by-meta/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/stream-event-by-meta/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/stream-event-by-meta/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/stream-event-by-meta/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/stream-event-by-meta/main.ts deleted file mode 100644 index 46697c6da..000000000 --- 
a/frontend/docs/lib/generated/snips/go/z_v0/stream-event-by-meta/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"math/rand/v2\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype streamEventInput struct {\n\tIndex int `json:\"index\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &streamEventInput{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tctx.StreamEvent([]byte(fmt.Sprintf(\"This is a stream event %d\", input.Index)))\n\n\treturn &stepOneOutput{\n\t\tMessage: fmt.Sprintf(\"This ran at %s\", time.Now().String()),\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"stream-event-workflow\",\n\t\t\tDescription: \"This sends a stream event.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\t_, err = w.Start()\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error starting worker: %w\", err))\n\t}\n\n\t// Generate a random number between 1 and 100\n\tstreamKey := \"streamKey\"\n\tstreamValue := fmt.Sprintf(\"stream-event-%d\", rand.IntN(100)+1)\n\n\t_, err = c.Admin().RunWorkflow(\"stream-event-workflow\", &streamEventInput{\n\t\tIndex: 0,\n\t},\n\t\tclient.WithRunMetadata(map[string]interface{}{\n\t\t\tstreamKey: streamValue,\n\t\t}),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = c.Subscribe().StreamByAdditionalMetadata(interruptCtx, streamKey, streamValue, func(event client.StreamEvent) error {\n\t\tfmt.Println(string(event.Message))\n\t\treturn nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n", - "source": "out/go/z_v0/stream-event-by-meta/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/stream-event/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/stream-event/index.ts deleted file mode 100644 index 0bc90ab47..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/stream-event/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import main from './main'; - -export { main } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/stream-event/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/stream-event/main.ts deleted file mode 100644 index 0f6b52e3e..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/stream-event/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport 
(\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/cmdutils\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype streamEventInput struct {\n\tIndex int `json:\"index\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &streamEventInput{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tctx.StreamEvent([]byte(fmt.Sprintf(\"This is a stream event %d\", input.Index)))\n\n\treturn &stepOneOutput{\n\t\tMessage: fmt.Sprintf(\"This ran at %s\", time.Now().String()),\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.On(\n\t\tworker.NoTrigger(),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \"stream-event-workflow\",\n\t\t\tDescription: \"This sends a stream event.\",\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(StepOne).SetName(\"step-one\"),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\t_, err = w.Start()\n\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error cleaning up: %w\", err))\n\t}\n\n\tworkflow, err := c.Admin().RunWorkflow(\"stream-event-workflow\", &streamEventInput{\n\t\tIndex: 0,\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = c.Subscribe().Stream(interruptCtx, workflow.WorkflowRunId(), func(event client.StreamEvent) error {\n\t\tfmt.Println(string(event.Message))\n\n\t\treturn nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n", - "source": "out/go/z_v0/stream-event/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/timeout/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/timeout/index.ts deleted file mode 100644 index a265089e4..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/timeout/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main } -export { run } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/timeout/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/timeout/main.ts deleted file mode 100644 index 325c6b179..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/timeout/main.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"time\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\n\t// > TimeoutStep\n\tcleanup, err := run(events, worker.WorkflowJob{\n\t\tName: \"timeout\",\n\t\tDescription: \"timeout\",\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) 
{\n\t\t\t\ttime.Sleep(time.Second * 60)\n\t\t\t\treturn nil, nil\n\t\t\t}).SetName(\"step-one\").SetTimeout(\"10s\"),\n\t\t},\n\t})\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-events\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\"cleanup() error = %v\", err))\n\t}\n}\n", - "source": "out/go/z_v0/timeout/main.go", - "blocks": { - "timeoutstep": { - "start": 31, - "stop": 40 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/timeout/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/timeout/run.ts deleted file mode 100644 index 0a7d2a6ad..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/timeout/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"fmt\"\n\t\"log\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\nfunc run(done chan<- string, job worker.WorkflowJob) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating client: %w\", err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error creating worker: %w\", err)\n\t}\n\n\terr = w.On(\n\t\tworker.Events(\"user:create:timeout\"),\n\t\t&job,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error registering workflow: %w\", err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\"pushing event\")\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \"echo-test\",\n\t\t\tUserID: \"1234\",\n\t\t\tData: map[string]string{\n\t\t\t\t\"test\": \"test\",\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\"user:create:timeout\",\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\"error pushing event: %w\", err))\n\t\t}\n\n\t\ttime.Sleep(20 * time.Second)\n\n\t\tdone <- \"done\"\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\"error starting worker: %w\", err)\n\t}\n\n\treturn cleanup, nil\n}\n", - "source": "out/go/z_v0/timeout/run.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/webhook/index.ts b/frontend/docs/lib/generated/snips/go/z_v0/webhook/index.ts deleted file mode 100644 index a265089e4..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/webhook/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import main from './main'; -import run from './run'; - -export { main } -export { run } diff --git a/frontend/docs/lib/generated/snips/go/z_v0/webhook/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/webhook/main.ts deleted file mode 100644 index 91a53266b..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/webhook/main.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"fmt\"\n\t\"log\"\n\n\t\"github.com/joho/godotenv\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\"username\"`\n\tUserID string `json:\"user_id\"`\n\tData map[string]string `json:\"data\"`\n}\n\ntype output struct {\n\tMessage string `json:\"message\"`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err 
!= nil {\n\t\tpanic(err)\n\t}\n\n\tc, err := client.New()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error creating client: %w\", err))\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\"error creating worker: %w\", err))\n\t}\n\n\tworkflow := \"webhook\"\n\tevent := \"user:create:webhook\"\n\twf := &worker.WorkflowJob{\n\t\tName: workflow,\n\t\tDescription: workflow,\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *output, err error) {\n\t\t\t\tlog.Printf(\"step name: %s\", ctx.StepName())\n\t\t\t\treturn &output{\n\t\t\t\t\tMessage: \"hi from \" + ctx.StepName(),\n\t\t\t\t}, nil\n\t\t\t}).SetName(\"webhook-step-one\").SetTimeout(\"10s\"),\n\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *output, err error) {\n\t\t\t\tlog.Printf(\"step name: %s\", ctx.StepName())\n\t\t\t\treturn &output{\n\t\t\t\t\tMessage: \"hi from \" + ctx.StepName(),\n\t\t\t\t}, nil\n\t\t\t}).SetName(\"webhook-step-two\").SetTimeout(\"10s\"),\n\t\t},\n\t}\n\n\thandler := w.WebhookHttpHandler(worker.WebhookHandlerOptions{\n\t\tSecret: \"secret\",\n\t}, wf)\n\tport := \"8741\"\n\terr = run(\"webhook-demo\", w, port, handler, c, workflow, event)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n", - "source": "out/go/z_v0/webhook/main.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/go/z_v0/webhook/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/webhook/run.ts deleted file mode 100644 index d3d341c94..000000000 --- a/frontend/docs/lib/generated/snips/go/z_v0/webhook/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "go", - "content": "package main\n\nimport (\n\t\"context\"\n\t\"errors\"\n\t\"fmt\"\n\t\"log\"\n\t\"net/http\"\n\t\"time\"\n\n\t\"github.com/hatchet-dev/hatchet/pkg/client\"\n\t\"github.com/hatchet-dev/hatchet/pkg/worker\"\n)\n\nfunc run(\n\tname string,\n\tw *worker.Worker,\n\tport string,\n\thandler func(w http.ResponseWriter, r *http.Request), c client.Client, workflow string, event string,\n) error {\n\t// create webserver to handle webhook requests\n\tmux := http.NewServeMux()\n\n\t// Register the webhook handler at the /webhook route\n\tmux.HandleFunc(\"/webhook\", handler)\n\n\t// Create a custom server\n\tserver := &http.Server{\n\t\tAddr: \":\" + port,\n\t\tHandler: mux,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tIdleTimeout: 15 * time.Second,\n\t}\n\n\tdefer func(server *http.Server, ctx context.Context) {\n\t\terr := server.Shutdown(ctx)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}(server, context.Background())\n\n\tgo func() {\n\t\tif err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tsecret := \"secret\"\n\tif err := w.RegisterWebhook(worker.RegisterWebhookWorkerOpts{\n\t\tName: \"test-\" + name,\n\t\tURL: fmt.Sprintf(\"http://localhost:%s/webhook\", port),\n\t\tSecret: &secret,\n\t}); err != nil {\n\t\treturn fmt.Errorf(\"error setting up webhook: %w\", err)\n\t}\n\n\ttime.Sleep(30 * time.Second)\n\n\tlog.Printf(\"pushing event\")\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \"echo-test\",\n\t\tUserID: \"1234\",\n\t\tData: map[string]string{\n\t\t\t\"test\": \"test\",\n\t\t},\n\t}\n\n\t// push an event\n\terr := c.Event().Push(\n\t\tcontext.Background(),\n\t\tevent,\n\t\ttestEvent,\n\t)\n\tif err != nil 
{\n\t\treturn fmt.Errorf(\"error pushing event: %w\", err)\n\t}\n\n\ttime.Sleep(5 * time.Second)\n\n\treturn nil\n}\n", - "source": "out/go/z_v0/webhook/run.go", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/index.ts b/frontend/docs/lib/generated/snips/index.ts deleted file mode 100644 index 60865d89f..000000000 --- a/frontend/docs/lib/generated/snips/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import types from './types'; -import * as go from './go'; -import * as python from './python'; -import * as typescript from './typescript'; - -export { types } -export { go }; -export { python }; -export { typescript }; diff --git a/frontend/docs/lib/generated/snips/python/__init__.ts b/frontend/docs/lib/generated/snips/python/__init__.ts deleted file mode 100644 index 310afdd85..000000000 --- a/frontend/docs/lib/generated/snips/python/__init__.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "", - "source": "out/python/__init__.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/affinity_workers/index.ts b/frontend/docs/lib/generated/snips/python/affinity_workers/index.ts deleted file mode 100644 index c443f556e..000000000 --- a/frontend/docs/lib/generated/snips/python/affinity_workers/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import trigger from './trigger'; -import worker from './worker'; - -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/affinity_workers/trigger.ts b/frontend/docs/lib/generated/snips/python/affinity_workers/trigger.ts deleted file mode 100644 index c924bd072..000000000 --- a/frontend/docs/lib/generated/snips/python/affinity_workers/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.affinity_workers.worker import affinity_worker_workflow\nfrom hatchet_sdk import TriggerWorkflowOptions\n\naffinity_worker_workflow.run(\n options=TriggerWorkflowOptions(additional_metadata={\"hello\": \"moon\"}),\n)\n", - "source": "out/python/affinity_workers/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/affinity_workers/worker.ts b/frontend/docs/lib/generated/snips/python/affinity_workers/worker.ts deleted file mode 100644 index 4cfd03976..000000000 --- a/frontend/docs/lib/generated/snips/python/affinity_workers/worker.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import Context, EmptyModel, Hatchet, WorkerLabelComparator\nfrom hatchet_sdk.labels import DesiredWorkerLabel\n\nhatchet = Hatchet(debug=True)\n\n\n# > AffinityWorkflow\n\naffinity_worker_workflow = hatchet.workflow(name=\"AffinityWorkflow\")\n\n\n@affinity_worker_workflow.task(\n desired_worker_labels={\n \"model\": DesiredWorkerLabel(value=\"fancy-ai-model-v2\", weight=10),\n \"memory\": DesiredWorkerLabel(\n value=256,\n required=True,\n comparator=WorkerLabelComparator.LESS_THAN,\n ),\n },\n)\n\n\n\n# > AffinityTask\nasync def step(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n if ctx.worker.labels().get(\"model\") != \"fancy-ai-model-v2\":\n ctx.worker.upsert_labels({\"model\": \"unset\"})\n # DO WORK 
TO EVICT OLD MODEL / LOAD NEW MODEL\n ctx.worker.upsert_labels({\"model\": \"fancy-ai-model-v2\"})\n\n return {\"worker\": ctx.worker.id()}\n\n\n\n\ndef main() -> None:\n\n # > AffinityWorker\n worker = hatchet.worker(\n \"affinity-worker\",\n slots=10,\n labels={\n \"model\": \"fancy-ai-model-v2\",\n \"memory\": 512,\n },\n workflows=[affinity_worker_workflow],\n )\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/affinity_workers/worker.py", - "blocks": { - "affinityworkflow": { - "start": 8, - "stop": 22 - }, - "affinitytask": { - "start": 26, - "stop": 34 - }, - "affinityworker": { - "start": 40, - "stop": 51 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/api/api.ts b/frontend/docs/lib/generated/snips/python/api/api.ts deleted file mode 100644 index 841930559..000000000 --- a/frontend/docs/lib/generated/snips/python/api/api.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n workflow_list = hatchet.workflows.list()\n rows = workflow_list.rows or []\n\n for workflow in rows:\n print(workflow.name)\n print(workflow.metadata.id)\n print(workflow.metadata.created_at)\n print(workflow.metadata.updated_at)\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/api/api.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/api/async_api.ts b/frontend/docs/lib/generated/snips/python/api/async_api.ts deleted file mode 100644 index 256c71b13..000000000 --- a/frontend/docs/lib/generated/snips/python/api/async_api.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nasync def main() -> None:\n workflow_list = await hatchet.workflows.aio_list()\n rows = workflow_list.rows or []\n\n for workflow in rows:\n print(workflow.name)\n print(workflow.metadata.id)\n print(workflow.metadata.created_at)\n print(workflow.metadata.updated_at)\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/api/async_api.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/api/index.ts b/frontend/docs/lib/generated/snips/python/api/index.ts deleted file mode 100644 index 3c70196ed..000000000 --- a/frontend/docs/lib/generated/snips/python/api/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import api from './api'; -import async_api from './async_api'; - -export { api } -export { async_api } diff --git a/frontend/docs/lib/generated/snips/python/blocked_async/blocking_example_trigger.ts b/frontend/docs/lib/generated/snips/python/blocked_async/blocking_example_trigger.ts deleted file mode 100644 index 2e6a93bba..000000000 --- a/frontend/docs/lib/generated/snips/python/blocked_async/blocking_example_trigger.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "# > Trigger\nimport time\n\nfrom examples.blocked_async.blocking_example_worker import (\n blocking,\n non_blocking_async,\n 
non_blocking_sync,\n)\n\nnon_blocking_sync.run_no_wait()\nnon_blocking_async.run_no_wait()\n\ntime.sleep(1)\n\nblocking.run_no_wait()\n\ntime.sleep(1)\n\nnon_blocking_sync.run_no_wait()\n\n", - "source": "out/python/blocked_async/blocking_example_trigger.py", - "blocks": { - "trigger": { - "start": 2, - "stop": 20 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/blocked_async/blocking_example_worker.ts b/frontend/docs/lib/generated/snips/python/blocked_async/blocking_example_worker.ts deleted file mode 100644 index e1ab83306..000000000 --- a/frontend/docs/lib/generated/snips/python/blocked_async/blocking_example_worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "# > Worker\nimport asyncio\nimport time\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet()\n\nSLEEP_TIME = 6\n\n\n@hatchet.task()\nasync def non_blocking_async(input: EmptyModel, ctx: Context) -> None:\n for i in range(SLEEP_TIME):\n print(\"Non blocking async\", i)\n await asyncio.sleep(1)\n\n\n@hatchet.task()\ndef non_blocking_sync(input: EmptyModel, ctx: Context) -> None:\n for i in range(SLEEP_TIME):\n print(\"Non blocking sync\", i)\n time.sleep(1)\n\n\n@hatchet.task()\nasync def blocking(input: EmptyModel, ctx: Context) -> None:\n for i in range(SLEEP_TIME):\n print(\"Blocking\", i)\n time.sleep(1)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"test-worker\", workflows=[non_blocking_async, non_blocking_sync, blocking]\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/blocked_async/blocking_example_worker.py", - "blocks": { - "worker": { - "start": 2, - "stop": 32 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/blocked_async/debugging.ts b/frontend/docs/lib/generated/snips/python/blocked_async/debugging.ts deleted file mode 100644 index 7f1a3d668..000000000 --- a/frontend/docs/lib/generated/snips/python/blocked_async/debugging.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "# > Functions\nimport asyncio\nimport time\n\nSLEEP_TIME = 3\n\n\nasync def blocking() -> None:\n for i in range(SLEEP_TIME):\n print(\"Blocking\", i)\n time.sleep(1)\n\n\nasync def non_blocking(task_id: str = \"Non-blocking\") -> None:\n for i in range(SLEEP_TIME):\n print(task_id, i)\n await asyncio.sleep(1)\n\n\n\n\n# > Blocked\nasync def blocked() -> None:\n loop = asyncio.get_event_loop()\n\n await asyncio.gather(\n *[\n loop.create_task(blocking()),\n loop.create_task(non_blocking()),\n ]\n )\n\n\n\n\n# > Unblocked\nasync def working() -> None:\n loop = asyncio.get_event_loop()\n\n await asyncio.gather(\n *[\n loop.create_task(non_blocking(\"A\")),\n loop.create_task(non_blocking(\"B\")),\n ]\n )\n\n\n\n\nif __name__ == \"__main__\":\n asyncio.run(blocked())\n asyncio.run(working())\n", - "source": "out/python/blocked_async/debugging.py", - "blocks": { - "functions": { - "start": 2, - "stop": 19 - }, - "blocked": { - "start": 23, - "stop": 33 - }, - "unblocked": { - "start": 37, - "stop": 47 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/blocked_async/index.ts b/frontend/docs/lib/generated/snips/python/blocked_async/index.ts deleted file mode 100644 index 
bb3e8e9c3..000000000 --- a/frontend/docs/lib/generated/snips/python/blocked_async/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import blocking_example_trigger from './blocking_example_trigger'; -import blocking_example_worker from './blocking_example_worker'; -import debugging from './debugging'; -import trigger from './trigger'; -import worker from './worker'; - -export { blocking_example_trigger } -export { blocking_example_worker } -export { debugging } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/blocked_async/trigger.ts b/frontend/docs/lib/generated/snips/python/blocked_async/trigger.ts deleted file mode 100644 index b2ed1c75f..000000000 --- a/frontend/docs/lib/generated/snips/python/blocked_async/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.blocked_async.worker import blocked_worker_workflow\nfrom hatchet_sdk import TriggerWorkflowOptions\n\nblocked_worker_workflow.run(\n options=TriggerWorkflowOptions(additional_metadata={\"hello\": \"moon\"}),\n)\n", - "source": "out/python/blocked_async/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/blocked_async/worker.ts b/frontend/docs/lib/generated/snips/python/blocked_async/worker.ts deleted file mode 100644 index 4f170f21d..000000000 --- a/frontend/docs/lib/generated/snips/python/blocked_async/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import hashlib\nimport time\nfrom datetime import timedelta\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n# WARNING: this is an example of what NOT to do\n# This workflow is intentionally blocking the main thread\n# and will block the worker from processing other workflows\n#\n# You do not want to run long sync functions in an async def function\n\nblocked_worker_workflow = hatchet.workflow(name=\"Blocked\")\n\n\n@blocked_worker_workflow.task(execution_timeout=timedelta(seconds=11), retries=3)\nasync def step1(input: EmptyModel, ctx: Context) -> dict[str, str | int | float]:\n print(\"Executing step1\")\n\n # CPU-bound task: Calculate a large number of SHA-256 hashes\n start_time = time.time()\n iterations = 10_000_000\n for i in range(iterations):\n hashlib.sha256(f\"data{i}\".encode()).hexdigest()\n\n end_time = time.time()\n execution_time = end_time - start_time\n\n print(f\"Completed {iterations} hash calculations in {execution_time:.2f} seconds\")\n\n return {\n \"step1\": \"step1\",\n \"iterations\": iterations,\n \"execution_time\": execution_time,\n }\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"blocked-worker\", slots=3, workflows=[blocked_worker_workflow]\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/blocked_async/worker.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/bulk_fanout/bulk_trigger.ts b/frontend/docs/lib/generated/snips/python/bulk_fanout/bulk_trigger.ts deleted file mode 100644 index e2141ac55..000000000 --- a/frontend/docs/lib/generated/snips/python/bulk_fanout/bulk_trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": 
"import asyncio\n\nfrom examples.bulk_fanout.worker import ParentInput, bulk_parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\nhatchet = Hatchet()\n\n\nasync def main() -> None:\n results = bulk_parent_wf.run_many(\n workflows=[\n bulk_parent_wf.create_bulk_run_item(\n input=ParentInput(n=i),\n options=TriggerWorkflowOptions(\n additional_metadata={\n \"bulk-trigger\": i,\n \"hello-{i}\": \"earth-{i}\",\n }\n ),\n )\n for i in range(20)\n ],\n )\n\n for result in results:\n print(result)\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/bulk_fanout/bulk_trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/bulk_fanout/index.ts b/frontend/docs/lib/generated/snips/python/bulk_fanout/index.ts deleted file mode 100644 index 230668c94..000000000 --- a/frontend/docs/lib/generated/snips/python/bulk_fanout/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import bulk_trigger from './bulk_trigger'; -import stream from './stream'; -import test_bulk_fanout from './test_bulk_fanout'; -import trigger from './trigger'; -import worker from './worker'; - -export { bulk_trigger } -export { stream } -export { test_bulk_fanout } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/bulk_fanout/stream.ts b/frontend/docs/lib/generated/snips/python/bulk_fanout/stream.ts deleted file mode 100644 index 22cbebac3..000000000 --- a/frontend/docs/lib/generated/snips/python/bulk_fanout/stream.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nimport random\n\nfrom examples.bulk_fanout.worker import ParentInput, bulk_parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\n\nasync def main() -> None:\n hatchet = Hatchet()\n\n # Generate a random stream key to use to track all\n # stream events for this workflow run.\n\n streamKey = \"streamKey\"\n streamVal = f\"sk-{random.randint(1, 100)}\"\n\n # Specify the stream key as additional metadata\n # when running the workflow.\n\n # This key gets propagated to all child workflows\n # and can have an arbitrary property name.\n bulk_parent_wf.run(\n input=ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={streamKey: streamVal}),\n )\n\n # Stream all events for the additional meta key value\n listener = hatchet.listener.stream_by_additional_metadata(streamKey, streamVal)\n\n async for event in listener:\n print(event.type, event.payload)\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/bulk_fanout/stream.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/bulk_fanout/test_bulk_fanout.ts b/frontend/docs/lib/generated/snips/python/bulk_fanout/test_bulk_fanout.ts deleted file mode 100644 index dc1a2cc43..000000000 --- a/frontend/docs/lib/generated/snips/python/bulk_fanout/test_bulk_fanout.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import pytest\n\nfrom examples.bulk_fanout.worker import ParentInput, bulk_parent_wf\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_run() -> None:\n result = await bulk_parent_wf.aio_run(input=ParentInput(n=12))\n\n assert 
len(result[\"spawn\"][\"results\"]) == 12\n", - "source": "out/python/bulk_fanout/test_bulk_fanout.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/bulk_fanout/trigger.ts b/frontend/docs/lib/generated/snips/python/bulk_fanout/trigger.ts deleted file mode 100644 index 2544e26d5..000000000 --- a/frontend/docs/lib/generated/snips/python/bulk_fanout/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.bulk_fanout.worker import ParentInput, bulk_parent_wf\nfrom hatchet_sdk import TriggerWorkflowOptions\n\nbulk_parent_wf.run(\n ParentInput(n=999),\n TriggerWorkflowOptions(additional_metadata={\"no-dedupe\": \"world\"}),\n)\n", - "source": "out/python/bulk_fanout/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/bulk_fanout/worker.ts b/frontend/docs/lib/generated/snips/python/bulk_fanout/worker.ts deleted file mode 100644 index af45cae36..000000000 --- a/frontend/docs/lib/generated/snips/python/bulk_fanout/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from datetime import timedelta\nfrom typing import Any\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\nhatchet = Hatchet(debug=True)\n\n\nclass ParentInput(BaseModel):\n n: int = 100\n\n\nclass ChildInput(BaseModel):\n a: str\n\n\nbulk_parent_wf = hatchet.workflow(name=\"BulkFanoutParent\", input_validator=ParentInput)\nbulk_child_wf = hatchet.workflow(name=\"BulkFanoutChild\", input_validator=ChildInput)\n\n\n# > BulkFanoutParent\n@bulk_parent_wf.task(execution_timeout=timedelta(minutes=5))\nasync def spawn(input: ParentInput, ctx: Context) -> dict[str, list[dict[str, Any]]]:\n # 👀 Create each workflow run to spawn\n child_workflow_runs = [\n bulk_child_wf.create_bulk_run_item(\n input=ChildInput(a=str(i)),\n key=f\"child{i}\",\n options=TriggerWorkflowOptions(additional_metadata={\"hello\": \"earth\"}),\n )\n for i in range(input.n)\n ]\n\n # 👀 Run workflows in bulk to improve performance\n spawn_results = await bulk_child_wf.aio_run_many(child_workflow_runs)\n\n return {\"results\": spawn_results}\n\n\n\n\n@bulk_child_wf.task()\ndef process(input: ChildInput, ctx: Context) -> dict[str, str]:\n print(f\"child process {input.a}\")\n return {\"status\": \"success \" + input.a}\n\n\n@bulk_child_wf.task()\ndef process2(input: ChildInput, ctx: Context) -> dict[str, str]:\n print(\"child process2\")\n return {\"status2\": \"success\"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"fanout-worker\", slots=40, workflows=[bulk_parent_wf, bulk_child_wf]\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/bulk_fanout/worker.py", - "blocks": { - "bulkfanoutparent": { - "start": 25, - "stop": 42 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/bulk_operations/cancel.ts b/frontend/docs/lib/generated/snips/python/bulk_operations/cancel.ts deleted file mode 100644 index ff5b1c727..000000000 --- a/frontend/docs/lib/generated/snips/python/bulk_operations/cancel.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const 
snippet: Snippet = { - "language": "python", - "content": "# > Setup\n\nfrom datetime import datetime, timedelta, timezone\n\nfrom hatchet_sdk import BulkCancelReplayOpts, Hatchet, RunFilter, V1TaskStatus\n\nhatchet = Hatchet()\n\nworkflows = hatchet.workflows.list()\n\nassert workflows.rows\n\nworkflow = workflows.rows[0]\n\n\n# > List runs\nworkflow_runs = hatchet.runs.list(workflow_ids=[workflow.metadata.id])\n\n# > Cancel by run ids\nworkflow_run_ids = [workflow_run.metadata.id for workflow_run in workflow_runs.rows]\n\nbulk_cancel_by_ids = BulkCancelReplayOpts(ids=workflow_run_ids)\n\nhatchet.runs.bulk_cancel(bulk_cancel_by_ids)\n\n# > Cancel by filters\n\nbulk_cancel_by_filters = BulkCancelReplayOpts(\n filters=RunFilter(\n since=datetime.today() - timedelta(days=1),\n until=datetime.now(tz=timezone.utc),\n statuses=[V1TaskStatus.RUNNING],\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\"key\": \"value\"},\n )\n)\n\nhatchet.runs.bulk_cancel(bulk_cancel_by_filters)\n", - "source": "out/python/bulk_operations/cancel.py", - "blocks": { - "setup": { - "start": 2, - "stop": 14 - }, - "list_runs": { - "start": 17, - "stop": 17 - }, - "cancel_by_run_ids": { - "start": 20, - "stop": 24 - }, - "cancel_by_filters": { - "start": 27, - "stop": 38 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/bulk_operations/index.ts b/frontend/docs/lib/generated/snips/python/bulk_operations/index.ts deleted file mode 100644 index 3a751f719..000000000 --- a/frontend/docs/lib/generated/snips/python/bulk_operations/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import cancel from './cancel'; -import replay from './replay'; -import test_bulk_replay from './test_bulk_replay'; -import worker from './worker'; - -export { cancel } -export { replay } -export { test_bulk_replay } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/bulk_operations/replay.ts b/frontend/docs/lib/generated/snips/python/bulk_operations/replay.ts deleted file mode 100644 index 953953336..000000000 --- a/frontend/docs/lib/generated/snips/python/bulk_operations/replay.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "# > Setup\n\nfrom datetime import datetime, timedelta, timezone\n\nfrom hatchet_sdk import BulkCancelReplayOpts, Hatchet, RunFilter, V1TaskStatus\n\nhatchet = Hatchet()\n\nworkflows = hatchet.workflows.list()\n\nassert workflows.rows\n\nworkflow = workflows.rows[0]\n\n\n# > List runs\nworkflow_runs = hatchet.runs.list(workflow_ids=[workflow.metadata.id])\n\n# > Replay by run ids\nworkflow_run_ids = [workflow_run.metadata.id for workflow_run in workflow_runs.rows]\n\nbulk_replay_by_ids = BulkCancelReplayOpts(ids=workflow_run_ids)\n\nhatchet.runs.bulk_replay(bulk_replay_by_ids)\n\n# > Replay by filters\nbulk_replay_by_filters = BulkCancelReplayOpts(\n filters=RunFilter(\n since=datetime.today() - timedelta(days=1),\n until=datetime.now(tz=timezone.utc),\n statuses=[V1TaskStatus.RUNNING],\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\"key\": \"value\"},\n )\n)\n\nhatchet.runs.bulk_replay(bulk_replay_by_filters)\n", - "source": "out/python/bulk_operations/replay.py", - "blocks": { - "setup": { - "start": 2, - "stop": 14 - }, - "list_runs": { - "start": 17, - "stop": 17 - }, - "replay_by_run_ids": { - "start": 20, - "stop": 24 - }, - "replay_by_filters": { - "start": 27, - "stop": 37 - } - }, - "highlights": {} -}; - 
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/bulk_operations/test_bulk_replay.ts b/frontend/docs/lib/generated/snips/python/bulk_operations/test_bulk_replay.ts
deleted file mode 100644
index e471b9a0b..000000000
--- a/frontend/docs/lib/generated/snips/python/bulk_operations/test_bulk_replay.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\nfrom datetime import datetime, timedelta, timezone\nfrom uuid import uuid4\n\nimport pytest\n\nfrom examples.bulk_operations.worker import (\n bulk_replay_test_1,\n bulk_replay_test_2,\n bulk_replay_test_3,\n)\nfrom hatchet_sdk import BulkCancelReplayOpts, Hatchet, RunFilter, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_bulk_replay(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n n = 100\n\n with pytest.raises(Exception):\n await bulk_replay_test_1.aio_run_many(\n [\n bulk_replay_test_1.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata={\n \"test_run_id\": test_run_id,\n }\n )\n )\n for _ in range(n + 1)\n ]\n )\n\n with pytest.raises(Exception):\n await bulk_replay_test_2.aio_run_many(\n [\n bulk_replay_test_2.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata={\n \"test_run_id\": test_run_id,\n }\n )\n )\n for _ in range((n // 2) - 1)\n ]\n )\n\n with pytest.raises(Exception):\n await bulk_replay_test_3.aio_run_many(\n [\n bulk_replay_test_3.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata={\n \"test_run_id\": test_run_id,\n }\n )\n )\n for _ in range((n // 2) - 2)\n ]\n )\n\n workflow_ids = [\n bulk_replay_test_1.id,\n bulk_replay_test_2.id,\n bulk_replay_test_3.id,\n ]\n\n ## Should result in two batches of replays\n await hatchet.runs.aio_bulk_replay(\n opts=BulkCancelReplayOpts(\n filters=RunFilter(\n workflow_ids=workflow_ids,\n since=datetime.now(tz=timezone.utc) - timedelta(minutes=2),\n additional_metadata={\"test_run_id\": test_run_id},\n )\n )\n )\n\n await asyncio.sleep(10)\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=workflow_ids,\n since=datetime.now(tz=timezone.utc) - timedelta(minutes=2),\n additional_metadata={\"test_run_id\": test_run_id},\n limit=1000,\n )\n\n assert len(runs.rows) == n + 1 + (n // 2 - 1) + (n // 2 - 2)\n\n for run in runs.rows:\n assert run.status == V1TaskStatus.COMPLETED\n assert run.retry_count == 1\n assert run.attempt == 2\n\n assert (\n len([r for r in runs.rows if r.workflow_id == bulk_replay_test_1.id]) == n + 1\n )\n assert (\n len([r for r in runs.rows if r.workflow_id == bulk_replay_test_2.id])\n == n // 2 - 1\n )\n assert (\n len([r for r in runs.rows if r.workflow_id == bulk_replay_test_3.id])\n == n // 2 - 2\n )\n",
-  "source": "out/python/bulk_operations/test_bulk_replay.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/bulk_operations/worker.ts b/frontend/docs/lib/generated/snips/python/bulk_operations/worker.ts
deleted file mode 100644
index bdac8aa4d..000000000
--- a/frontend/docs/lib/generated/snips/python/bulk_operations/worker.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n@hatchet.task()\ndef bulk_replay_test_1(input: EmptyModel, ctx: Context) -> None:\n print(\"retrying bulk replay test task\", ctx.retry_count)\n if ctx.retry_count == 0:\n raise ValueError(\"This is a test error to trigger a retry.\")\n\n\n@hatchet.task()\ndef bulk_replay_test_2(input: EmptyModel, ctx: Context) -> None:\n print(\"retrying bulk replay test task\", ctx.retry_count)\n if ctx.retry_count == 0:\n raise ValueError(\"This is a test error to trigger a retry.\")\n\n\n@hatchet.task()\ndef bulk_replay_test_3(input: EmptyModel, ctx: Context) -> None:\n print(\"retrying bulk replay test task\", ctx.retry_count)\n if ctx.retry_count == 0:\n raise ValueError(\"This is a test error to trigger a retry.\")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"bulk-replay-test-worker\",\n workflows=[bulk_replay_test_1, bulk_replay_test_2, bulk_replay_test_3],\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/bulk_operations/worker.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/cancellation/index.ts b/frontend/docs/lib/generated/snips/python/cancellation/index.ts
deleted file mode 100644
index 9283038ab..000000000
--- a/frontend/docs/lib/generated/snips/python/cancellation/index.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import test_cancellation from './test_cancellation';
-import trigger from './trigger';
-import worker from './worker';
-
-export { test_cancellation }
-export { trigger }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/cancellation/test_cancellation.ts b/frontend/docs/lib/generated/snips/python/cancellation/test_cancellation.ts
deleted file mode 100644
index b8453bd06..000000000
--- a/frontend/docs/lib/generated/snips/python/cancellation/test_cancellation.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\n\nimport pytest\n\nfrom examples.cancellation.worker import cancellation_workflow\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_cancellation(hatchet: Hatchet) -> None:\n ref = await cancellation_workflow.aio_run_no_wait()\n\n \"\"\"Sleep for a long time since we only need cancellation to happen _eventually_\"\"\"\n await asyncio.sleep(10)\n\n for i in range(30):\n run = await hatchet.runs.aio_get(ref.workflow_run_id)\n\n if run.run.status == V1TaskStatus.RUNNING:\n await asyncio.sleep(1)\n continue\n\n assert run.run.status == V1TaskStatus.CANCELLED\n assert not run.run.output\n\n break\n else:\n assert False, \"Workflow run did not cancel in time\"\n",
-  "source": "out/python/cancellation/test_cancellation.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/cancellation/trigger.ts b/frontend/docs/lib/generated/snips/python/cancellation/trigger.ts
deleted file mode 100644
index 02debbd28..000000000
--- a/frontend/docs/lib/generated/snips/python/cancellation/trigger.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import time\n\nfrom examples.cancellation.worker import cancellation_workflow, hatchet\n\nid = cancellation_workflow.run_no_wait()\n\ntime.sleep(5)\n\nhatchet.runs.cancel(id.workflow_run_id)\n",
-  "source": "out/python/cancellation/trigger.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/cancellation/worker.ts b/frontend/docs/lib/generated/snips/python/cancellation/worker.ts
deleted file mode 100644
index 239c0bb86..000000000
--- a/frontend/docs/lib/generated/snips/python/cancellation/worker.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\nimport time\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\ncancellation_workflow = hatchet.workflow(name=\"CancelWorkflow\")\n\n\n# > Self-cancelling task\n@cancellation_workflow.task()\nasync def self_cancel(input: EmptyModel, ctx: Context) -> dict[str, str]:\n await asyncio.sleep(2)\n\n ## Cancel the task\n await ctx.aio_cancel()\n\n await asyncio.sleep(10)\n\n return {\"error\": \"Task should have been cancelled\"}\n\n\n\n\n# > Checking exit flag\n@cancellation_workflow.task()\ndef check_flag(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(3):\n time.sleep(1)\n\n # Note: Checking the status of the exit flag is mostly useful for cancelling\n # sync tasks without needing to forcibly kill the thread they're running on.\n if ctx.exit_flag:\n print(\"Task has been cancelled\")\n raise ValueError(\"Task has been cancelled\")\n\n return {\"error\": \"Task should have been cancelled\"}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\"cancellation-worker\", workflows=[cancellation_workflow])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/cancellation/worker.py",
-  "blocks": {
-    "self_cancelling_task": {
-      "start": 12,
-      "stop": 23
-    },
-    "checking_exit_flag": {
-      "start": 27,
-      "stop": 40
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/child/bulk.ts b/frontend/docs/lib/generated/snips/python/child/bulk.ts
deleted file mode 100644
index 36b9415d1..000000000
--- a/frontend/docs/lib/generated/snips/python/child/bulk.ts
+++ /dev/null
@@ -1,24 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\n\n# > Running a Task\nfrom examples.child.worker import SimpleInput, child_task\n\nchild_task.run(SimpleInput(message=\"Hello, World!\"))\n\n\nasync def main() -> None:\n # > Bulk Run a Task\n greetings = [\"Hello, World!\", \"Hello, Moon!\", \"Hello, Mars!\"]\n\n results = await child_task.aio_run_many(\n [\n # run each greeting as a task in parallel\n child_task.create_bulk_run_item(\n input=SimpleInput(message=greeting),\n )\n for greeting in greetings\n ]\n )\n\n # this will await all results and return a list of results\n print(results)\n\n # > Running Multiple Tasks\n result1 = child_task.aio_run(SimpleInput(message=\"Hello, World!\"))\n result2 = child_task.aio_run(SimpleInput(message=\"Hello, Moon!\"))\n\n # gather the results of the two tasks\n gather_results = await asyncio.gather(result1, result2)\n\n # print the results of the two tasks\n print(gather_results[0][\"transformed_message\"])\n print(gather_results[1][\"transformed_message\"])\n",
-  "source": "out/python/child/bulk.py",
-  "blocks": {
-    "running_a_task": {
-      "start": 4,
-      "stop": 6
-    },
-    "bulk_run_a_task": {
-      "start": 11,
-      "stop": 24
-    },
-    "running_multiple_tasks": {
-      "start": 27,
-      "stop": 35
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/child/index.ts b/frontend/docs/lib/generated/snips/python/child/index.ts
deleted file mode 100644
index 33e46672b..000000000
--- a/frontend/docs/lib/generated/snips/python/child/index.ts
+++ /dev/null
@@ -1,9 +0,0 @@
-import bulk from './bulk';
-import simple_fanout from './simple-fanout';
-import trigger from './trigger';
-import worker from './worker';
-
-export { bulk }
-export { simple_fanout }
-export { trigger }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/child/simple-fanout.ts b/frontend/docs/lib/generated/snips/python/child/simple-fanout.ts
deleted file mode 100644
index 03ac69671..000000000
--- a/frontend/docs/lib/generated/snips/python/child/simple-fanout.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from typing import Any\n\nfrom examples.child.worker import SimpleInput, child_task\nfrom hatchet_sdk.context.context import Context\nfrom hatchet_sdk.hatchet import Hatchet\nfrom hatchet_sdk.runnables.types import EmptyModel\n\nhatchet = Hatchet(debug=True)\n\n\n# > Running a Task from within a Task\n@hatchet.task(name=\"SpawnTask\")\nasync def spawn(input: EmptyModel, ctx: Context) -> dict[str, Any]:\n # Simply run the task with the input we received\n result = await child_task.aio_run(\n input=SimpleInput(message=\"Hello, World!\"),\n )\n\n return {\"results\": result}\n\n\n",
-  "source": "out/python/child/simple-fanout.py",
-  "blocks": {
-    "running_a_task_from_within_a_task": {
-      "start": 12,
-      "stop": 21
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/child/trigger.ts b/frontend/docs/lib/generated/snips/python/child/trigger.ts
deleted file mode 100644
index bdc40c50b..000000000
--- a/frontend/docs/lib/generated/snips/python/child/trigger.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "# ruff: noqa: E402\n\nimport asyncio\n\n# > Running a Task\nfrom examples.child.worker import SimpleInput, child_task\n\nchild_task.run(SimpleInput(message=\"Hello, World!\"))\n\n# > Schedule a Task\nfrom datetime import datetime, timedelta, timezone\n\nchild_task.schedule(\n datetime.now(tz=timezone.utc) + timedelta(minutes=5),\n SimpleInput(message=\"Hello, World!\"),\n)\n\n\nasync def main() -> None:\n # > Running a Task AIO\n result = await child_task.aio_run(SimpleInput(message=\"Hello, World!\"))\n\n print(result)\n\n # > Running Multiple Tasks\n result1 = child_task.aio_run(SimpleInput(message=\"Hello, World!\"))\n result2 = child_task.aio_run(SimpleInput(message=\"Hello, Moon!\"))\n\n # gather the results of the two tasks\n results = await asyncio.gather(result1, result2)\n\n # print the results of the two tasks\n print(results[0][\"transformed_message\"])\n print(results[1][\"transformed_message\"])\n",
-  "source": "out/python/child/trigger.py",
-  "blocks": {
-    "running_a_task": {
-      "start": 6,
-      "stop": 8
-    },
-    "schedule_a_task": {
-      "start": 11,
-      "stop": 16
-    },
-    "running_a_task_aio": {
-      "start": 21,
-      "stop": 21
-    },
-    "running_multiple_tasks": {
-      "start": 26,
-      "stop": 34
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/child/worker.ts b/frontend/docs/lib/generated/snips/python/child/worker.ts
deleted file mode 100644
index a7595e2bd..000000000
--- a/frontend/docs/lib/generated/snips/python/child/worker.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "# > Simple\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass SimpleInput(BaseModel):\n message: str\n\n\nclass SimpleOutput(BaseModel):\n transformed_message: str\n\n\nchild_task = hatchet.workflow(name=\"SimpleWorkflow\", input_validator=SimpleInput)\n\n\n@child_task.task(name=\"step1\")\ndef step1(input: SimpleInput, ctx: Context) -> SimpleOutput:\n print(\"executed step1: \", input.message)\n return SimpleOutput(transformed_message=input.message.upper())\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\"test-worker\", slots=1, workflows=[child_task])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/child/worker.py",
-  "blocks": {
-    "simple": {
-      "start": 2,
-      "stop": 26
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit/index.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit/index.ts
deleted file mode 100644
index c443f556e..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import trigger from './trigger';
-import worker from './worker';
-
-export { trigger }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit/trigger.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit/trigger.ts
deleted file mode 100644
index 34cca966f..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit/trigger.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from examples.concurrency_limit.worker import WorkflowInput, concurrency_limit_workflow\n\nconcurrency_limit_workflow.run(WorkflowInput(group_key=\"test\", run=1))\n",
-  "source": "out/python/concurrency_limit/trigger.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit/worker.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit/worker.ts
deleted file mode 100644
index ec6a4891d..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit/worker.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import time\nfrom typing import Any\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\n# > Workflow\nclass WorkflowInput(BaseModel):\n run: int\n group_key: str\n\n\nconcurrency_limit_workflow = hatchet.workflow(\n name=\"ConcurrencyDemoWorkflow\",\n concurrency=ConcurrencyExpression(\n expression=\"input.group_key\",\n max_runs=5,\n limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS,\n ),\n input_validator=WorkflowInput,\n)\n\n\n\n@concurrency_limit_workflow.task()\ndef step1(input: WorkflowInput, ctx: Context) -> dict[str, Any]:\n time.sleep(3)\n print(\"executed step1\")\n return {\"run\": input.run}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"concurrency-demo-worker\", slots=10, workflows=[concurrency_limit_workflow]\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/concurrency_limit/worker.py",
-  "blocks": {
-    "workflow": {
-      "start": 17,
-      "stop": 31
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/index.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/index.ts
deleted file mode 100644
index 890415139..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/index.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import test_concurrency_limit_rr from './test_concurrency_limit_rr';
-import trigger from './trigger';
-import worker from './worker';
-
-export { test_concurrency_limit_rr }
-export { trigger }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/test_concurrency_limit_rr.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/test_concurrency_limit_rr.ts
deleted file mode 100644
index 5ab780558..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/test_concurrency_limit_rr.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import time\n\nimport pytest\n\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom hatchet_sdk.workflow_run import WorkflowRunRef\n\n\n@pytest.mark.skip(reason=\"The timing for this test is not reliable\")\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_run() -> None:\n num_groups = 2\n runs: list[WorkflowRunRef] = []\n\n # Start all runs\n for i in range(1, num_groups + 1):\n run = concurrency_limit_rr_workflow.run_no_wait()\n runs.append(run)\n run = concurrency_limit_rr_workflow.run_no_wait()\n runs.append(run)\n\n # Wait for all results\n successful_runs = []\n cancelled_runs = []\n\n start_time = time.time()\n\n # Process each run individually\n for i, run in enumerate(runs, start=1):\n try:\n result = await run.aio_result()\n successful_runs.append((i, result))\n except Exception as e:\n if \"CANCELLED_BY_CONCURRENCY_LIMIT\" in str(e):\n cancelled_runs.append((i, str(e)))\n else:\n raise # Re-raise if it's an unexpected error\n\n end_time = time.time()\n total_time = end_time - start_time\n\n # Check that we have the correct number of successful and cancelled runs\n assert (\n len(successful_runs) == 4\n ), f\"Expected 4 successful runs, got {len(successful_runs)}\"\n assert (\n len(cancelled_runs) == 0\n ), f\"Expected 0 cancelled run, got {len(cancelled_runs)}\"\n\n # Check that the total time is close to 2 seconds\n assert (\n 3.8 <= total_time <= 7\n ), f\"Expected runtime to be about 4 seconds, but it took {total_time:.2f} seconds\"\n\n print(f\"Total execution time: {total_time:.2f} seconds\")\n",
-  "source": "out/python/concurrency_limit_rr/test_concurrency_limit_rr.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/trigger.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/trigger.ts
deleted file mode 100644
index c6a0bef84..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/trigger.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from examples.concurrency_limit_rr.worker import (\n WorkflowInput,\n concurrency_limit_rr_workflow,\n)\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\nfor i in range(200):\n group = \"0\"\n\n if i % 2 == 0:\n group = \"1\"\n\n concurrency_limit_rr_workflow.run(WorkflowInput(group=group))\n",
-  "source": "out/python/concurrency_limit_rr/trigger.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/worker.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/worker.ts
deleted file mode 100644
index b0e58831b..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr/worker.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import time\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\n# > Concurrency Strategy With Key\nclass WorkflowInput(BaseModel):\n group: str\n\n\nconcurrency_limit_rr_workflow = hatchet.workflow(\n name=\"ConcurrencyDemoWorkflowRR\",\n concurrency=ConcurrencyExpression(\n expression=\"input.group\",\n max_runs=1,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n input_validator=WorkflowInput,\n)\n\n\n@concurrency_limit_rr_workflow.task()\ndef step1(input: WorkflowInput, ctx: Context) -> None:\n print(\"starting step1\")\n time.sleep(2)\n print(\"finished step1\")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"concurrency-demo-worker-rr\",\n slots=10,\n workflows=[concurrency_limit_rr_workflow],\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/concurrency_limit_rr/worker.py",
-  "blocks": {
-    "concurrency_strategy_with_key": {
-      "start": 16,
-      "stop": 28
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/event.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/event.ts
deleted file mode 100644
index b15a09f6b..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/event.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import random\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n# Create a list of events with desired distribution\nevents = [\"1\"] * 10000 + [\"0\"] * 100\nrandom.shuffle(events)\n\n# Send the shuffled events\nfor group in events:\n hatchet.event.push(\"concurrency-test\", {\"group\": group})\n",
-  "source": "out/python/concurrency_limit_rr_load/event.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/index.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/index.ts
deleted file mode 100644
index 6c44274b5..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import event from './event';
-import worker from './worker';
-
-export { event }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/worker.ts b/frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/worker.ts
deleted file mode 100644
index 404bcee2e..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_limit_rr_load/worker.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import random\nimport time\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\nclass LoadRRInput(BaseModel):\n group: str\n\n\nload_rr_workflow = hatchet.workflow(\n name=\"LoadRoundRobin\",\n on_events=[\"concurrency-test\"],\n concurrency=ConcurrencyExpression(\n expression=\"input.group\",\n max_runs=1,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n input_validator=LoadRRInput,\n)\n\n\n@load_rr_workflow.on_failure_task()\ndef on_failure(input: LoadRRInput, context: Context) -> dict[str, str]:\n print(\"on_failure\")\n return {\"on_failure\": \"on_failure\"}\n\n\n@load_rr_workflow.task()\ndef step1(input: LoadRRInput, context: Context) -> dict[str, str]:\n print(\"starting step1\")\n time.sleep(random.randint(2, 20))\n print(\"finished step1\")\n return {\"step1\": \"step1\"}\n\n\n@load_rr_workflow.task(\n retries=3,\n backoff_factor=5,\n backoff_max_seconds=60,\n)\ndef step2(sinput: LoadRRInput, context: Context) -> dict[str, str]:\n print(\"starting step2\")\n if random.random() < 0.5: # 1% chance of failure\n raise Exception(\"Random failure in step2\")\n time.sleep(2)\n print(\"finished step2\")\n return {\"step2\": \"step2\"}\n\n\n@load_rr_workflow.task()\ndef step3(input: LoadRRInput, context: Context) -> dict[str, str]:\n print(\"starting step3\")\n time.sleep(0.2)\n print(\"finished step3\")\n return {\"step3\": \"step3\"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"concurrency-demo-worker-rr\", slots=50, workflows=[load_rr_workflow]\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/concurrency_limit_rr_load/worker.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/index.ts b/frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/index.ts
deleted file mode 100644
index e1442aa2b..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import test_multiple_concurrency_keys from './test_multiple_concurrency_keys';
-import worker from './worker';
-
-export { test_multiple_concurrency_keys }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/test_multiple_concurrency_keys.ts b/frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/test_multiple_concurrency_keys.ts
deleted file mode 100644
index c06cc7ae0..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/test_multiple_concurrency_keys.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\nfrom collections import Counter\nfrom datetime import datetime\nfrom random import choice\nfrom typing import Literal\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.concurrency_multiple_keys.worker import (\n DIGIT_MAX_RUNS,\n NAME_MAX_RUNS,\n WorkflowInput,\n concurrency_multiple_keys_workflow,\n)\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\n\nCharacter = Literal[\"Anna\", \"Vronsky\", \"Stiva\", \"Dolly\", \"Levin\", \"Karenin\"]\ncharacters: list[Character] = [\n \"Anna\",\n \"Vronsky\",\n \"Stiva\",\n \"Dolly\",\n \"Levin\",\n \"Karenin\",\n]\n\n\nclass RunMetadata(BaseModel):\n test_run_id: str\n key: str\n name: Character\n digit: str\n started_at: datetime\n finished_at: datetime\n\n @staticmethod\n def parse(task: V1TaskSummary) -> \"RunMetadata\":\n return RunMetadata(\n test_run_id=task.additional_metadata[\"test_run_id\"], # type: ignore\n key=task.additional_metadata[\"key\"], # type: ignore\n name=task.additional_metadata[\"name\"], # type: ignore\n digit=task.additional_metadata[\"digit\"], # type: ignore\n started_at=task.started_at or datetime.max,\n finished_at=task.finished_at or datetime.min,\n )\n\n def __str__(self) -> str:\n return self.key\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_multi_concurrency_key(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n\n run_refs = await concurrency_multiple_keys_workflow.aio_run_many_no_wait(\n [\n concurrency_multiple_keys_workflow.create_bulk_run_item(\n WorkflowInput(\n name=(name := choice(characters)),\n digit=(digit := choice([str(i) for i in range(6)])),\n ),\n options=TriggerWorkflowOptions(\n additional_metadata={\n \"test_run_id\": test_run_id,\n \"key\": f\"{name}-{digit}\",\n \"name\": name,\n \"digit\": digit,\n },\n ),\n )\n for _ in range(100)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(\n workflow_name=concurrency_multiple_keys_workflow.name,\n limit=1_000,\n )\n ).rows\n\n assert workflows\n\n workflow = next(\n (w for w in workflows if w.name == concurrency_multiple_keys_workflow.name),\n None,\n )\n\n assert workflow\n\n assert workflow.name == concurrency_multiple_keys_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n \"test_run_id\": test_run_id,\n },\n limit=1_000,\n )\n\n sorted_runs = sorted(\n [RunMetadata.parse(r) for r in runs.rows], key=lambda r: r.started_at\n )\n\n overlapping_groups: dict[int, list[RunMetadata]] = {}\n\n for run in sorted_runs:\n has_group_membership = False\n\n if not overlapping_groups:\n overlapping_groups[1] = [run]\n continue\n\n if has_group_membership:\n continue\n\n for id, group in overlapping_groups.items():\n if all(are_overlapping(run, task) for task in group):\n overlapping_groups[id].append(run)\n has_group_membership = True\n break\n\n if not has_group_membership:\n overlapping_groups[len(overlapping_groups) + 1] = [run]\n\n assert {s.key for s in sorted_runs} == {\n k.key for v in overlapping_groups.values() for k in v\n }\n\n for id, group in overlapping_groups.items():\n assert is_valid_group(group), f\"Group {id} is not valid\"\n\n\ndef are_overlapping(x: RunMetadata, y: RunMetadata) -> bool:\n return (x.started_at < y.finished_at and x.finished_at > y.started_at) or (\n x.finished_at > y.started_at and x.started_at < y.finished_at\n )\n\n\ndef is_valid_group(group: list[RunMetadata]) -> bool:\n digits = Counter[str]()\n names = Counter[str]()\n\n for task in group:\n digits[task.digit] += 1\n names[task.name] += 1\n\n if any(v > DIGIT_MAX_RUNS for v in digits.values()):\n return False\n\n if any(v > NAME_MAX_RUNS for v in names.values()):\n return False\n\n return True\n",
-  "source": "out/python/concurrency_multiple_keys/test_multiple_concurrency_keys.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/worker.ts b/frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/worker.ts
deleted file mode 100644
index d8334731a..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_multiple_keys/worker.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\nSLEEP_TIME = 2\nDIGIT_MAX_RUNS = 8\nNAME_MAX_RUNS = 3\n\n\n# > Concurrency Strategy With Key\nclass WorkflowInput(BaseModel):\n name: str\n digit: str\n\n\nconcurrency_multiple_keys_workflow = hatchet.workflow(\n name=\"ConcurrencyWorkflowManyKeys\",\n input_validator=WorkflowInput,\n)\n\n\n@concurrency_multiple_keys_workflow.task(\n concurrency=[\n ConcurrencyExpression(\n expression=\"input.digit\",\n max_runs=DIGIT_MAX_RUNS,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n ConcurrencyExpression(\n expression=\"input.name\",\n max_runs=NAME_MAX_RUNS,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n ]\n)\nasync def concurrency_task(input: WorkflowInput, ctx: Context) -> None:\n await asyncio.sleep(SLEEP_TIME)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"concurrency-worker-multiple-keys\",\n slots=10,\n workflows=[concurrency_multiple_keys_workflow],\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/concurrency_multiple_keys/worker.py",
-  "blocks": {
-    "concurrency_strategy_with_key": {
-      "start": 20,
-      "stop": 28
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_workflow_level/index.ts b/frontend/docs/lib/generated/snips/python/concurrency_workflow_level/index.ts
deleted file mode 100644
index 661edbd68..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_workflow_level/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import test_workflow_level_concurrency from './test_workflow_level_concurrency';
-import worker from './worker';
-
-export { test_workflow_level_concurrency }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_workflow_level/test_workflow_level_concurrency.ts b/frontend/docs/lib/generated/snips/python/concurrency_workflow_level/test_workflow_level_concurrency.ts
deleted file mode 100644
index 55e046912..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_workflow_level/test_workflow_level_concurrency.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\nfrom collections import Counter\nfrom datetime import datetime\nfrom random import choice\nfrom typing import Literal\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.concurrency_workflow_level.worker import (\n DIGIT_MAX_RUNS,\n NAME_MAX_RUNS,\n WorkflowInput,\n concurrency_workflow_level_workflow,\n)\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\n\nCharacter = Literal[\"Anna\", \"Vronsky\", \"Stiva\", \"Dolly\", \"Levin\", \"Karenin\"]\ncharacters: list[Character] = [\n \"Anna\",\n \"Vronsky\",\n \"Stiva\",\n \"Dolly\",\n \"Levin\",\n \"Karenin\",\n]\n\n\nclass RunMetadata(BaseModel):\n test_run_id: str\n key: str\n name: Character\n digit: str\n started_at: datetime\n finished_at: datetime\n\n @staticmethod\n def parse(task: V1TaskSummary) -> \"RunMetadata\":\n return RunMetadata(\n test_run_id=task.additional_metadata[\"test_run_id\"], # type: ignore\n key=task.additional_metadata[\"key\"], # type: ignore\n name=task.additional_metadata[\"name\"], # type: ignore\n digit=task.additional_metadata[\"digit\"], # type: ignore\n started_at=task.started_at or datetime.max,\n finished_at=task.finished_at or datetime.min,\n )\n\n def __str__(self) -> str:\n return self.key\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_workflow_level_concurrency(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n\n run_refs = await concurrency_workflow_level_workflow.aio_run_many_no_wait(\n [\n concurrency_workflow_level_workflow.create_bulk_run_item(\n WorkflowInput(\n name=(name := choice(characters)),\n digit=(digit := choice([str(i) for i in range(6)])),\n ),\n options=TriggerWorkflowOptions(\n additional_metadata={\n \"test_run_id\": test_run_id,\n \"key\": f\"{name}-{digit}\",\n \"name\": name,\n \"digit\": digit,\n },\n ),\n )\n for _ in range(100)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(\n workflow_name=concurrency_workflow_level_workflow.name,\n limit=1_000,\n )\n ).rows\n\n assert workflows\n\n workflow = next(\n (w for w in workflows if w.name == concurrency_workflow_level_workflow.name),\n None,\n )\n\n assert workflow\n\n assert workflow.name == concurrency_workflow_level_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n \"test_run_id\": test_run_id,\n },\n limit=1_000,\n )\n\n sorted_runs = sorted(\n [RunMetadata.parse(r) for r in runs.rows], key=lambda r: r.started_at\n )\n\n overlapping_groups: dict[int, list[RunMetadata]] = {}\n\n for run in sorted_runs:\n has_group_membership = False\n\n if not overlapping_groups:\n overlapping_groups[1] = [run]\n continue\n\n if has_group_membership:\n continue\n\n for id, group in overlapping_groups.items():\n if all(are_overlapping(run, task) for task in group):\n overlapping_groups[id].append(run)\n has_group_membership = True\n break\n\n if not has_group_membership:\n overlapping_groups[len(overlapping_groups) + 1] = [run]\n\n for id, group in overlapping_groups.items():\n assert is_valid_group(group), f\"Group {id} is not valid\"\n\n\ndef are_overlapping(x: RunMetadata, y: RunMetadata) -> bool:\n return (x.started_at < y.finished_at and x.finished_at > y.started_at) or (\n x.finished_at > y.started_at and x.started_at < y.finished_at\n )\n\n\ndef is_valid_group(group: list[RunMetadata]) -> bool:\n digits = Counter[str]()\n names = Counter[str]()\n\n for task in group:\n digits[task.digit] += 1\n names[task.name] += 1\n\n if any(v > DIGIT_MAX_RUNS for v in digits.values()):\n return False\n\n if any(v > NAME_MAX_RUNS for v in names.values()):\n return False\n\n return True\n",
-  "source": "out/python/concurrency_workflow_level/test_workflow_level_concurrency.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/concurrency_workflow_level/worker.ts b/frontend/docs/lib/generated/snips/python/concurrency_workflow_level/worker.ts
deleted file mode 100644
index 8b2e2da49..000000000
--- a/frontend/docs/lib/generated/snips/python/concurrency_workflow_level/worker.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\nSLEEP_TIME = 2\nDIGIT_MAX_RUNS = 8\nNAME_MAX_RUNS = 3\n\n\n# > Multiple Concurrency Keys\nclass WorkflowInput(BaseModel):\n name: str\n digit: str\n\n\nconcurrency_workflow_level_workflow = hatchet.workflow(\n name=\"ConcurrencyWorkflowManyKeys\",\n input_validator=WorkflowInput,\n concurrency=[\n ConcurrencyExpression(\n expression=\"input.digit\",\n max_runs=DIGIT_MAX_RUNS,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n ConcurrencyExpression(\n expression=\"input.name\",\n max_runs=NAME_MAX_RUNS,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n ],\n)\n\n\n@concurrency_workflow_level_workflow.task()\nasync def task_1(input: WorkflowInput, ctx: Context) -> None:\n await asyncio.sleep(SLEEP_TIME)\n\n\n@concurrency_workflow_level_workflow.task()\nasync def task_2(input: WorkflowInput, ctx: Context) -> None:\n await asyncio.sleep(SLEEP_TIME)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"concurrency-worker-workflow-level\",\n slots=10,\n workflows=[concurrency_workflow_level_workflow],\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/concurrency_workflow_level/worker.py",
-  "blocks": {
-    "multiple_concurrency_keys": {
-      "start": 20,
-      "stop": 40
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/conditions/index.ts b/frontend/docs/lib/generated/snips/python/conditions/index.ts
deleted file mode 100644
index dc8077c96..000000000
--- a/frontend/docs/lib/generated/snips/python/conditions/index.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import test_conditions from './test_conditions';
-import trigger from './trigger';
-import worker from './worker';
-
-export { test_conditions }
-export { trigger }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/conditions/test_conditions.ts b/frontend/docs/lib/generated/snips/python/conditions/test_conditions.ts
deleted file mode 100644
index 7f4d94d6b..000000000
--- a/frontend/docs/lib/generated/snips/python/conditions/test_conditions.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\n\nimport pytest\n\nfrom examples.conditions.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_waits(hatchet: Hatchet) -> None:\n ref = task_condition_workflow.run_no_wait()\n\n await asyncio.sleep(15)\n\n hatchet.event.push(\"skip_on_event:skip\", {})\n hatchet.event.push(\"wait_for_event:start\", {})\n\n result = await ref.aio_result()\n\n assert result[\"skip_on_event\"] == {\"skipped\": True}\n\n first_random_number = result[\"start\"][\"random_number\"]\n wait_for_event_random_number = result[\"wait_for_event\"][\"random_number\"]\n wait_for_sleep_random_number = result[\"wait_for_sleep\"][\"random_number\"]\n\n left_branch = result[\"left_branch\"]\n right_branch = result[\"right_branch\"]\n\n assert left_branch.get(\"skipped\") is True or right_branch.get(\"skipped\") is True\n\n skip_with_multiple_parents = result[\"skip_with_multiple_parents\"]\n\n assert skip_with_multiple_parents.get(\"skipped\") is True\n\n branch_random_number = left_branch.get(\"random_number\") or right_branch.get(\n \"random_number\"\n )\n\n result_sum = result[\"sum\"][\"sum\"]\n\n assert (\n result_sum\n == first_random_number\n + wait_for_event_random_number\n + wait_for_sleep_random_number\n + branch_random_number\n )\n",
-  "source": "out/python/conditions/test_conditions.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/conditions/trigger.ts b/frontend/docs/lib/generated/snips/python/conditions/trigger.ts
deleted file mode 100644
index b91e8b4d1..000000000
--- a/frontend/docs/lib/generated/snips/python/conditions/trigger.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import time\n\nfrom examples.conditions.worker import hatchet, task_condition_workflow\n\ntask_condition_workflow.run_no_wait()\n\ntime.sleep(5)\n\nhatchet.event.push(\"skip_on_event:skip\", {})\nhatchet.event.push(\"wait_for_event:start\", {})\n",
-  "source": "out/python/conditions/trigger.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/conditions/worker.ts b/frontend/docs/lib/generated/snips/python/conditions/worker.ts
deleted file mode 100644
index 4a332130f..000000000
--- a/frontend/docs/lib/generated/snips/python/conditions/worker.ts
+++ /dev/null
@@ -1,44 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "# > Create a workflow\n\nimport random\nfrom datetime import timedelta\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n Context,\n EmptyModel,\n Hatchet,\n ParentCondition,\n SleepCondition,\n UserEventCondition,\n or_,\n)\n\nhatchet = Hatchet(debug=True)\n\n\nclass StepOutput(BaseModel):\n random_number: int\n\n\nclass RandomSum(BaseModel):\n sum: int\n\n\ntask_condition_workflow = hatchet.workflow(name=\"TaskConditionWorkflow\")\n\n\n\n# > Add base task\n@task_condition_workflow.task()\ndef start(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add wait for sleep\n@task_condition_workflow.task(\n parents=[start], wait_for=[SleepCondition(timedelta(seconds=10))]\n)\ndef wait_for_sleep(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add skip condition override\n@task_condition_workflow.task(\n parents=[start, wait_for_sleep],\n skip_if=[ParentCondition(parent=start, expression=\"output.random_number > 0\")],\n)\ndef skip_with_multiple_parents(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add skip on event\n@task_condition_workflow.task(\n parents=[start],\n wait_for=[SleepCondition(timedelta(seconds=30))],\n skip_if=[UserEventCondition(event_key=\"skip_on_event:skip\")],\n)\ndef skip_on_event(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add branching\n@task_condition_workflow.task(\n parents=[wait_for_sleep],\n skip_if=[\n ParentCondition(\n parent=wait_for_sleep,\n expression=\"output.random_number > 50\",\n )\n ],\n)\ndef left_branch(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@task_condition_workflow.task(\n parents=[wait_for_sleep],\n skip_if=[\n ParentCondition(\n parent=wait_for_sleep,\n expression=\"output.random_number <= 50\",\n )\n ],\n)\ndef right_branch(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add wait for event\n@task_condition_workflow.task(\n parents=[start],\n wait_for=[\n or_(\n SleepCondition(duration=timedelta(minutes=1)),\n UserEventCondition(event_key=\"wait_for_event:start\"),\n )\n ],\n)\ndef wait_for_event(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n\n\n# > Add sum\n@task_condition_workflow.task(\n parents=[\n start,\n wait_for_sleep,\n wait_for_event,\n skip_on_event,\n left_branch,\n right_branch,\n ],\n)\ndef sum(input: EmptyModel, ctx: Context) -> RandomSum:\n one = ctx.task_output(start).random_number\n two = ctx.task_output(wait_for_event).random_number\n three = ctx.task_output(wait_for_sleep).random_number\n four = (\n ctx.task_output(skip_on_event).random_number\n if not ctx.was_skipped(skip_on_event)\n else 0\n )\n\n five = (\n ctx.task_output(left_branch).random_number\n if not ctx.was_skipped(left_branch)\n else 0\n )\n six = (\n ctx.task_output(right_branch).random_number\n if not ctx.was_skipped(right_branch)\n else 0\n )\n\n return RandomSum(sum=one + two + three + four + five + six)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\"dag-worker\", workflows=[task_condition_workflow])\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/conditions/worker.py",
-  "blocks": {
-    "create_a_workflow": {
-      "start": 2,
-      "stop": 30
-    },
-    "add_base_task": {
-      "start": 34,
-      "stop": 38
-    },
-    "add_wait_for_sleep": {
-      "start": 42,
-      "stop": 48
-    },
-    "add_skip_condition_override": {
-      "start": 52,
-      "stop": 59
-    },
-    "add_skip_on_event": {
-      "start": 63,
-      "stop": 71
-    },
-    "add_branching": {
-      "start": 75,
-      "stop": 100
-    },
-    "add_wait_for_event": {
-      "start": 104,
-      "stop": 116
-    },
-    "add_sum": {
-      "start": 120,
-      "stop": 153
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/cron/index.ts b/frontend/docs/lib/generated/snips/python/cron/index.ts
deleted file mode 100644
index aa79cea3c..000000000
--- a/frontend/docs/lib/generated/snips/python/cron/index.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import programatic_async from './programatic-async';
-import programatic_sync from './programatic-sync';
-import workflow_definition from './workflow-definition';
-
-export { programatic_async }
-export { programatic_sync }
-export { workflow_definition }
diff --git a/frontend/docs/lib/generated/snips/python/cron/programatic-async.ts b/frontend/docs/lib/generated/snips/python/cron/programatic-async.ts
deleted file mode 100644
index aee7e1834..000000000
--- a/frontend/docs/lib/generated/snips/python/cron/programatic-async.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from pydantic import BaseModel\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n\nclass DynamicCronInput(BaseModel):\n name: str\n\n\nasync def create_cron() -> None:\n dynamic_cron_workflow = hatchet.workflow(\n name=\"CronWorkflow\", input_validator=DynamicCronInput\n )\n\n # > Create\n cron_trigger = await dynamic_cron_workflow.aio_create_cron(\n cron_name=\"customer-a-daily-report\",\n expression=\"0 12 * * *\",\n input=DynamicCronInput(name=\"John Doe\"),\n additional_metadata={\n \"customer_id\": \"customer-a\",\n },\n )\n\n cron_trigger.metadata.id # the id of the cron trigger\n\n # > List\n await hatchet.cron.aio_list()\n\n # > Get\n cron_trigger = await hatchet.cron.aio_get(cron_id=cron_trigger.metadata.id)\n\n # > Delete\n await hatchet.cron.aio_delete(cron_id=cron_trigger.metadata.id)\n",
-  "source": "out/python/cron/programatic-async.py",
-  "blocks": {
-    "create": {
-      "start": 18,
-      "stop": 27
-    },
-    "list": {
-      "start": 30,
-      "stop": 30
-    },
-    "get": {
-      "start": 33,
-      "stop": 33
-    },
-    "delete": {
-      "start": 36,
-      "stop": 36
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/cron/programatic-sync.ts b/frontend/docs/lib/generated/snips/python/cron/programatic-sync.ts
deleted file mode 100644
index 925962d20..000000000
--- a/frontend/docs/lib/generated/snips/python/cron/programatic-sync.ts
+++ /dev/null
@@ -1,28 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from pydantic import BaseModel\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n\nclass DynamicCronInput(BaseModel):\n name: str\n\n\ndynamic_cron_workflow = hatchet.workflow(\n name=\"CronWorkflow\", input_validator=DynamicCronInput\n)\n\n# > Create\ncron_trigger = dynamic_cron_workflow.create_cron(\n cron_name=\"customer-a-daily-report\",\n expression=\"0 12 * * *\",\n input=DynamicCronInput(name=\"John Doe\"),\n additional_metadata={\n \"customer_id\": \"customer-a\",\n },\n)\n\n\nid = cron_trigger.metadata.id # the id of the cron trigger\n\n# > List\ncron_triggers = hatchet.cron.list()\n\n# > Get\ncron_trigger = hatchet.cron.get(cron_id=cron_trigger.metadata.id)\n\n# > Delete\nhatchet.cron.delete(cron_id=cron_trigger.metadata.id)\n",
-  "source": "out/python/cron/programatic-sync.py",
-  "blocks": {
-    "create": {
-      "start": 17,
-      "stop": 27
-    },
-    "list": {
-      "start": 30,
-      "stop": 30
-    },
-    "get": {
-      "start": 33,
-      "stop": 33
-    },
-    "delete": {
-      "start": 36,
-      "stop": 36
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/cron/workflow-definition.ts b/frontend/docs/lib/generated/snips/python/cron/workflow-definition.ts
deleted file mode 100644
index 030469ac2..000000000
--- a/frontend/docs/lib/generated/snips/python/cron/workflow-definition.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Workflow Definition Cron Trigger\n# Adding a cron trigger to a workflow is as simple\n# as adding a `cron expression` to the `on_cron`\n# prop of the workflow definition\n\ncron_workflow = hatchet.workflow(name=\"CronWorkflow\", on_crons=[\"* * * * *\"])\n\n\n@cron_workflow.task()\ndef step1(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {\n \"time\": \"step1\",\n }\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\"test-worker\", slots=1, workflows=[cron_workflow])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/cron/workflow-definition.py",
-  "blocks": {
-    "workflow_definition_cron_trigger": {
-      "start": 7,
-      "stop": 20
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/dag/index.ts b/frontend/docs/lib/generated/snips/python/dag/index.ts
deleted file mode 100644
index a2e947133..000000000
--- a/frontend/docs/lib/generated/snips/python/dag/index.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import test_dag from './test_dag';
-import trigger from './trigger';
-import worker from './worker';
-
-export { test_dag }
-export { trigger }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/dag/test_dag.ts b/frontend/docs/lib/generated/snips/python/dag/test_dag.ts
deleted file mode 100644
index 96bb27c57..000000000
--- a/frontend/docs/lib/generated/snips/python/dag/test_dag.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import pytest\n\nfrom examples.dag.worker import dag_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_run(hatchet: Hatchet) -> None:\n result = await dag_workflow.aio_run()\n\n one = result[\"step1\"][\"random_number\"]\n two = result[\"step2\"][\"random_number\"]\n assert result[\"step3\"][\"sum\"] == one + two\n assert result[\"step4\"][\"step4\"] == \"step4\"\n",
-  "source": "out/python/dag/test_dag.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/dag/trigger.ts b/frontend/docs/lib/generated/snips/python/dag/trigger.ts
deleted file mode 100644
index 60e12350e..000000000
--- a/frontend/docs/lib/generated/snips/python/dag/trigger.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from examples.dag.worker import dag_workflow\n\ndag_workflow.run()\n",
-  "source": "out/python/dag/trigger.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/dag/worker.ts b/frontend/docs/lib/generated/snips/python/dag/worker.ts
deleted file mode 100644
index bef205d5e..000000000
--- a/frontend/docs/lib/generated/snips/python/dag/worker.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import random\nimport time\nfrom datetime import timedelta\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\n\nclass StepOutput(BaseModel):\n random_number: int\n\n\nclass RandomSum(BaseModel):\n sum: int\n\n\nhatchet = Hatchet(debug=True)\n\ndag_workflow = hatchet.workflow(name=\"DAGWorkflow\")\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\ndef step1(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\nasync def step2(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(parents=[step1, step2])\nasync def step3(input: EmptyModel, ctx: Context) -> RandomSum:\n one = ctx.task_output(step1).random_number\n two = ctx.task_output(step2).random_number\n\n return RandomSum(sum=one + two)\n\n\n@dag_workflow.task(parents=[step1, step3])\nasync def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\n \"executed step4\",\n time.strftime(\"%H:%M:%S\", time.localtime()),\n input,\n ctx.task_output(step1),\n ctx.task_output(step3),\n )\n return {\n \"step4\": \"step4\",\n }\n\n\ndef main() -> None:\n worker = hatchet.worker(\"dag-worker\", workflows=[dag_workflow])\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/dag/worker.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/dedupe/index.ts b/frontend/docs/lib/generated/snips/python/dedupe/index.ts
deleted file mode 100644
index 30b1adbeb..000000000
--- a/frontend/docs/lib/generated/snips/python/dedupe/index.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import worker from './worker';
-
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/dedupe/worker.ts b/frontend/docs/lib/generated/snips/python/dedupe/worker.ts
deleted file mode 100644
index 64bc67b1c..000000000
--- a/frontend/docs/lib/generated/snips/python/dedupe/worker.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\nfrom datetime import timedelta\nfrom typing import Any\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.exceptions import DedupeViolationError\n\nhatchet = Hatchet(debug=True)\n\ndedupe_parent_wf = hatchet.workflow(name=\"DedupeParent\")\ndedupe_child_wf = hatchet.workflow(name=\"DedupeChild\")\n\n\n@dedupe_parent_wf.task(execution_timeout=timedelta(minutes=1))\nasync def spawn(input: EmptyModel, ctx: Context) -> dict[str, list[Any]]:\n print(\"spawning child\")\n\n results = []\n\n for i in range(2):\n try:\n results.append(\n dedupe_child_wf.aio_run(\n options=TriggerWorkflowOptions(\n additional_metadata={\"dedupe\": \"test\"}, key=f\"child{i}\"\n ),\n )\n )\n except DedupeViolationError as e:\n print(f\"dedupe violation {e}\")\n continue\n\n result = await asyncio.gather(*results)\n print(f\"results {result}\")\n\n return {\"results\": result}\n\n\n@dedupe_child_wf.task()\nasync def process(input: EmptyModel, ctx: Context) -> dict[str, str]:\n await asyncio.sleep(3)\n\n print(\"child process\")\n return {\"status\": \"success\"}\n\n\n@dedupe_child_wf.task()\nasync def process2(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\"child process2\")\n return {\"status2\": \"success\"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"fanout-worker\", slots=100, workflows=[dedupe_parent_wf, dedupe_child_wf]\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/dedupe/worker.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/delayed/index.ts b/frontend/docs/lib/generated/snips/python/delayed/index.ts
deleted file mode 100644
index e137296ba..000000000
--- a/frontend/docs/lib/generated/snips/python/delayed/index.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import test_delayed from './test_delayed';
-import trigger from './trigger';
-import worker from './worker';
-
-export { test_delayed }
-export { trigger }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/delayed/test_delayed.ts b/frontend/docs/lib/generated/snips/python/delayed/test_delayed.ts
deleted file mode 100644
index 40fd155e9..000000000
--- a/frontend/docs/lib/generated/snips/python/delayed/test_delayed.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "# from hatchet_sdk import Hatchet\n# import pytest\n\n# from tests.utils import fixture_bg_worker\n\n\n# worker = fixture_bg_worker([\"poetry\", \"run\", \"manual_trigger\"])\n\n# # @pytest.mark.asyncio(loop_scope=\"session\")\n# async def test_run(hatchet: Hatchet):\n# # TODO\n",
-  "source": "out/python/delayed/test_delayed.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/delayed/trigger.ts b/frontend/docs/lib/generated/snips/python/delayed/trigger.ts
deleted file mode 100644
index 01d966900..000000000
--- a/frontend/docs/lib/generated/snips/python/delayed/trigger.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from examples.delayed.worker import PrinterInput, print_schedule_wf\n\nprint_schedule_wf.run(PrinterInput(message=\"test\"))\n",
-  "source": "out/python/delayed/trigger.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/delayed/worker.ts b/frontend/docs/lib/generated/snips/python/delayed/worker.ts
deleted file mode 100644
index b1fef0e55..000000000
--- a/frontend/docs/lib/generated/snips/python/delayed/worker.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from datetime import datetime, timedelta, timezone\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass PrinterInput(BaseModel):\n message: str\n\n\nprint_schedule_wf = hatchet.workflow(\n name=\"PrintScheduleWorkflow\",\n input_validator=PrinterInput,\n)\nprint_printer_wf = hatchet.workflow(\n name=\"PrintPrinterWorkflow\", input_validator=PrinterInput\n)\n\n\n@print_schedule_wf.task()\ndef schedule(input: PrinterInput, ctx: Context) -> None:\n now = datetime.now(tz=timezone.utc)\n print(f\"the time is \\t {now.strftime('%H:%M:%S')}\")\n future_time = now + timedelta(seconds=15)\n print(f\"scheduling for \\t {future_time.strftime('%H:%M:%S')}\")\n\n print_printer_wf.schedule(future_time, input=input)\n\n\n@print_schedule_wf.task()\ndef step1(input: PrinterInput, ctx: Context) -> None:\n now = datetime.now(tz=timezone.utc)\n print(f\"printed at \\t {now.strftime('%H:%M:%S')}\")\n print(f\"message \\t {input.message}\")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"delayed-worker\", slots=4, workflows=[print_schedule_wf, print_printer_wf]\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/delayed/worker.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/dependency_injection/index.ts b/frontend/docs/lib/generated/snips/python/dependency_injection/index.ts
deleted file mode 100644
index ddb6489f8..000000000
--- a/frontend/docs/lib/generated/snips/python/dependency_injection/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import test_dependency_injection from './test_dependency_injection';
-import worker from './worker';
-
-export { test_dependency_injection }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/dependency_injection/test_dependency_injection.ts b/frontend/docs/lib/generated/snips/python/dependency_injection/test_dependency_injection.ts
deleted file mode 100644
index 3f776037d..000000000
--- a/frontend/docs/lib/generated/snips/python/dependency_injection/test_dependency_injection.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import pytest\n\nfrom examples.dependency_injection.worker import (\n ASYNC_DEPENDENCY_VALUE,\n SYNC_DEPENDENCY_VALUE,\n Output,\n async_dep,\n async_task_with_dependencies,\n di_workflow,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n sync_dep,\n sync_task_with_dependencies,\n)\nfrom hatchet_sdk import EmptyModel\nfrom hatchet_sdk.runnables.workflow import Standalone\n\n\n@pytest.mark.parametrize(\n \"task\",\n [\n async_task_with_dependencies,\n sync_task_with_dependencies,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n ],\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_di_standalones(\n task: Standalone[EmptyModel, Output],\n) -> None:\n result = await task.aio_run()\n\n assert isinstance(result, Output)\n assert result.sync_dep == SYNC_DEPENDENCY_VALUE\n assert result.async_dep == ASYNC_DEPENDENCY_VALUE\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_di_workflows() -> None:\n result = await di_workflow.aio_run()\n\n assert len(result) == 4\n\n for output in result.values():\n parsed = Output.model_validate(output)\n\n assert parsed.sync_dep == SYNC_DEPENDENCY_VALUE\n assert parsed.async_dep == ASYNC_DEPENDENCY_VALUE\n",
-  "source": "out/python/dependency_injection/test_dependency_injection.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/dependency_injection/worker.ts b/frontend/docs/lib/generated/snips/python/dependency_injection/worker.ts
deleted file mode 100644
index 8fffaf6b7..000000000
--- a/frontend/docs/lib/generated/snips/python/dependency_injection/worker.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "# > Simple\n\nfrom typing import Annotated\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Depends, DurableContext, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=False)\n\nSYNC_DEPENDENCY_VALUE = \"sync_dependency_value\"\nASYNC_DEPENDENCY_VALUE = \"async_dependency_value\"\n\n\n# > Declare dependencies\nasync def async_dep(input: EmptyModel, ctx: Context) -> str:\n return ASYNC_DEPENDENCY_VALUE\n\n\ndef sync_dep(input: EmptyModel, ctx: Context) -> str:\n return SYNC_DEPENDENCY_VALUE\n\n\n\n\nclass Output(BaseModel):\n sync_dep: str\n async_dep: str\n\n\n# > Inject dependencies\n@hatchet.task()\nasync def async_task_with_dependencies(\n _i: EmptyModel,\n ctx: Context,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n\n\n@hatchet.task()\ndef sync_task_with_dependencies(\n _i: EmptyModel,\n ctx: Context,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@hatchet.durable_task()\nasync def durable_async_task_with_dependencies(\n _i: EmptyModel,\n ctx: DurableContext,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@hatchet.durable_task()\ndef durable_sync_task_with_dependencies(\n _i: EmptyModel,\n ctx: DurableContext,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\ndi_workflow = hatchet.workflow(\n name=\"dependency-injection-workflow\",\n)\n\n\n@di_workflow.task()\nasync def wf_async_task_with_dependencies(\n _i: EmptyModel,\n ctx: Context,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@di_workflow.task()\ndef wf_sync_task_with_dependencies(\n _i: EmptyModel,\n ctx: Context,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@di_workflow.durable_task()\nasync def wf_durable_async_task_with_dependencies(\n _i: EmptyModel,\n ctx: DurableContext,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\n@di_workflow.durable_task()\ndef wf_durable_sync_task_with_dependencies(\n _i: EmptyModel,\n ctx: DurableContext,\n async_dep: Annotated[str, Depends(async_dep)],\n sync_dep: Annotated[str, Depends(sync_dep)],\n) -> Output:\n return Output(\n sync_dep=sync_dep,\n async_dep=async_dep,\n )\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"dependency-injection-worker\",\n workflows=[\n async_task_with_dependencies,\n sync_task_with_dependencies,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n di_workflow,\n ],\n )\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n",
-  "source": "out/python/dependency_injection/worker.py",
-  "blocks": {
-    "declare_dependencies": {
-      "start": 16,
-      "stop": 23
-    },
-    "inject_dependencies": {
-      "start": 32,
-      "stop": 44
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/durable/index.ts b/frontend/docs/lib/generated/snips/python/durable/index.ts
deleted file mode 100644
index 31df3c1dd..000000000
--- a/frontend/docs/lib/generated/snips/python/durable/index.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import test_durable from './test_durable';
-import trigger from './trigger';
-import worker from './worker';
-
-export { test_durable }
-export { trigger }
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/durable/test_durable.ts b/frontend/docs/lib/generated/snips/python/durable/test_durable.ts
deleted file mode 100644
index 5acc42cc8..000000000
--- a/frontend/docs/lib/generated/snips/python/durable/test_durable.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import asyncio\n\nimport pytest\n\nfrom examples.durable.worker import (\n EVENT_KEY,\n SLEEP_TIME,\n durable_workflow,\n wait_for_sleep_twice,\n)\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_durable(hatchet: Hatchet) -> None:\n ref = durable_workflow.run_no_wait()\n\n await asyncio.sleep(SLEEP_TIME + 10)\n\n hatchet.event.push(EVENT_KEY, {\"test\": \"test\"})\n\n result = await ref.aio_result()\n\n workers = await hatchet.workers.aio_list()\n\n assert workers.rows\n\n active_workers = [w for w in workers.rows if w.status ==
\"ACTIVE\"]\n\n assert len(active_workers) == 2\n assert any(\n w.name == hatchet.config.apply_namespace(\"e2e-test-worker\")\n for w in active_workers\n )\n assert any(\n w.name == hatchet.config.apply_namespace(\"e2e-test-worker_durable\")\n for w in active_workers\n )\n\n assert result[\"durable_task\"][\"status\"] == \"success\"\n\n wait_group_1 = result[\"wait_for_or_group_1\"]\n wait_group_2 = result[\"wait_for_or_group_2\"]\n\n assert abs(wait_group_1[\"runtime\"] - SLEEP_TIME) < 3\n\n assert wait_group_1[\"key\"] == wait_group_2[\"key\"]\n assert wait_group_1[\"key\"] == \"CREATE\"\n assert \"sleep\" in wait_group_1[\"event_id\"]\n assert \"event\" in wait_group_2[\"event_id\"]\n\n wait_for_multi_sleep = result[\"wait_for_multi_sleep\"]\n\n assert wait_for_multi_sleep[\"runtime\"] > 3 * SLEEP_TIME\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_durable_sleep_cancel_replay(hatchet: Hatchet) -> None:\n first_sleep = await wait_for_sleep_twice.aio_run_no_wait()\n\n await asyncio.sleep(SLEEP_TIME / 2)\n\n await hatchet.runs.aio_cancel(first_sleep.workflow_run_id)\n\n await first_sleep.aio_result()\n\n await hatchet.runs.aio_replay(\n first_sleep.workflow_run_id,\n )\n\n second_sleep_result = await first_sleep.aio_result()\n\n \"\"\"We've already slept for a little bit by the time the task is cancelled\"\"\"\n assert second_sleep_result[\"runtime\"] <= SLEEP_TIME\n", - "source": "out/python/durable/test_durable.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/durable/trigger.ts b/frontend/docs/lib/generated/snips/python/durable/trigger.ts deleted file mode 100644 index 162ddbdc8..000000000 --- a/frontend/docs/lib/generated/snips/python/durable/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import time\n\nfrom examples.durable.worker import (\n EVENT_KEY,\n SLEEP_TIME,\n durable_workflow,\n ephemeral_workflow,\n hatchet,\n)\n\ndurable_workflow.run_no_wait()\nephemeral_workflow.run_no_wait()\n\nprint(\"Sleeping\")\ntime.sleep(SLEEP_TIME + 2)\n\nprint(\"Pushing event\")\nhatchet.event.push(EVENT_KEY, {})\n", - "source": "out/python/durable/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/durable/worker.ts b/frontend/docs/lib/generated/snips/python/durable/worker.ts deleted file mode 100644 index f6f694497..000000000 --- a/frontend/docs/lib/generated/snips/python/durable/worker.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nimport time\nfrom datetime import timedelta\nfrom uuid import uuid4\n\nfrom hatchet_sdk import (\n Context,\n DurableContext,\n EmptyModel,\n Hatchet,\n SleepCondition,\n UserEventCondition,\n or_,\n)\n\nhatchet = Hatchet(debug=True)\n\n# > Create a durable workflow\ndurable_workflow = hatchet.workflow(name=\"DurableWorkflow\")\n\n\nephemeral_workflow = hatchet.workflow(name=\"EphemeralWorkflow\")\n\n\n# > Add durable task\nEVENT_KEY = \"durable-example:event\"\nSLEEP_TIME = 5\n\n\n@durable_workflow.task()\nasync def ephemeral_task(input: EmptyModel, ctx: Context) -> None:\n print(\"Running non-durable task\")\n\n\n@durable_workflow.durable_task()\nasync def durable_task(input: EmptyModel, ctx: DurableContext) -> dict[str, str]:\n print(\"Waiting for 
sleep\")\n await ctx.aio_sleep_for(duration=timedelta(seconds=SLEEP_TIME))\n print(\"Sleep finished\")\n\n print(\"Waiting for event\")\n await ctx.aio_wait_for(\n \"event\",\n UserEventCondition(event_key=EVENT_KEY, expression=\"true\"),\n )\n print(\"Event received\")\n\n return {\n \"status\": \"success\",\n }\n\n\n\n\n# > Add durable tasks that wait for or groups\n\n\n@durable_workflow.durable_task()\nasync def wait_for_or_group_1(\n _i: EmptyModel, ctx: DurableContext\n) -> dict[str, str | int]:\n start = time.time()\n wait_result = await ctx.aio_wait_for(\n uuid4().hex,\n or_(\n SleepCondition(timedelta(seconds=SLEEP_TIME)),\n UserEventCondition(event_key=EVENT_KEY),\n ),\n )\n\n key = list(wait_result.keys())[0]\n event_id = list(wait_result[key].keys())[0]\n\n return {\n \"runtime\": int(time.time() - start),\n \"key\": key,\n \"event_id\": event_id,\n }\n\n\n\n\n@durable_workflow.durable_task()\nasync def wait_for_or_group_2(\n _i: EmptyModel, ctx: DurableContext\n) -> dict[str, str | int]:\n start = time.time()\n wait_result = await ctx.aio_wait_for(\n uuid4().hex,\n or_(\n SleepCondition(timedelta(seconds=6 * SLEEP_TIME)),\n UserEventCondition(event_key=EVENT_KEY),\n ),\n )\n\n key = list(wait_result.keys())[0]\n event_id = list(wait_result[key].keys())[0]\n\n return {\n \"runtime\": int(time.time() - start),\n \"key\": key,\n \"event_id\": event_id,\n }\n\n\n@durable_workflow.durable_task()\nasync def wait_for_multi_sleep(\n _i: EmptyModel, ctx: DurableContext\n) -> dict[str, str | int]:\n start = time.time()\n\n for _ in range(3):\n await ctx.aio_sleep_for(\n timedelta(seconds=SLEEP_TIME),\n )\n\n return {\n \"runtime\": int(time.time() - start),\n }\n\n\n@ephemeral_workflow.task()\ndef ephemeral_task_2(input: EmptyModel, ctx: Context) -> None:\n print(\"Running non-durable task\")\n\n\n@hatchet.durable_task()\nasync def wait_for_sleep_twice(\n input: EmptyModel, ctx: DurableContext\n) -> dict[str, int]:\n try:\n start = time.time()\n\n await ctx.aio_sleep_for(\n timedelta(seconds=SLEEP_TIME),\n )\n\n return {\n \"runtime\": int(time.time() - start),\n }\n except asyncio.CancelledError:\n return {\"runtime\": -1}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"durable-worker\",\n workflows=[durable_workflow, ephemeral_workflow, wait_for_sleep_twice],\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/durable/worker.py", - "blocks": { - "create_a_durable_workflow": { - "start": 19, - "stop": 19 - }, - "add_durable_task": { - "start": 26, - "stop": 52 - }, - "add_durable_tasks_that_wait_for_or_groups": { - "start": 56, - "stop": 80 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/durable_event/index.ts b/frontend/docs/lib/generated/snips/python/durable_event/index.ts deleted file mode 100644 index c443f556e..000000000 --- a/frontend/docs/lib/generated/snips/python/durable_event/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import trigger from './trigger'; -import worker from './worker'; - -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/durable_event/trigger.ts b/frontend/docs/lib/generated/snips/python/durable_event/trigger.ts deleted file mode 100644 index 7e72d1e9b..000000000 --- a/frontend/docs/lib/generated/snips/python/durable_event/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import time\n\nfrom 
examples.durable_event.worker import (\n EVENT_KEY,\n durable_event_task,\n durable_event_task_with_filter,\n hatchet,\n)\n\ndurable_event_task.run_no_wait()\ndurable_event_task_with_filter.run_no_wait()\n\nprint(\"Sleeping\")\ntime.sleep(2)\n\nprint(\"Pushing event\")\nhatchet.event.push(\n EVENT_KEY,\n {\n \"user_id\": \"1234\",\n },\n)\n", - "source": "out/python/durable_event/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/durable_event/worker.ts b/frontend/docs/lib/generated/snips/python/durable_event/worker.ts deleted file mode 100644 index 15dbee2d7..000000000 --- a/frontend/docs/lib/generated/snips/python/durable_event/worker.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import DurableContext, EmptyModel, Hatchet, UserEventCondition\n\nhatchet = Hatchet(debug=True)\n\nEVENT_KEY = \"user:update\"\n\n\n# > Durable Event\n@hatchet.durable_task(name=\"DurableEventTask\")\nasync def durable_event_task(input: EmptyModel, ctx: DurableContext) -> None:\n res = await ctx.aio_wait_for(\n \"event\",\n UserEventCondition(event_key=\"user:update\"),\n )\n\n print(\"got event\", res)\n\n\n\n\n@hatchet.durable_task(name=\"DurableEventWithFilterTask\")\nasync def durable_event_task_with_filter(\n input: EmptyModel, ctx: DurableContext\n) -> None:\n # > Durable Event With Filter\n res = await ctx.aio_wait_for(\n \"event\",\n UserEventCondition(\n event_key=\"user:update\", expression=\"input.user_id == '1234'\"\n ),\n )\n\n print(\"got event\", res)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"durable-event-worker\",\n workflows=[durable_event_task, durable_event_task_with_filter],\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/durable_event/worker.py", - "blocks": { - "durable_event": { - "start": 9, - "stop": 18 - }, - "durable_event_with_filter": { - "start": 26, - "stop": 31 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/durable_sleep/index.ts b/frontend/docs/lib/generated/snips/python/durable_sleep/index.ts deleted file mode 100644 index c443f556e..000000000 --- a/frontend/docs/lib/generated/snips/python/durable_sleep/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import trigger from './trigger'; -import worker from './worker'; - -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/durable_sleep/trigger.ts b/frontend/docs/lib/generated/snips/python/durable_sleep/trigger.ts deleted file mode 100644 index 54e067f6e..000000000 --- a/frontend/docs/lib/generated/snips/python/durable_sleep/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.durable_sleep.worker import durable_sleep_task\n\ndurable_sleep_task.run_no_wait()\n", - "source": "out/python/durable_sleep/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/durable_sleep/worker.ts b/frontend/docs/lib/generated/snips/python/durable_sleep/worker.ts deleted file mode 100644 index 14c999304..000000000 --- a/frontend/docs/lib/generated/snips/python/durable_sleep/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: 
Snippet = { - "language": "python", - "content": "from datetime import timedelta\n\nfrom hatchet_sdk import DurableContext, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Durable Sleep\n@hatchet.durable_task(name=\"DurableSleepTask\")\nasync def durable_sleep_task(input: EmptyModel, ctx: DurableContext) -> None:\n res = await ctx.aio_sleep_for(timedelta(seconds=5))\n\n print(\"got result\", res)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\"durable-sleep-worker\", workflows=[durable_sleep_task])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/durable_sleep/worker.py", - "blocks": { - "durable_sleep": { - "start": 9, - "stop": 15 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/events/event.ts b/frontend/docs/lib/generated/snips/python/events/event.ts deleted file mode 100644 index d0d92d683..000000000 --- a/frontend/docs/lib/generated/snips/python/events/event.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n# > Event trigger\nhatchet.event.push(\"user:create\", {\"should_skip\": False})\n", - "source": "out/python/events/event.py", - "blocks": { - "event_trigger": { - "start": 6, - "stop": 6 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/events/filter.ts b/frontend/docs/lib/generated/snips/python/events/filter.ts deleted file mode 100644 index f1e6d63b1..000000000 --- a/frontend/docs/lib/generated/snips/python/events/filter.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.events.worker import EVENT_KEY, event_workflow\nfrom hatchet_sdk import Hatchet, PushEventOptions\n\nhatchet = Hatchet()\n\n# > Create a filter\nhatchet.filters.create(\n workflow_id=event_workflow.id,\n expression=\"input.should_skip == false\",\n scope=\"foobarbaz\",\n payload={\n \"main_character\": \"Anna\",\n \"supporting_character\": \"Stiva\",\n \"location\": \"Moscow\",\n },\n)\n\n# > Skip a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n \"should_skip\": True,\n },\n options=PushEventOptions(\n scope=\"foobarbaz\",\n ),\n)\n\n# > Trigger a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n \"should_skip\": False,\n },\n options=PushEventOptions(\n scope=\"foobarbaz\",\n ),\n)\n", - "source": "out/python/events/filter.py", - "blocks": { - "create_a_filter": { - "start": 7, - "stop": 16 - }, - "skip_a_run": { - "start": 19, - "stop": 27 - }, - "trigger_a_run": { - "start": 30, - "stop": 38 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/events/index.ts b/frontend/docs/lib/generated/snips/python/events/index.ts deleted file mode 100644 index 7852e15a6..000000000 --- a/frontend/docs/lib/generated/snips/python/events/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import event from './event'; -import filter from './filter'; -import test_event from './test_event'; -import worker from './worker'; - -export { event } -export { filter } -export { test_event } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/events/test_event.ts b/frontend/docs/lib/generated/snips/python/events/test_event.ts deleted file mode 100644 index 
78c072f87..000000000 --- a/frontend/docs/lib/generated/snips/python/events/test_event.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nimport json\nfrom collections.abc import AsyncGenerator\nfrom contextlib import asynccontextmanager\nfrom datetime import datetime, timedelta, timezone\nfrom typing import cast\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.events.worker import (\n EVENT_KEY,\n SECONDARY_KEY,\n WILDCARD_KEY,\n EventWorkflowInput,\n event_workflow,\n)\nfrom hatchet_sdk.clients.events import (\n BulkPushEventOptions,\n BulkPushEventWithMetadata,\n PushEventOptions,\n)\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\nfrom hatchet_sdk.contracts.events_pb2 import Event\nfrom hatchet_sdk.hatchet import Hatchet\n\n\nclass ProcessedEvent(BaseModel):\n id: str\n payload: dict[str, str | bool]\n meta: dict[str, str | bool | int]\n should_have_runs: bool\n test_run_id: str\n\n def __hash__(self) -> int:\n return hash(self.model_dump_json())\n\n\n@asynccontextmanager\nasync def event_filter(\n hatchet: Hatchet,\n test_run_id: str,\n expression: str | None = None,\n payload: dict[str, str] = {},\n scope: str | None = None,\n) -> AsyncGenerator[None, None]:\n expression = (\n expression\n or f\"input.should_skip == false && payload.test_run_id == '{test_run_id}'\"\n )\n\n f = await hatchet.filters.aio_create(\n workflow_id=event_workflow.id,\n expression=expression,\n scope=scope or test_run_id,\n payload={\"test_run_id\": test_run_id, **payload},\n )\n\n try:\n yield\n finally:\n await hatchet.filters.aio_delete(f.metadata.id)\n\n\nasync def fetch_runs_for_event(\n hatchet: Hatchet, event: Event\n) -> tuple[ProcessedEvent, list[V1TaskSummary]]:\n runs = await hatchet.runs.aio_list(triggering_event_external_id=event.eventId)\n\n meta = (\n cast(dict[str, str | int | bool], json.loads(event.additionalMetadata))\n if event.additionalMetadata\n else {}\n )\n payload = (\n cast(dict[str, str | bool], json.loads(event.payload)) if event.payload else {}\n )\n\n processed_event = ProcessedEvent(\n id=event.eventId,\n payload=payload,\n meta=meta,\n should_have_runs=meta.get(\"should_have_runs\", False) is True,\n test_run_id=cast(str, meta[\"test_run_id\"]),\n )\n\n if not all([r.output for r in runs.rows]):\n return (processed_event, [])\n\n return (\n processed_event,\n runs.rows or [],\n )\n\n\nasync def wait_for_result(\n hatchet: Hatchet, events: list[Event]\n) -> dict[ProcessedEvent, list[V1TaskSummary]]:\n await asyncio.sleep(3)\n\n since = datetime.now(tz=timezone.utc) - timedelta(minutes=2)\n\n persisted = (await hatchet.event.aio_list(limit=100, since=since)).rows or []\n\n assert {e.eventId for e in events}.issubset({e.metadata.id for e in persisted})\n\n iters = 0\n while True:\n print(\"Waiting for event runs to complete...\")\n if iters > 15:\n print(\"Timed out waiting for event runs to complete.\")\n return {\n ProcessedEvent(\n id=event.eventId,\n payload=json.loads(event.payload) if event.payload else {},\n meta=(\n json.loads(event.additionalMetadata)\n if event.additionalMetadata\n else {}\n ),\n should_have_runs=False,\n test_run_id=cast(\n str, json.loads(event.additionalMetadata).get(\"test_run_id\", \"\")\n ),\n ): []\n for event in events\n }\n\n iters += 1\n\n event_runs = await asyncio.gather(\n 
*[fetch_runs_for_event(hatchet, event) for event in events]\n )\n\n all_empty = all(not event_run for _, event_run in event_runs)\n\n if all_empty:\n await asyncio.sleep(1)\n continue\n\n event_id_to_runs = {event_id: runs for (event_id, runs) in event_runs}\n\n any_queued_or_running = any(\n run.status in [V1TaskStatus.QUEUED, V1TaskStatus.RUNNING]\n for runs in event_id_to_runs.values()\n for run in runs\n )\n\n if any_queued_or_running:\n await asyncio.sleep(1)\n continue\n\n break\n\n return event_id_to_runs\n\n\nasync def wait_for_result_and_assert(hatchet: Hatchet, events: list[Event]) -> None:\n event_to_runs = await wait_for_result(hatchet, events)\n\n for event, runs in event_to_runs.items():\n await assert_event_runs_processed(event, runs)\n\n\nasync def assert_event_runs_processed(\n event: ProcessedEvent,\n runs: list[V1TaskSummary],\n) -> None:\n runs = [\n run\n for run in runs\n if (run.additional_metadata or {}).get(\"hatchet__event_id\") == event.id\n ]\n\n if event.should_have_runs:\n assert len(runs) > 0\n\n for run in runs:\n assert run.status == V1TaskStatus.COMPLETED\n assert run.output.get(\"test_run_id\") == event.test_run_id\n else:\n assert len(runs) == 0\n\n\ndef bpi(\n index: int = 1,\n test_run_id: str = \"\",\n should_skip: bool = False,\n should_have_runs: bool = True,\n key: str = EVENT_KEY,\n payload: dict[str, str] = {},\n scope: str | None = None,\n) -> BulkPushEventWithMetadata:\n return BulkPushEventWithMetadata(\n key=key,\n payload={\n \"should_skip\": should_skip,\n **payload,\n },\n additional_metadata={\n \"should_have_runs\": should_have_runs,\n \"test_run_id\": test_run_id,\n \"key\": index,\n },\n scope=scope,\n )\n\n\ndef cp(should_skip: bool) -> dict[str, bool]:\n return EventWorkflowInput(should_skip=should_skip).model_dump()\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_event_push(hatchet: Hatchet) -> None:\n e = hatchet.event.push(EVENT_KEY, cp(False))\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_async_event_push(hatchet: Hatchet) -> None:\n e = await hatchet.event.aio_push(EVENT_KEY, cp(False))\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_async_event_bulk_push(hatchet: Hatchet) -> None:\n events = [\n BulkPushEventWithMetadata(\n key=\"event1\",\n payload={\"message\": \"This is event 1\", \"should_skip\": False},\n additional_metadata={\"source\": \"test\", \"user_id\": \"user123\"},\n ),\n BulkPushEventWithMetadata(\n key=\"event2\",\n payload={\"message\": \"This is event 2\", \"should_skip\": False},\n additional_metadata={\"source\": \"test\", \"user_id\": \"user456\"},\n ),\n BulkPushEventWithMetadata(\n key=\"event3\",\n payload={\"message\": \"This is event 3\", \"should_skip\": False},\n additional_metadata={\"source\": \"test\", \"user_id\": \"user789\"},\n ),\n ]\n opts = BulkPushEventOptions(namespace=\"bulk-test\")\n\n e = await hatchet.event.aio_bulk_push(events, opts)\n\n assert len(e) == 3\n\n # Sort both lists of events by their key to ensure comparison order\n sorted_events = sorted(events, key=lambda x: x.key)\n sorted_returned_events = sorted(e, key=lambda x: x.key)\n namespace = \"bulk-test\"\n\n # Check that the returned events match the original events\n for original_event, returned_event in zip(\n sorted_events, sorted_returned_events, strict=False\n ):\n assert returned_event.key == namespace + original_event.key\n\n\n@pytest.fixture(scope=\"function\")\ndef test_run_id() -> str:\n return 
str(uuid4())\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_event_engine_behavior(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n events = [\n bpi(\n test_run_id=test_run_id,\n ),\n bpi(\n test_run_id=test_run_id,\n key=\"thisisafakeeventfoobarbaz\",\n should_have_runs=False,\n ),\n ]\n\n result = await hatchet.event.aio_bulk_push(events)\n\n await wait_for_result_and_assert(hatchet, result)\n\n\ndef gen_bulk_events(test_run_id: str) -> list[BulkPushEventWithMetadata]:\n return [\n ## No scope, so it shouldn't have any runs\n bpi(\n index=1,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=False,\n ),\n ## No scope, so it shouldn't have any runs\n bpi(\n index=2,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n ),\n ## Scope is set and `should_skip` is False, so it should have runs\n bpi(\n index=3,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=True,\n scope=test_run_id,\n ),\n ## Scope is set and `should_skip` is True, so it shouldn't have runs\n bpi(\n index=4,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n scope=test_run_id,\n ),\n ## Scope is set, `should_skip` is False, but key is different, so it shouldn't have runs\n bpi(\n index=5,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n scope=test_run_id,\n key=\"thisisafakeeventfoobarbaz\",\n ),\n ## Scope is set, `should_skip` is False, but key is different, so it shouldn't have runs\n bpi(\n index=6,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=False,\n scope=test_run_id,\n key=\"thisisafakeeventfoobarbaz\",\n ),\n ]\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_event_skipping_filtering(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(hatchet, test_run_id):\n events = gen_bulk_events(test_run_id)\n\n result = await hatchet.event.aio_bulk_push(events)\n\n await wait_for_result_and_assert(hatchet, result)\n\n\nasync def bulk_to_single(hatchet: Hatchet, event: BulkPushEventWithMetadata) -> Event:\n return await hatchet.event.aio_push(\n event_key=event.key,\n payload=event.payload,\n options=PushEventOptions(\n scope=event.scope,\n additional_metadata=event.additional_metadata,\n priority=event.priority,\n ),\n )\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_event_skipping_filtering_no_bulk(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(hatchet, test_run_id):\n raw_events = gen_bulk_events(test_run_id)\n events = await asyncio.gather(\n *[bulk_to_single(hatchet, event) for event in raw_events]\n )\n\n await wait_for_result_and_assert(hatchet, events)\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_event_payload_filtering(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n \"input.should_skip == false && payload.foobar == 'baz'\",\n {\"foobar\": \"qux\"},\n ):\n event = await hatchet.event.aio_push(\n event_key=EVENT_KEY,\n payload={\"message\": \"This is event 1\", \"should_skip\": False},\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n \"should_have_runs\": False,\n \"test_run_id\": test_run_id,\n \"key\": 1,\n },\n ),\n )\n\n await wait_for_result_and_assert(hatchet, [event])\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_event_payload_filtering_with_payload_match(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n 
\"input.should_skip == false && payload.foobar == 'baz'\",\n {\"foobar\": \"baz\"},\n ):\n event = await hatchet.event.aio_push(\n event_key=EVENT_KEY,\n payload={\"message\": \"This is event 1\", \"should_skip\": False},\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n \"should_have_runs\": True,\n \"test_run_id\": test_run_id,\n \"key\": 1,\n },\n ),\n )\n\n await wait_for_result_and_assert(hatchet, [event])\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_filtering_by_event_key(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n f\"event_key == '{SECONDARY_KEY}'\",\n ):\n event_1 = await hatchet.event.aio_push(\n event_key=SECONDARY_KEY,\n payload={\n \"message\": \"Should run because filter matches\",\n \"should_skip\": False,\n },\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n \"should_have_runs\": True,\n \"test_run_id\": test_run_id,\n },\n ),\n )\n event_2 = await hatchet.event.aio_push(\n event_key=EVENT_KEY,\n payload={\n \"message\": \"Should skip because filter does not match\",\n \"should_skip\": False,\n },\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n \"should_have_runs\": False,\n \"test_run_id\": test_run_id,\n },\n ),\n )\n\n await wait_for_result_and_assert(hatchet, [event_1, event_2])\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_key_wildcards(hatchet: Hatchet, test_run_id: str) -> None:\n keys = [\n WILDCARD_KEY.replace(\"*\", \"1\"),\n WILDCARD_KEY.replace(\"*\", \"2\"),\n \"foobar\",\n EVENT_KEY,\n ]\n\n async with event_filter(\n hatchet,\n test_run_id,\n ):\n events = [\n await hatchet.event.aio_push(\n event_key=key,\n payload={\n \"should_skip\": False,\n },\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n \"should_have_runs\": key != \"foobar\",\n \"test_run_id\": test_run_id,\n },\n ),\n )\n for key in keys\n ]\n\n await wait_for_result_and_assert(hatchet, events)\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_multiple_runs_for_multiple_scope_matches(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(\n hatchet, test_run_id, payload={\"filter_id\": \"1\"}, expression=\"1 == 1\"\n ):\n async with event_filter(\n hatchet, test_run_id, payload={\"filter_id\": \"2\"}, expression=\"2 == 2\"\n ):\n event = await hatchet.event.aio_push(\n event_key=EVENT_KEY,\n payload={\n \"should_skip\": False,\n },\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n \"should_have_runs\": True,\n \"test_run_id\": test_run_id,\n },\n ),\n )\n\n event_to_runs = await wait_for_result(hatchet, [event])\n\n assert len(event_to_runs.keys()) == 1\n\n runs = list(event_to_runs.values())[0]\n\n assert len(runs) == 2\n\n assert {r.output.get(\"filter_id\") for r in runs} == {\"1\", \"2\"}\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_multi_scope_bug(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(hatchet, test_run_id, expression=\"1 == 1\", scope=\"a\"):\n async with event_filter(\n hatchet,\n test_run_id,\n expression=\"2 == 2\",\n scope=\"b\",\n ):\n events = await hatchet.event.aio_bulk_push(\n [\n BulkPushEventWithMetadata(\n key=EVENT_KEY,\n payload={\n \"should_skip\": False,\n },\n additional_metadata={\n \"should_have_runs\": True,\n \"test_run_id\": test_run_id,\n },\n scope=\"a\" if i % 2 == 0 else \"b\",\n )\n for i in range(100)\n ],\n )\n\n await asyncio.sleep(15)\n\n for event in 
events:\n runs = await hatchet.runs.aio_list(\n triggering_event_external_id=event.eventId,\n additional_metadata={\"test_run_id\": test_run_id},\n )\n\n assert len(runs.rows) == 1\n", - "source": "out/python/events/test_event.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/events/worker.ts b/frontend/docs/lib/generated/snips/python/events/worker.ts deleted file mode 100644 index be9caaccd..000000000 --- a/frontend/docs/lib/generated/snips/python/events/worker.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, DefaultFilter, Hatchet\n\nhatchet = Hatchet()\n\n\n# > Event trigger\nEVENT_KEY = \"user:create\"\nSECONDARY_KEY = \"foobarbaz\"\nWILDCARD_KEY = \"subscription:*\"\n\n\nclass EventWorkflowInput(BaseModel):\n should_skip: bool\n\n\nevent_workflow = hatchet.workflow(\n name=\"EventWorkflow\",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n)\n\n# > Event trigger with filter\nevent_workflow_with_filter = hatchet.workflow(\n name=\"EventWorkflow\",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n default_filters=[\n DefaultFilter(\n expression=\"true\",\n scope=\"example-scope\",\n payload={\n \"main_character\": \"Anna\",\n \"supporting_character\": \"Stiva\",\n \"location\": \"Moscow\",\n },\n )\n ],\n)\n\n\n@event_workflow.task()\ndef task(input: EventWorkflowInput, ctx: Context) -> dict[str, str]:\n print(\"event received\")\n\n return ctx.filter_payload\n\n\n# > Accessing the filter payload\n@event_workflow_with_filter.task()\ndef filtered_task(input: EventWorkflowInput, ctx: Context) -> None:\n print(ctx.filter_payload)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(name=\"EventWorker\", workflows=[event_workflow])\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/events/worker.py", - "blocks": { - "event_trigger": { - "start": 9, - "stop": 22 - }, - "event_trigger_with_filter": { - "start": 25, - "stop": 40 - }, - "accessing_the_filter_payload": { - "start": 51, - "stop": 55 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/fanout/index.ts b/frontend/docs/lib/generated/snips/python/fanout/index.ts deleted file mode 100644 index 25c7710b0..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import stream from './stream'; -import sync_stream from './sync_stream'; -import test_fanout from './test_fanout'; -import trigger from './trigger'; -import worker from './worker'; - -export { stream } -export { sync_stream } -export { test_fanout } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/fanout/stream.ts b/frontend/docs/lib/generated/snips/python/fanout/stream.ts deleted file mode 100644 index fb0dcf0b8..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout/stream.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nimport random\n\nfrom examples.fanout.worker import ParentInput, parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\n\nasync def main() -> None:\n\n 
hatchet = Hatchet()\n\n # Generate a random stream key to use to track all\n # stream events for this workflow run.\n\n streamKey = \"streamKey\"\n streamVal = f\"sk-{random.randint(1, 100)}\"\n\n # Specify the stream key as additional metadata\n # when running the workflow.\n\n # This key gets propagated to all child workflows\n # and can have an arbitrary property name.\n\n parent_wf.run(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={streamKey: streamVal}),\n )\n\n # Stream all events for the additional meta key value\n listener = hatchet.listener.stream_by_additional_metadata(streamKey, streamVal)\n\n async for event in listener:\n print(event.type, event.payload)\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/fanout/stream.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/fanout/sync_stream.ts b/frontend/docs/lib/generated/snips/python/fanout/sync_stream.ts deleted file mode 100644 index 8d68ad3cb..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout/sync_stream.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import random\n\nfrom examples.fanout.worker import ParentInput, parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\n\ndef main() -> None:\n\n hatchet = Hatchet()\n\n # Generate a random stream key to use to track all\n # stream events for this workflow run.\n\n streamKey = \"streamKey\"\n streamVal = f\"sk-{random.randint(1, 100)}\"\n\n # Specify the stream key as additional metadata\n # when running the workflow.\n\n # This key gets propagated to all child workflows\n # and can have an arbitrary property name.\n\n parent_wf.run(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={streamKey: streamVal}),\n )\n\n # Stream all events for the additional meta key value\n listener = hatchet.listener.stream_by_additional_metadata(streamKey, streamVal)\n\n for event in listener:\n print(event.type, event.payload)\n\n print(\"DONE.\")\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/fanout/sync_stream.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/fanout/test_fanout.ts b/frontend/docs/lib/generated/snips/python/fanout/test_fanout.ts deleted file mode 100644 index fdefe522b..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout/test_fanout.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nfrom uuid import uuid4\n\nimport pytest\n\nfrom examples.fanout.worker import ParentInput, parent_wf\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_run(hatchet: Hatchet) -> None:\n ref = await parent_wf.aio_run_no_wait(\n ParentInput(n=2),\n )\n\n result = await ref.aio_result()\n\n assert len(result[\"spawn\"][\"results\"]) == 2\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_additional_metadata_propagation(hatchet: Hatchet) -> None:\n test_run_id = uuid4().hex\n\n ref = await parent_wf.aio_run_no_wait(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(\n additional_metadata={\"test_run_id\": test_run_id}\n ),\n )\n\n await ref.aio_result()\n 
await asyncio.sleep(1)\n\n runs = await hatchet.runs.aio_list(\n parent_task_external_id=ref.workflow_run_id,\n additional_metadata={\"test_run_id\": test_run_id},\n )\n\n assert runs.rows\n\n \"\"\"Assert that the additional metadata is propagated to the child runs.\"\"\"\n for run in runs.rows:\n assert run.additional_metadata\n assert run.additional_metadata[\"test_run_id\"] == test_run_id\n\n assert run.children\n for child in run.children:\n assert child.additional_metadata\n assert child.additional_metadata[\"test_run_id\"] == test_run_id\n", - "source": "out/python/fanout/test_fanout.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/fanout/trigger.ts b/frontend/docs/lib/generated/snips/python/fanout/trigger.ts deleted file mode 100644 index 4924d8164..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\n\nfrom examples.fanout.worker import ParentInput, parent_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\n\nhatchet = Hatchet()\n\n\nasync def main() -> None:\n await parent_wf.aio_run(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={\"hello\": \"moon\"}),\n )\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/fanout/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/fanout/worker.ts b/frontend/docs/lib/generated/snips/python/fanout/worker.ts deleted file mode 100644 index dbf13631f..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout/worker.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from datetime import timedelta\nfrom typing import Any\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet, TriggerWorkflowOptions\n\nhatchet = Hatchet(debug=True)\n\n\n# > FanoutParent\nclass ParentInput(BaseModel):\n n: int = 100\n\n\nclass ChildInput(BaseModel):\n a: str\n\n\nparent_wf = hatchet.workflow(name=\"FanoutParent\", input_validator=ParentInput)\nchild_wf = hatchet.workflow(name=\"FanoutChild\", input_validator=ChildInput)\n\n\n@parent_wf.task(execution_timeout=timedelta(minutes=5))\nasync def spawn(input: ParentInput, ctx: Context) -> dict[str, Any]:\n print(\"spawning child\")\n\n result = await child_wf.aio_run_many(\n [\n child_wf.create_bulk_run_item(\n input=ChildInput(a=str(i)),\n options=TriggerWorkflowOptions(\n additional_metadata={\"hello\": \"earth\"}, key=f\"child{i}\"\n ),\n )\n for i in range(input.n)\n ],\n )\n\n print(f\"results {result}\")\n\n return {\"results\": result}\n\n\n\n\n# > FanoutChild\n@child_wf.task()\nasync def process(input: ChildInput, ctx: Context) -> dict[str, str]:\n print(f\"child process {input.a}\")\n return {\"status\": input.a}\n\n\n@child_wf.task(parents=[process])\nasync def process2(input: ChildInput, ctx: Context) -> dict[str, str]:\n process_output = ctx.task_output(process)\n a = process_output[\"status\"]\n\n return {\"status2\": a + \"2\"}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\"fanout-worker\", slots=40, workflows=[parent_wf, child_wf])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": 
"out/python/fanout/worker.py", - "blocks": { - "fanoutparent": { - "start": 12, - "stop": 44 - }, - "fanoutchild": { - "start": 48, - "stop": 61 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/fanout_sync/index.ts b/frontend/docs/lib/generated/snips/python/fanout_sync/index.ts deleted file mode 100644 index c62fd3e95..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout_sync/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_fanout_sync from './test_fanout_sync'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_fanout_sync } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/fanout_sync/test_fanout_sync.ts b/frontend/docs/lib/generated/snips/python/fanout_sync/test_fanout_sync.ts deleted file mode 100644 index 0414fd842..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout_sync/test_fanout_sync.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nfrom uuid import uuid4\n\nimport pytest\n\nfrom examples.fanout_sync.worker import ParentInput, sync_fanout_parent\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\n\n\ndef test_run() -> None:\n N = 2\n\n result = sync_fanout_parent.run(ParentInput(n=N))\n\n assert len(result[\"spawn\"][\"results\"]) == N\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_additional_metadata_propagation_sync(hatchet: Hatchet) -> None:\n test_run_id = uuid4().hex\n\n ref = await sync_fanout_parent.aio_run_no_wait(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(\n additional_metadata={\"test_run_id\": test_run_id}\n ),\n )\n\n await ref.aio_result()\n await asyncio.sleep(1)\n\n runs = await hatchet.runs.aio_list(\n parent_task_external_id=ref.workflow_run_id,\n additional_metadata={\"test_run_id\": test_run_id},\n )\n\n assert runs.rows\n\n \"\"\"Assert that the additional metadata is propagated to the child runs.\"\"\"\n for run in runs.rows:\n assert run.additional_metadata\n assert run.additional_metadata[\"test_run_id\"] == test_run_id\n\n assert run.children\n for child in run.children:\n assert child.additional_metadata\n assert child.additional_metadata[\"test_run_id\"] == test_run_id\n", - "source": "out/python/fanout_sync/test_fanout_sync.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/fanout_sync/trigger.ts b/frontend/docs/lib/generated/snips/python/fanout_sync/trigger.ts deleted file mode 100644 index 7f670e986..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout_sync/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\n\nfrom examples.fanout_sync.worker import ParentInput, sync_fanout_parent\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\n\nhatchet = Hatchet()\n\n\nasync def main() -> None:\n sync_fanout_parent.run(\n ParentInput(n=2),\n options=TriggerWorkflowOptions(additional_metadata={\"hello\": \"moon\"}),\n )\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/fanout_sync/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/fanout_sync/worker.ts 
b/frontend/docs/lib/generated/snips/python/fanout_sync/worker.ts deleted file mode 100644 index b5b3f6f75..000000000 --- a/frontend/docs/lib/generated/snips/python/fanout_sync/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from datetime import timedelta\nfrom typing import Any\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet, TriggerWorkflowOptions\n\nhatchet = Hatchet(debug=True)\n\n\nclass ParentInput(BaseModel):\n n: int = 5\n\n\nclass ChildInput(BaseModel):\n a: str\n\n\nsync_fanout_parent = hatchet.workflow(\n name=\"SyncFanoutParent\", input_validator=ParentInput\n)\nsync_fanout_child = hatchet.workflow(name=\"SyncFanoutChild\", input_validator=ChildInput)\n\n\n@sync_fanout_parent.task(execution_timeout=timedelta(minutes=5))\ndef spawn(input: ParentInput, ctx: Context) -> dict[str, list[dict[str, Any]]]:\n print(\"spawning child\")\n\n results = sync_fanout_child.run_many(\n [\n sync_fanout_child.create_bulk_run_item(\n input=ChildInput(a=str(i)),\n key=f\"child{i}\",\n options=TriggerWorkflowOptions(additional_metadata={\"hello\": \"earth\"}),\n )\n for i in range(input.n)\n ],\n )\n\n print(f\"results {results}\")\n\n return {\"results\": results}\n\n\n@sync_fanout_child.task()\ndef process(input: ChildInput, ctx: Context) -> dict[str, str]:\n return {\"status\": \"success \" + input.a}\n\n\n@sync_fanout_child.task(parents=[process])\ndef process2(input: ChildInput, ctx: Context) -> dict[str, str]:\n process_output = ctx.task_output(process)\n a = process_output[\"status\"]\n\n return {\"status2\": a + \"2\"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"sync-fanout-worker\",\n slots=40,\n workflows=[sync_fanout_parent, sync_fanout_child],\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/fanout_sync/worker.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/hatchet_client.ts b/frontend/docs/lib/generated/snips/python/hatchet_client.ts deleted file mode 100644 index 49c3db68a..000000000 --- a/frontend/docs/lib/generated/snips/python/hatchet_client.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import Hatchet\n\n# Initialize Hatchet client\nhatchet = Hatchet()\n", - "source": "out/python/hatchet_client.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/index.ts b/frontend/docs/lib/generated/snips/python/index.ts deleted file mode 100644 index 962dc4669..000000000 --- a/frontend/docs/lib/generated/snips/python/index.ts +++ /dev/null @@ -1,99 +0,0 @@ -import __init__ from './__init__'; -import hatchet_client from './hatchet_client'; -import worker from './worker'; -import * as affinity_workers from './affinity_workers'; -import * as api from './api'; -import * as blocked_async from './blocked_async'; -import * as bulk_fanout from './bulk_fanout'; -import * as bulk_operations from './bulk_operations'; -import * as cancellation from './cancellation'; -import * as child from './child'; -import * as concurrency_limit from './concurrency_limit'; -import * as concurrency_limit_rr from './concurrency_limit_rr'; -import * as concurrency_limit_rr_load from './concurrency_limit_rr_load'; -import * as concurrency_multiple_keys 
from './concurrency_multiple_keys'; -import * as concurrency_workflow_level from './concurrency_workflow_level'; -import * as conditions from './conditions'; -import * as cron from './cron'; -import * as dag from './dag'; -import * as dedupe from './dedupe'; -import * as delayed from './delayed'; -import * as dependency_injection from './dependency_injection'; -import * as durable from './durable'; -import * as durable_event from './durable_event'; -import * as durable_sleep from './durable_sleep'; -import * as events from './events'; -import * as fanout from './fanout'; -import * as fanout_sync from './fanout_sync'; -import * as lifespans from './lifespans'; -import * as logger from './logger'; -import * as manual_slot_release from './manual_slot_release'; -import * as migration_guides from './migration_guides'; -import * as non_retryable from './non_retryable'; -import * as on_failure from './on_failure'; -import * as on_success from './on_success'; -import * as opentelemetry_instrumentation from './opentelemetry_instrumentation'; -import * as priority from './priority'; -import * as quickstart from './quickstart'; -import * as rate_limit from './rate_limit'; -import * as retries from './retries'; -import * as return_exceptions from './return_exceptions'; -import * as scheduled from './scheduled'; -import * as simple from './simple'; -import * as sticky_workers from './sticky_workers'; -import * as streaming from './streaming'; -import * as timeout from './timeout'; -import * as unit_testing from './unit_testing'; -import * as webhooks from './webhooks'; -import * as worker_existing_loop from './worker_existing_loop'; -import * as workflow_registration from './workflow_registration'; - -export { __init__ } -export { hatchet_client } -export { worker } -export { affinity_workers }; -export { api }; -export { blocked_async }; -export { bulk_fanout }; -export { bulk_operations }; -export { cancellation }; -export { child }; -export { concurrency_limit }; -export { concurrency_limit_rr }; -export { concurrency_limit_rr_load }; -export { concurrency_multiple_keys }; -export { concurrency_workflow_level }; -export { conditions }; -export { cron }; -export { dag }; -export { dedupe }; -export { delayed }; -export { dependency_injection }; -export { durable }; -export { durable_event }; -export { durable_sleep }; -export { events }; -export { fanout }; -export { fanout_sync }; -export { lifespans }; -export { logger }; -export { manual_slot_release }; -export { migration_guides }; -export { non_retryable }; -export { on_failure }; -export { on_success }; -export { opentelemetry_instrumentation }; -export { priority }; -export { quickstart }; -export { rate_limit }; -export { retries }; -export { return_exceptions }; -export { scheduled }; -export { simple }; -export { sticky_workers }; -export { streaming }; -export { timeout }; -export { unit_testing }; -export { webhooks }; -export { worker_existing_loop }; -export { workflow_registration }; diff --git a/frontend/docs/lib/generated/snips/python/lifespans/index.ts b/frontend/docs/lib/generated/snips/python/lifespans/index.ts deleted file mode 100644 index f0b3da954..000000000 --- a/frontend/docs/lib/generated/snips/python/lifespans/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import simple from './simple'; -import test_lifespans from './test_lifespans'; -import trigger from './trigger'; -import worker from './worker'; - -export { simple } -export { test_lifespans } -export { trigger } -export { worker } diff --git 
a/frontend/docs/lib/generated/snips/python/lifespans/simple.ts b/frontend/docs/lib/generated/snips/python/lifespans/simple.ts deleted file mode 100644 index 9ebf4170a..000000000 --- a/frontend/docs/lib/generated/snips/python/lifespans/simple.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "# > Lifespan\n\nfrom collections.abc import AsyncGenerator\nfrom typing import cast\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass Lifespan(BaseModel):\n foo: str\n pi: float\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n yield Lifespan(foo=\"bar\", pi=3.14)\n\n\n@hatchet.task(name=\"LifespanWorkflow\")\ndef lifespan_task(input: EmptyModel, ctx: Context) -> Lifespan:\n return cast(Lifespan, ctx.lifespan)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"test-worker\", slots=1, workflows=[lifespan_task], lifespan=lifespan\n )\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/lifespans/simple.py", - "blocks": { - "lifespan": { - "start": 2, - "stop": 33 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/lifespans/test_lifespans.ts b/frontend/docs/lib/generated/snips/python/lifespans/test_lifespans.ts deleted file mode 100644 index c9c047f40..000000000 --- a/frontend/docs/lib/generated/snips/python/lifespans/test_lifespans.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import pytest\n\nfrom examples.lifespans.simple import Lifespan, lifespan_task\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_lifespans() -> None:\n result = await lifespan_task.aio_run()\n\n assert isinstance(result, Lifespan)\n assert result.pi == 3.14\n assert result.foo == \"bar\"\n", - "source": "out/python/lifespans/test_lifespans.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/lifespans/trigger.ts b/frontend/docs/lib/generated/snips/python/lifespans/trigger.ts deleted file mode 100644 index c0cc76922..000000000 --- a/frontend/docs/lib/generated/snips/python/lifespans/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.lifespans.worker import lifespan_workflow\n\nresult = lifespan_workflow.run()\n\nprint(result)\n", - "source": "out/python/lifespans/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/lifespans/worker.ts b/frontend/docs/lib/generated/snips/python/lifespans/worker.ts deleted file mode 100644 index 215574691..000000000 --- a/frontend/docs/lib/generated/snips/python/lifespans/worker.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from collections.abc import AsyncGenerator\nfrom typing import cast\nfrom uuid import UUID\n\nfrom psycopg_pool import ConnectionPool\nfrom pydantic import BaseModel, ConfigDict\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Use the lifespan in a task\nclass TaskOutput(BaseModel):\n num_rows: int\n external_ids: 
list[UUID]\n\n\nlifespan_workflow = hatchet.workflow(name=\"LifespanWorkflow\")\n\n\n@lifespan_workflow.task()\ndef sync_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n    pool = cast(Lifespan, ctx.lifespan).pool\n\n    with pool.connection() as conn:\n        query = conn.execute(\"SELECT * FROM v1_lookup_table_olap LIMIT 5;\")\n        rows = query.fetchall()\n\n        for row in rows:\n            print(row)\n\n    print(\"executed sync task with lifespan\", ctx.lifespan)\n\n    return TaskOutput(\n        num_rows=len(rows),\n        external_ids=[cast(UUID, row[0]) for row in rows],\n    )\n\n\n\n\n@lifespan_workflow.task()\nasync def async_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n    pool = cast(Lifespan, ctx.lifespan).pool\n\n    with pool.connection() as conn:\n        query = conn.execute(\"SELECT * FROM v1_lookup_table_olap LIMIT 5;\")\n        rows = query.fetchall()\n\n        for row in rows:\n            print(row)\n\n    print(\"executed async task with lifespan\", ctx.lifespan)\n\n    return TaskOutput(\n        num_rows=len(rows),\n        external_ids=[cast(UUID, row[0]) for row in rows],\n    )\n\n\n# > Define a lifespan\nclass Lifespan(BaseModel):\n    model_config = ConfigDict(arbitrary_types_allowed=True)\n\n    foo: str\n    pool: ConnectionPool\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n    print(\"Running lifespan!\")\n    with ConnectionPool(\"postgres://hatchet:hatchet@localhost:5431/hatchet\") as pool:\n        yield Lifespan(\n            foo=\"bar\",\n            pool=pool,\n        )\n\n    print(\"Cleaning up lifespan!\")\n\n\nworker = hatchet.worker(\n    \"test-worker\", slots=1, workflows=[lifespan_workflow], lifespan=lifespan\n)\n\n\ndef main() -> None:\n    worker.start()\n\n\nif __name__ == \"__main__\":\n    main()\n",
-  "source": "out/python/lifespans/worker.py",
-  "blocks": {
-    "use_the_lifespan_in_a_task": {
-      "start": 14,
-      "stop": 40
-    },
-    "define_a_lifespan": {
-      "start": 63,
-      "stop": 83
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/logger/client.ts b/frontend/docs/lib/generated/snips/python/logger/client.ts
deleted file mode 100644
index 0d59a1741..000000000
--- a/frontend/docs/lib/generated/snips/python/logger/client.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "# > RootLogger\n\n\nimport logging\n\nfrom hatchet_sdk import ClientConfig, Hatchet\n\nlogging.basicConfig(level=logging.INFO)\n\nroot_logger = logging.getLogger()\n\nhatchet = Hatchet(\n    debug=True,\n    config=ClientConfig(\n        logger=root_logger,\n    ),\n)\n\n",
-  "source": "out/python/logger/client.py",
-  "blocks": {
-    "rootlogger": {
-      "start": 2,
-      "stop": 18
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/logger/index.ts b/frontend/docs/lib/generated/snips/python/logger/index.ts
deleted file mode 100644
index 0c78ae56f..000000000
--- a/frontend/docs/lib/generated/snips/python/logger/index.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import client from './client';
-import test_logger from './test_logger';
-import trigger from './trigger';
-import worker from './worker';
-import workflow from './workflow';
-
-export { client }
-export { test_logger }
-export { trigger }
-export { worker }
-export { workflow }
diff --git a/frontend/docs/lib/generated/snips/python/logger/test_logger.ts b/frontend/docs/lib/generated/snips/python/logger/test_logger.ts
deleted file mode 100644
index 8035e2fe6..000000000
--- a/frontend/docs/lib/generated/snips/python/logger/test_logger.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import pytest\n\nfrom examples.logger.workflow import logging_workflow\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_run() -> None:\n    result = await logging_workflow.aio_run()\n\n    assert result[\"root_logger\"][\"status\"] == \"success\"\n",
-  "source": "out/python/logger/test_logger.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/logger/trigger.ts b/frontend/docs/lib/generated/snips/python/logger/trigger.ts
deleted file mode 100644
index 8ffb790ba..000000000
--- a/frontend/docs/lib/generated/snips/python/logger/trigger.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from examples.logger.workflow import logging_workflow\n\nlogging_workflow.run()\n",
-  "source": "out/python/logger/trigger.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/logger/worker.ts b/frontend/docs/lib/generated/snips/python/logger/worker.ts
deleted file mode 100644
index f6f883b52..000000000
--- a/frontend/docs/lib/generated/snips/python/logger/worker.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from examples.logger.client import hatchet\nfrom examples.logger.workflow import logging_workflow\n\n\ndef main() -> None:\n    worker = hatchet.worker(\"logger-worker\", slots=5, workflows=[logging_workflow])\n\n    worker.start()\n\n\nif __name__ == \"__main__\":\n    main()\n",
-  "source": "out/python/logger/worker.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/logger/workflow.ts b/frontend/docs/lib/generated/snips/python/logger/workflow.ts
deleted file mode 100644
index d164d665a..000000000
--- a/frontend/docs/lib/generated/snips/python/logger/workflow.ts
+++ /dev/null
@@ -1,20 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "# > LoggingWorkflow\n\nimport logging\nimport time\n\nfrom examples.logger.client import hatchet\nfrom hatchet_sdk import Context, EmptyModel\n\nlogger = logging.getLogger(__name__)\n\nlogging_workflow = hatchet.workflow(\n    name=\"LoggingWorkflow\",\n)\n\n\n@logging_workflow.task()\ndef root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n    for i in range(12):\n        logger.info(f\"executed step1 - {i}\")\n        logger.info({\"step1\": \"step1\"})\n\n        time.sleep(0.1)\n\n    return {\"status\": \"success\"}\n\n\n\n# > ContextLogger\n\n\n@logging_workflow.task()\ndef context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n    for i in range(12):\n        ctx.log(f\"executed step1 - {i}\")\n        ctx.log({\"step1\": \"step1\"})\n\n        time.sleep(0.1)\n\n    return {\"status\": \"success\"}\n\n\n",
-  "source": "out/python/logger/workflow.py",
-  "blocks": {
-    "loggingworkflow": {
-      "start": 2,
-      "stop": 26
-    },
-    "contextlogger": {
-      "start": 29,
-      "stop": 41
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/manual_slot_release/index.ts b/frontend/docs/lib/generated/snips/python/manual_slot_release/index.ts
deleted file mode 100644
index 30b1adbeb..000000000
--- a/frontend/docs/lib/generated/snips/python/manual_slot_release/index.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-import worker from './worker';
-
-export { worker }
diff --git a/frontend/docs/lib/generated/snips/python/manual_slot_release/worker.ts b/frontend/docs/lib/generated/snips/python/manual_slot_release/worker.ts
deleted file mode 100644
index f908426cd..000000000
--- a/frontend/docs/lib/generated/snips/python/manual_slot_release/worker.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "import time\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet()\n\n# > SlotRelease\n\nslot_release_workflow = hatchet.workflow(name=\"SlotReleaseWorkflow\")\n\n\n@slot_release_workflow.task()\ndef step1(input: EmptyModel, ctx: Context) -> dict[str, str]:\n    print(\"RESOURCE INTENSIVE PROCESS\")\n    time.sleep(10)\n\n    # 👀 Release the slot after the resource-intensive process, so that other steps can run\n    ctx.release_slot()\n\n    print(\"NON RESOURCE INTENSIVE PROCESS\")\n    return {\"status\": \"success\"}\n\n\n",
-  "source": "out/python/manual_slot_release/worker.py",
-  "blocks": {
-    "slotrelease": {
-      "start": 8,
-      "stop": 23
-    }
-  },
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/migration_guides/__init__.ts b/frontend/docs/lib/generated/snips/python/migration_guides/__init__.ts
deleted file mode 100644
index ec2fbfd51..000000000
--- a/frontend/docs/lib/generated/snips/python/migration_guides/__init__.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "",
-  "source": "out/python/migration_guides/__init__.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/migration_guides/hatchet_client.ts b/frontend/docs/lib/generated/snips/python/migration_guides/hatchet_client.ts
deleted file mode 100644
index 811b63bd8..000000000
--- a/frontend/docs/lib/generated/snips/python/migration_guides/hatchet_client.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n",
-  "source": "out/python/migration_guides/hatchet_client.py",
-  "blocks": {},
-  "highlights": {}
-};
-
-export default snippet;
diff --git a/frontend/docs/lib/generated/snips/python/migration_guides/index.ts b/frontend/docs/lib/generated/snips/python/migration_guides/index.ts
deleted file mode 100644
index dbe5b13ec..000000000
--- a/frontend/docs/lib/generated/snips/python/migration_guides/index.ts
+++ /dev/null
@@ -1,7 +0,0 @@
-import __init__ from './__init__';
-import hatchet_client from './hatchet_client';
-import mergent from './mergent';
-
-export { __init__ }
-export { hatchet_client }
-export { mergent }
diff --git a/frontend/docs/lib/generated/snips/python/migration_guides/mergent.ts b/frontend/docs/lib/generated/snips/python/migration_guides/mergent.ts
deleted file mode 100644
index ccd6d60e4..000000000
--- a/frontend/docs/lib/generated/snips/python/migration_guides/mergent.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import { Snippet } from '@/lib/generated/snips/types';
-
-const snippet: Snippet = {
-  "language": "python",
-  "content": "from collections.abc import Mapping\nfrom datetime import datetime, timedelta, timezone\nfrom typing import 
Any\n\nimport requests\nfrom pydantic import BaseModel\nfrom requests import Response\n\nfrom hatchet_sdk.context.context import Context\n\nfrom .hatchet_client import hatchet\n\n\nasync def process_image(image_url: str, filters: list[str]) -> dict[str, Any]:\n # Do some image processing\n return {\"url\": image_url, \"size\": 100, \"format\": \"png\"}\n\n\n# > Before (Mergent)\nasync def process_image_task(request: Any) -> dict[str, Any]:\n image_url = request.json[\"image_url\"]\n filters = request.json[\"filters\"]\n try:\n result = await process_image(image_url, filters)\n return {\"success\": True, \"processed_url\": result[\"url\"]}\n except Exception as e:\n print(f\"Image processing failed: {e}\")\n raise\n\n\n\n\n# > After (Hatchet)\nclass ImageProcessInput(BaseModel):\n image_url: str\n filters: list[str]\n\n\nclass ImageProcessOutput(BaseModel):\n processed_url: str\n metadata: dict[str, Any]\n\n\n@hatchet.task(\n name=\"image-processor\",\n retries=3,\n execution_timeout=\"10m\",\n input_validator=ImageProcessInput,\n)\nasync def image_processor(input: ImageProcessInput, ctx: Context) -> ImageProcessOutput:\n # Do some image processing\n result = await process_image(input.image_url, input.filters)\n\n if not result[\"url\"]:\n raise ValueError(\"Processing failed to generate URL\")\n\n return ImageProcessOutput(\n processed_url=result[\"url\"],\n metadata={\n \"size\": result[\"size\"],\n \"format\": result[\"format\"],\n \"applied_filters\": input.filters,\n },\n )\n\n\n\n\nasync def run() -> None:\n # > Running a task (Mergent)\n headers: Mapping[str, str] = {\n \"Authorization\": \"Bearer \",\n \"Content-Type\": \"application/json\",\n }\n\n task_data = {\n \"name\": \"4cf95241-fa19-47ef-8a67-71e483747649\",\n \"queue\": \"default\",\n \"request\": {\n \"url\": \"https://example.com\",\n \"headers\": {\n \"Authorization\": \"fake-secret-token\",\n \"Content-Type\": \"application/json\",\n },\n \"body\": \"Hello, world!\",\n },\n }\n\n try:\n response: Response = requests.post(\n \"https://api.mergent.co/v2/tasks\",\n headers=headers,\n json=task_data,\n )\n print(response.json())\n except Exception as e:\n print(f\"Error: {e}\")\n\n # > Running a task (Hatchet)\n result = await image_processor.aio_run(\n ImageProcessInput(image_url=\"https://example.com/image.png\", filters=[\"blur\"])\n )\n\n # you can await fully typed results\n print(result)\n\n\nasync def schedule() -> None:\n # > Scheduling tasks (Mergent)\n options = {\n # same options as before\n \"json\": {\n # same body as before\n \"delay\": \"5m\"\n }\n }\n\n print(options)\n\n # > Scheduling tasks (Hatchet)\n # Schedule the task to run at a specific time\n run_at = datetime.now(tz=timezone.utc) + timedelta(days=1)\n await image_processor.aio_schedule(\n run_at,\n ImageProcessInput(image_url=\"https://example.com/image.png\", filters=[\"blur\"]),\n )\n\n # Schedule the task to run every hour\n await image_processor.aio_create_cron(\n \"run-hourly\",\n \"0 * * * *\",\n ImageProcessInput(image_url=\"https://example.com/image.png\", filters=[\"blur\"]),\n )\n", - "source": "out/python/migration_guides/mergent.py", - "blocks": { - "before_mergent": { - "start": 20, - "stop": 30 - }, - "after_hatchet": { - "start": 34, - "stop": 66 - }, - "running_a_task_mergent": { - "start": 71, - "stop": 97 - }, - "running_a_task_hatchet": { - "start": 100, - "stop": 105 - }, - "scheduling_tasks_mergent": { - "start": 110, - "stop": 116 - }, - "scheduling_tasks_hatchet": { - "start": 121, - "stop": 133 - } - }, - "highlights": {} 
-}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/non_retryable/index.ts b/frontend/docs/lib/generated/snips/python/non_retryable/index.ts deleted file mode 100644 index dff1f8f6a..000000000 --- a/frontend/docs/lib/generated/snips/python/non_retryable/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_no_retry from './test_no_retry'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_no_retry } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/non_retryable/test_no_retry.ts b/frontend/docs/lib/generated/snips/python/non_retryable/test_no_retry.ts deleted file mode 100644 index feb84ecbe..000000000 --- a/frontend/docs/lib/generated/snips/python/non_retryable/test_no_retry.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\n\nimport pytest\n\nfrom examples.non_retryable.worker import (\n non_retryable_workflow,\n should_not_retry,\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n)\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType\nfrom hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails\nfrom hatchet_sdk.exceptions import FailedTaskRunExceptionGroup\n\n\ndef find_id(runs: V1WorkflowRunDetails, match: str) -> str:\n return next(t.metadata.id for t in runs.tasks if match in t.display_name)\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_no_retry(hatchet: Hatchet) -> None:\n ref = await non_retryable_workflow.aio_run_no_wait()\n\n with pytest.raises(FailedTaskRunExceptionGroup) as exc_info:\n await ref.aio_result()\n\n exception_group = exc_info.value\n\n assert len(exception_group.exceptions) == 2\n\n exc_text = [e.exc for e in exception_group.exceptions]\n\n non_retries = [\n e\n for e in exc_text\n if \"This task should retry because it's not a NonRetryableException\" in e\n ]\n\n other_errors = [e for e in exc_text if \"This task should not retry\" in e]\n\n assert len(non_retries) == 1\n assert len(other_errors) == 1\n\n await asyncio.sleep(3)\n\n runs = await hatchet.runs.aio_get(ref.workflow_run_id)\n task_to_id = {\n task: find_id(runs, task.name)\n for task in [\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n should_not_retry,\n ]\n }\n\n retrying_events = [\n e for e in runs.task_events if e.event_type == V1TaskEventType.RETRYING\n ]\n\n \"\"\"Only one task should be retried.\"\"\"\n assert len(retrying_events) == 1\n\n \"\"\"The task id of the retrying events should match the tasks that are retried\"\"\"\n assert retrying_events[0].task_id == task_to_id[should_retry_wrong_exception_type]\n\n \"\"\"Three failed events should emit, one each for the two failing initial runs and one for the retry.\"\"\"\n assert (\n len([e for e in runs.task_events if e.event_type == V1TaskEventType.FAILED])\n == 3\n )\n", - "source": "out/python/non_retryable/test_no_retry.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/non_retryable/trigger.ts b/frontend/docs/lib/generated/snips/python/non_retryable/trigger.ts deleted file mode 100644 index e48a7a8d9..000000000 --- a/frontend/docs/lib/generated/snips/python/non_retryable/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: 
Snippet = { - "language": "python", - "content": "from examples.non_retryable.worker import non_retryable_workflow\n\nnon_retryable_workflow.run_no_wait()\n", - "source": "out/python/non_retryable/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/non_retryable/worker.ts b/frontend/docs/lib/generated/snips/python/non_retryable/worker.ts deleted file mode 100644 index 55a6348a4..000000000 --- a/frontend/docs/lib/generated/snips/python/non_retryable/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import Context, EmptyModel, Hatchet\nfrom hatchet_sdk.exceptions import NonRetryableException\n\nhatchet = Hatchet(debug=True)\n\nnon_retryable_workflow = hatchet.workflow(name=\"NonRetryableWorkflow\")\n\n\n# > Non-retryable task\n@non_retryable_workflow.task(retries=1)\ndef should_not_retry(input: EmptyModel, ctx: Context) -> None:\n raise NonRetryableException(\"This task should not retry\")\n\n\n\n\n@non_retryable_workflow.task(retries=1)\ndef should_retry_wrong_exception_type(input: EmptyModel, ctx: Context) -> None:\n raise TypeError(\"This task should retry because it's not a NonRetryableException\")\n\n\n@non_retryable_workflow.task(retries=1)\ndef should_not_retry_successful_task(input: EmptyModel, ctx: Context) -> None:\n pass\n\n\ndef main() -> None:\n worker = hatchet.worker(\"non-retry-worker\", workflows=[non_retryable_workflow])\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/non_retryable/worker.py", - "blocks": { - "non_retryable_task": { - "start": 10, - "stop": 14 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/on_failure/index.ts b/frontend/docs/lib/generated/snips/python/on_failure/index.ts deleted file mode 100644 index c0d473198..000000000 --- a/frontend/docs/lib/generated/snips/python/on_failure/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_on_failure from './test_on_failure'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_on_failure } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/on_failure/test_on_failure.ts b/frontend/docs/lib/generated/snips/python/on_failure/test_on_failure.ts deleted file mode 100644 index f80640e44..000000000 --- a/frontend/docs/lib/generated/snips/python/on_failure/test_on_failure.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\n\nimport pytest\n\nfrom examples.on_failure.worker import on_failure_wf\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_run_timeout(hatchet: Hatchet) -> None:\n run = on_failure_wf.run_no_wait()\n try:\n await run.aio_result()\n\n assert False, \"Expected workflow to timeout\"\n except Exception as e:\n assert \"step1 failed\" in str(e)\n\n await asyncio.sleep(5) # Wait for the on_failure job to finish\n\n details = await hatchet.runs.aio_get(run.workflow_run_id)\n\n assert len(details.tasks) == 2\n assert sum(t.status == V1TaskStatus.COMPLETED for t in details.tasks) == 1\n assert sum(t.status == V1TaskStatus.FAILED for t in details.tasks) == 1\n\n completed_task = next(\n t 
for t in details.tasks if t.status == V1TaskStatus.COMPLETED\n )\n failed_task = next(t for t in details.tasks if t.status == V1TaskStatus.FAILED)\n\n assert \"on_failure\" in completed_task.display_name\n assert \"step1\" in failed_task.display_name\n", - "source": "out/python/on_failure/test_on_failure.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/on_failure/trigger.ts b/frontend/docs/lib/generated/snips/python/on_failure/trigger.ts deleted file mode 100644 index 4bbe85563..000000000 --- a/frontend/docs/lib/generated/snips/python/on_failure/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.on_failure.worker import on_failure_wf_with_details\n\non_failure_wf_with_details.run_no_wait()\n", - "source": "out/python/on_failure/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/on_failure/worker.ts b/frontend/docs/lib/generated/snips/python/on_failure/worker.ts deleted file mode 100644 index 9e0c79fb7..000000000 --- a/frontend/docs/lib/generated/snips/python/on_failure/worker.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import json\nfrom datetime import timedelta\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\nfrom hatchet_sdk.exceptions import TaskRunError\n\nhatchet = Hatchet(debug=False)\n\nERROR_TEXT = \"step1 failed\"\n\n# > OnFailure Step\n# This workflow will fail because the step will throw an error\n# we define an onFailure step to handle this case\n\non_failure_wf = hatchet.workflow(name=\"OnFailureWorkflow\")\n\n\n@on_failure_wf.task(execution_timeout=timedelta(seconds=1))\ndef step1(input: EmptyModel, ctx: Context) -> None:\n # 👀 this step will always raise an exception\n raise Exception(ERROR_TEXT)\n\n\n# 👀 After the workflow fails, this special step will run\n@on_failure_wf.on_failure_task()\ndef on_failure(input: EmptyModel, ctx: Context) -> dict[str, str]:\n # 👀 we can do things like perform cleanup logic\n # or notify a user here\n\n # 👀 Fetch the errors from upstream step runs from the context\n print(ctx.task_run_errors)\n\n return {\"status\": \"success\"}\n\n\n\n\n# > OnFailure With Details\n# We can access the failure details in the onFailure step\n# via the context method\n\non_failure_wf_with_details = hatchet.workflow(name=\"OnFailureWorkflowWithDetails\")\n\n\n# ... 
defined as above\n@on_failure_wf_with_details.task(execution_timeout=timedelta(seconds=1))\ndef details_step1(input: EmptyModel, ctx: Context) -> None:\n raise Exception(ERROR_TEXT)\n\n\n# 👀 After the workflow fails, this special step will run\n@on_failure_wf_with_details.on_failure_task()\ndef details_on_failure(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n error = ctx.get_task_run_error(details_step1)\n\n if not error:\n return {\"status\": \"unexpected success\"}\n\n # 👀 we can access the failure details here\n assert isinstance(error, TaskRunError)\n\n if \"step1 failed\" in error.exc:\n return {\n \"status\": \"success\",\n \"failed_run_external_id\": error.task_run_external_id,\n }\n\n raise Exception(\"unexpected failure\")\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"on-failure-worker\",\n slots=4,\n workflows=[on_failure_wf, on_failure_wf_with_details],\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/on_failure/worker.py", - "blocks": { - "onfailure_step": { - "start": 12, - "stop": 35 - }, - "onfailure_with_details": { - "start": 39, - "stop": 70 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/on_success/index.ts b/frontend/docs/lib/generated/snips/python/on_success/index.ts deleted file mode 100644 index c443f556e..000000000 --- a/frontend/docs/lib/generated/snips/python/on_success/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import trigger from './trigger'; -import worker from './worker'; - -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/on_success/trigger.ts b/frontend/docs/lib/generated/snips/python/on_success/trigger.ts deleted file mode 100644 index 89ca00f29..000000000 --- a/frontend/docs/lib/generated/snips/python/on_success/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.on_success.worker import on_success_workflow\n\non_success_workflow.run_no_wait()\n", - "source": "out/python/on_success/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/on_success/worker.ts b/frontend/docs/lib/generated/snips/python/on_success/worker.ts deleted file mode 100644 index 38be64a90..000000000 --- a/frontend/docs/lib/generated/snips/python/on_success/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\non_success_workflow = hatchet.workflow(name=\"OnSuccessWorkflow\")\n\n\n@on_success_workflow.task()\ndef first_task(input: EmptyModel, ctx: Context) -> None:\n print(\"First task completed successfully\")\n\n\n@on_success_workflow.task(parents=[first_task])\ndef second_task(input: EmptyModel, ctx: Context) -> None:\n print(\"Second task completed successfully\")\n\n\n@on_success_workflow.task(parents=[first_task, second_task])\ndef third_task(input: EmptyModel, ctx: Context) -> None:\n print(\"Third task completed successfully\")\n\n\n@on_success_workflow.task()\ndef fourth_task(input: EmptyModel, ctx: Context) -> None:\n print(\"Fourth task completed successfully\")\n\n\n@on_success_workflow.on_success_task()\ndef on_success_task(input: EmptyModel, ctx: Context) -> None:\n print(\"On success task completed 
successfully\")\n\n\ndef main() -> None:\n worker = hatchet.worker(\"on-success-worker\", workflows=[on_success_workflow])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/on_success/worker.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/client.ts b/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/client.ts deleted file mode 100644 index b0280d667..000000000 --- a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/client.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n", - "source": "out/python/opentelemetry_instrumentation/client.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/index.ts b/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/index.ts deleted file mode 100644 index 514846f51..000000000 --- a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import client from './client'; -import tracer from './tracer'; -import triggers from './triggers'; -import worker from './worker'; -import * as langfuse from './langfuse'; - -export { client } -export { tracer } -export { triggers } -export { worker } -export { langfuse }; diff --git a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/client.ts b/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/client.ts deleted file mode 100644 index ad273fc35..000000000 --- a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/client.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import base64\nimport os\n\nfrom langfuse import Langfuse # type: ignore\nfrom langfuse.openai import AsyncOpenAI # type: ignore\n\n# > Configure Langfuse\nLANGFUSE_AUTH = base64.b64encode(\n f\"{os.getenv('LANGFUSE_PUBLIC_KEY')}:{os.getenv('LANGFUSE_SECRET_KEY')}\".encode()\n).decode()\n\nos.environ[\"OTEL_EXPORTER_OTLP_ENDPOINT\"] = (\n os.getenv(\"LANGFUSE_HOST\", \"https://us.cloud.langfuse.com\") + \"/api/public/otel\"\n)\nos.environ[\"OTEL_EXPORTER_OTLP_HEADERS\"] = f\"Authorization=Basic {LANGFUSE_AUTH}\"\n\n## Note: Langfuse sets the global tracer provider, so you don't need to worry about it\nlf = Langfuse(\n public_key=os.getenv(\"LANGFUSE_PUBLIC_KEY\"),\n secret_key=os.getenv(\"LANGFUSE_SECRET_KEY\"),\n host=os.getenv(\"LANGFUSE_HOST\", \"https://app.langfuse.com\"),\n)\n\n# > Create OpenAI client\nopenai = AsyncOpenAI(\n api_key=os.getenv(\"OPENAI_API_KEY\"),\n)\n", - "source": "out/python/opentelemetry_instrumentation/langfuse/client.py", - "blocks": { - "configure_langfuse": { - "start": 8, - "stop": 22 - }, - "create_openai_client": { - "start": 25, - "stop": 27 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/index.ts b/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/index.ts deleted file mode 100644 index 57a252fd1..000000000 --- 
a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import client from './client'; -import trigger from './trigger'; -import worker from './worker'; - -export { client } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/trigger.ts b/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/trigger.ts deleted file mode 100644 index cac722338..000000000 --- a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/trigger.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "# type: ignore\n\nimport asyncio\n\nfrom langfuse import get_client # type: ignore\nfrom opentelemetry.trace import StatusCode\n\nfrom examples.opentelemetry_instrumentation.langfuse.worker import langfuse_task\n\n# > Trigger task\ntracer = get_client()\n\n\nasync def main() -> None:\n # Traces will be sent to Langfuse\n # Use `_otel_tracer` to access the OpenTelemetry tracer if you need\n # to e.g. log statuses or attributes manually.\n with tracer._otel_tracer.start_as_current_span(name=\"trigger\") as span:\n result = await langfuse_task.aio_run()\n location = result.get(\"location\")\n\n if not location:\n span.set_status(StatusCode.ERROR)\n return\n\n span.set_attribute(\"location\", location)\n\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/opentelemetry_instrumentation/langfuse/trigger.py", - "blocks": { - "trigger_task": { - "start": 11, - "stop": 28 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/worker.ts b/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/worker.ts deleted file mode 100644 index 035f4b7da..000000000 --- a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/langfuse/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from opentelemetry.trace import get_tracer_provider\n\nfrom examples.opentelemetry_instrumentation.langfuse.client import openai\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\nfrom hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor\n\n# > Task\nHatchetInstrumentor(\n ## Langfuse sets the global tracer provider\n tracer_provider=get_tracer_provider(),\n).instrument()\n\nhatchet = Hatchet()\n\n\n@hatchet.task()\nasync def langfuse_task(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n ## Usage, cost, etc.
of this call will be sent to Langfuse\n generation = await openai.chat.completions.create(\n model=\"gpt-4o-mini\",\n messages=[\n {\"role\": \"system\", \"content\": \"You are a helpful assistant.\"},\n {\"role\": \"user\", \"content\": \"Where does Anna Karenina take place?\"},\n ],\n )\n\n location = generation.choices[0].message.content\n\n return {\n \"location\": location,\n }\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\"langfuse-example-worker\", workflows=[langfuse_task])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/opentelemetry_instrumentation/langfuse/worker.py", - "blocks": { - "task": { - "start": 8, - "stop": 33 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/tracer.ts b/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/tracer.ts deleted file mode 100644 index f21afe412..000000000 --- a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/tracer.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import os\nfrom typing import cast\n\nfrom opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter\nfrom opentelemetry.sdk.resources import SERVICE_NAME, Resource\nfrom opentelemetry.sdk.trace import TracerProvider\nfrom opentelemetry.sdk.trace.export import BatchSpanProcessor\nfrom opentelemetry.trace import NoOpTracerProvider\n\ntrace_provider: TracerProvider | NoOpTracerProvider\n\nif os.getenv(\"CI\", \"false\") == \"true\":\n trace_provider = NoOpTracerProvider()\nelse:\n resource = Resource(\n attributes={\n SERVICE_NAME: os.getenv(\"HATCHET_CLIENT_OTEL_SERVICE_NAME\", \"test-service\")\n }\n )\n\n headers = dict(\n [\n cast(\n tuple[str, str],\n tuple(\n os.getenv(\n \"HATCHET_CLIENT_OTEL_EXPORTER_OTLP_HEADERS\", \"foo=bar\"\n ).split(\"=\")\n ),\n )\n ]\n )\n\n processor = BatchSpanProcessor(\n OTLPSpanExporter(\n endpoint=os.getenv(\n \"HATCHET_CLIENT_OTEL_EXPORTER_OTLP_ENDPOINT\", \"http://localhost:4317\"\n ),\n headers=headers,\n ),\n )\n\n trace_provider = TracerProvider(resource=resource)\n\n trace_provider.add_span_processor(processor)\n", - "source": "out/python/opentelemetry_instrumentation/tracer.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/triggers.ts b/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/triggers.ts deleted file mode 100644 index 5760f4509..000000000 --- a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/triggers.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\n\nfrom examples.opentelemetry_instrumentation.client import hatchet\nfrom examples.opentelemetry_instrumentation.tracer import trace_provider\nfrom examples.opentelemetry_instrumentation.worker import otel_workflow\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\nfrom hatchet_sdk.clients.events import BulkPushEventWithMetadata, PushEventOptions\nfrom hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor\n\ninstrumentor = HatchetInstrumentor(tracer_provider=trace_provider)\ntracer = trace_provider.get_tracer(__name__)\n\n\nADDITIONAL_METADATA = {\"hello\": \"world\"}\n\n\ndef 
create_push_options() -> PushEventOptions:\n return PushEventOptions(additional_metadata=ADDITIONAL_METADATA)\n\n\ndef push_event() -> None:\n print(\"\\npush_event\")\n with tracer.start_as_current_span(\"push_event\"):\n hatchet.event.push(\n \"otel:event\",\n {\"test\": \"test\"},\n options=create_push_options(),\n )\n\n\nasync def async_push_event() -> None:\n print(\"\\nasync_push_event\")\n with tracer.start_as_current_span(\"async_push_event\"):\n await hatchet.event.aio_push(\n \"otel:event\", {\"test\": \"test\"}, options=create_push_options()\n )\n\n\ndef bulk_push_event() -> None:\n print(\"\\nbulk_push_event\")\n with tracer.start_as_current_span(\"bulk_push_event\"):\n hatchet.event.bulk_push(\n [\n BulkPushEventWithMetadata(\n key=\"otel:event\",\n payload={\"test\": \"test 1\"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n BulkPushEventWithMetadata(\n key=\"otel:event\",\n payload={\"test\": \"test 2\"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n ],\n )\n\n\nasync def async_bulk_push_event() -> None:\n print(\"\\nasync_bulk_push_event\")\n with tracer.start_as_current_span(\"bulk_push_event\"):\n await hatchet.event.aio_bulk_push(\n [\n BulkPushEventWithMetadata(\n key=\"otel:event\",\n payload={\"test\": \"test 1\"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n BulkPushEventWithMetadata(\n key=\"otel:event\",\n payload={\"test\": \"test 2\"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n ],\n )\n\n\ndef run_workflow() -> None:\n print(\"\\nrun_workflow\")\n with tracer.start_as_current_span(\"run_workflow\"):\n otel_workflow.run(\n options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),\n )\n\n\nasync def async_run_workflow() -> None:\n print(\"\\nasync_run_workflow\")\n with tracer.start_as_current_span(\"async_run_workflow\"):\n await otel_workflow.aio_run(\n options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),\n )\n\n\ndef run_workflows() -> None:\n print(\"\\nrun_workflows\")\n with tracer.start_as_current_span(\"run_workflows\"):\n otel_workflow.run_many(\n [\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n ],\n )\n\n\nasync def async_run_workflows() -> None:\n print(\"\\nasync_run_workflows\")\n with tracer.start_as_current_span(\"async_run_workflows\"):\n await otel_workflow.aio_run_many(\n [\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n ],\n )\n\n\nasync def main() -> None:\n push_event()\n await async_push_event()\n bulk_push_event()\n await async_bulk_push_event()\n run_workflow()\n # await async_run_workflow()\n run_workflows()\n # await async_run_workflows()\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/opentelemetry_instrumentation/triggers.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/worker.ts b/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/worker.ts deleted file mode 100644 index e673efad8..000000000 --- a/frontend/docs/lib/generated/snips/python/opentelemetry_instrumentation/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } 
from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.opentelemetry_instrumentation.client import hatchet\nfrom examples.opentelemetry_instrumentation.tracer import trace_provider\nfrom hatchet_sdk import Context, EmptyModel\nfrom hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor\n\nHatchetInstrumentor(\n tracer_provider=trace_provider,\n).instrument()\n\notel_workflow = hatchet.workflow(\n name=\"OTelWorkflow\",\n)\n\n\n@otel_workflow.task()\ndef your_spans_are_children_of_hatchet_span(\n input: EmptyModel, ctx: Context\n) -> dict[str, str]:\n with trace_provider.get_tracer(__name__).start_as_current_span(\"step1\"):\n print(\"executed step\")\n return {\n \"foo\": \"bar\",\n }\n\n\n@otel_workflow.task()\ndef your_spans_are_still_children_of_hatchet_span(\n input: EmptyModel, ctx: Context\n) -> None:\n with trace_provider.get_tracer(__name__).start_as_current_span(\"step2\"):\n raise Exception(\"Manually instrumented step failed\")\n\n\n@otel_workflow.task()\ndef this_step_is_still_instrumented(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\"executed still-instrumented step\")\n return {\n \"still\": \"instrumented\",\n }\n\n\n@otel_workflow.task()\ndef this_step_is_also_still_instrumented(input: EmptyModel, ctx: Context) -> None:\n raise Exception(\"Still-instrumented step failed\")\n\n\ndef main() -> None:\n worker = hatchet.worker(\"otel-example-worker\", slots=1, workflows=[otel_workflow])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/opentelemetry_instrumentation/worker.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/priority/index.ts b/frontend/docs/lib/generated/snips/python/priority/index.ts deleted file mode 100644 index ee66ff332..000000000 --- a/frontend/docs/lib/generated/snips/python/priority/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_priority from './test_priority'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_priority } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/priority/test_priority.ts b/frontend/docs/lib/generated/snips/python/priority/test_priority.ts deleted file mode 100644 index 41e2e9222..000000000 --- a/frontend/docs/lib/generated/snips/python/priority/test_priority.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nfrom collections.abc import AsyncGenerator\nfrom datetime import datetime, timedelta, timezone\nfrom random import choice\nfrom subprocess import Popen\nfrom typing import Any, Literal\nfrom uuid import uuid4\n\nimport pytest\nimport pytest_asyncio\nfrom pydantic import BaseModel\n\nfrom examples.priority.worker import DEFAULT_PRIORITY, SLEEP_TIME, priority_workflow\nfrom hatchet_sdk import Hatchet, ScheduleTriggerWorkflowOptions, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\nPriority = Literal[\"low\", \"medium\", \"high\", \"default\"]\n\n\nclass RunPriorityStartedAt(BaseModel):\n priority: Priority\n started_at: datetime\n finished_at: datetime\n\n\ndef priority_to_int(priority: Priority) -> int:\n match priority:\n case \"high\":\n return 3\n case \"medium\":\n return 2\n case \"low\":\n return 1\n case \"default\":\n return DEFAULT_PRIORITY\n case _:\n raise 
ValueError(f\"Invalid priority: {priority}\")\n\n\n@pytest_asyncio.fixture(loop_scope=\"session\", scope=\"function\")\nasync def dummy_runs() -> None:\n priority: Priority = \"high\"\n\n await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority)),\n additional_metadata={\n \"priority\": priority,\n \"key\": ix,\n \"type\": \"dummy\",\n },\n )\n )\n for ix in range(40)\n ]\n )\n\n await asyncio.sleep(3)\n\n return\n\n\n@pytest.mark.parametrize(\n \"on_demand_worker\",\n [\n (\n [\"poetry\", \"run\", \"python\", \"examples/priority/worker.py\", \"--slots\", \"1\"],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_priority(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n choices: list[Priority] = [\"low\", \"medium\", \"high\", \"default\"]\n N = 30\n\n run_refs = await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n \"priority\": priority,\n \"key\": ix,\n \"test_run_id\": test_run_id,\n },\n )\n )\n for ix in range(N)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(workflow_name=priority_workflow.name)\n ).rows\n\n assert workflows\n\n workflow = next((w for w in workflows if w.name == priority_workflow.name), None)\n\n assert workflow\n\n assert workflow.name == priority_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n \"test_run_id\": test_run_id,\n },\n limit=1_000,\n )\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get(\"priority\") or \"low\",\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(run_refs)\n assert len(runs_ids_started_ats) == N\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n \"\"\"Run start times should be in order of priority\"\"\"\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n \"\"\"Runs should proceed one at a time\"\"\"\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n \"\"\"Runs should finish after starting (this is mostly a test for engine datetime handling bugs)\"\"\"\n assert curr.finished_at >= curr.started_at\n\n\n@pytest.mark.parametrize(\n \"on_demand_worker\",\n [\n (\n [\"poetry\", \"run\", \"python\", \"examples/priority/worker.py\", \"--slots\", \"1\"],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_priority_via_scheduling(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n sleep_time = 3\n n = 30\n choices: list[Priority] = [\"low\", \"medium\", \"high\", \"default\"]\n run_at = datetime.now(tz=timezone.utc) + timedelta(seconds=sleep_time)\n\n versions = await asyncio.gather(\n *[\n priority_workflow.aio_schedule(\n run_at=run_at,\n options=ScheduleTriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n 
\"priority\": priority,\n \"key\": ix,\n \"test_run_id\": test_run_id,\n },\n ),\n )\n for ix in range(n)\n ]\n )\n\n await asyncio.sleep(sleep_time * 2)\n\n workflow_id = versions[0].workflow_id\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError(\"Timed out waiting for runs to finish\")\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n \"test_run_id\": test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError(\"One or more runs failed or were cancelled\")\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get(\"priority\") or \"low\",\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(versions)\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n \"\"\"Run start times should be in order of priority\"\"\"\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n \"\"\"Runs should proceed one at a time\"\"\"\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n \"\"\"Runs should finish after starting (this is mostly a test for engine datetime handling bugs)\"\"\"\n assert curr.finished_at >= curr.started_at\n\n\n@pytest_asyncio.fixture(loop_scope=\"session\", scope=\"function\")\nasync def crons(\n hatchet: Hatchet, dummy_runs: None\n) -> AsyncGenerator[tuple[str, str, int], None]:\n test_run_id = str(uuid4())\n choices: list[Priority] = [\"low\", \"medium\", \"high\"]\n n = 30\n\n crons = await asyncio.gather(\n *[\n hatchet.cron.aio_create(\n workflow_name=priority_workflow.name,\n cron_name=f\"{test_run_id}-cron-{i}\",\n expression=\"* * * * *\",\n input={},\n additional_metadata={\n \"trigger\": \"cron\",\n \"test_run_id\": test_run_id,\n \"priority\": (priority := choice(choices)),\n \"key\": str(i),\n },\n priority=(priority_to_int(priority)),\n )\n for i in range(n)\n ]\n )\n\n yield crons[0].workflow_id, test_run_id, n\n\n await asyncio.gather(*[hatchet.cron.aio_delete(cron.metadata.id) for cron in crons])\n\n\ndef time_until_next_minute() -> float:\n now = datetime.now(tz=timezone.utc)\n next_minute = (now + timedelta(minutes=1)).replace(second=0, microsecond=0)\n\n return (next_minute - now).total_seconds()\n\n\n@pytest.mark.skip(\n reason=\"Test is flaky because the first jobs that are picked up don't necessarily go in priority order\"\n)\n@pytest.mark.parametrize(\n \"on_demand_worker\",\n [\n (\n [\"poetry\", \"run\", \"python\", \"examples/priority/worker.py\", \"--slots\", \"1\"],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_priority_via_cron(\n hatchet: Hatchet, crons: tuple[str, str, int], on_demand_worker: Popen[Any]\n) -> None:\n workflow_id, test_run_id, n = crons\n\n await asyncio.sleep(time_until_next_minute() + 10)\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError(\"Timed out waiting for runs to finish\")\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n 
workflow_ids=[workflow_id],\n additional_metadata={\n \"test_run_id\": test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError(\"One or more runs failed or were cancelled\")\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get(\"priority\") or \"low\",\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == n\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n \"\"\"Run start times should be in order of priority\"\"\"\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n \"\"\"Runs should proceed one at a time\"\"\"\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n \"\"\"Runs should finish after starting (this is mostly a test for engine datetime handling bugs)\"\"\"\n assert curr.finished_at >= curr.started_at\n", - "source": "out/python/priority/test_priority.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/priority/trigger.ts b/frontend/docs/lib/generated/snips/python/priority/trigger.ts deleted file mode 100644 index c988c459b..000000000 --- a/frontend/docs/lib/generated/snips/python/priority/trigger.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from datetime import datetime, timedelta, timezone\n\nfrom examples.priority.worker import priority_workflow\nfrom hatchet_sdk import ScheduleTriggerWorkflowOptions, TriggerWorkflowOptions\n\npriority_workflow.run_no_wait()\n\n# > Runtime priority\nlow_prio = priority_workflow.run_no_wait(\n options=TriggerWorkflowOptions(\n ## 👀 Adding priority and key to metadata to show them in the dashboard\n priority=1,\n additional_metadata={\"priority\": \"low\", \"key\": 1},\n )\n)\n\nhigh_prio = priority_workflow.run_no_wait(\n options=TriggerWorkflowOptions(\n ## 👀 Adding priority and key to metadata to show them in the dashboard\n priority=3,\n additional_metadata={\"priority\": \"high\", \"key\": 1},\n )\n)\n\n# > Scheduled priority\nschedule = priority_workflow.schedule(\n run_at=datetime.now(tz=timezone.utc) + timedelta(minutes=1),\n options=ScheduleTriggerWorkflowOptions(priority=3),\n)\n\ncron = priority_workflow.create_cron(\n cron_name=\"my-scheduled-cron\",\n expression=\"0 * * * *\",\n priority=3,\n)\n\n# > Default priority\nlow_prio = priority_workflow.run_no_wait(\n options=TriggerWorkflowOptions(\n ## 👀 Adding priority and key to metadata to show them in the dashboard\n priority=1,\n additional_metadata={\"priority\": \"low\", \"key\": 2},\n )\n)\nhigh_prio = priority_workflow.run_no_wait(\n options=TriggerWorkflowOptions(\n ## 👀 Adding priority and key to metadata to show them in the dashboard\n priority=3,\n additional_metadata={\"priority\": \"high\", \"key\": 2},\n )\n)\n", - "source": "out/python/priority/trigger.py", - "blocks": { - "runtime_priority": { - "start": 9, - "stop": 23 - }, - "scheduled_priority": { - "start": 26, - "stop": 35 - } - }, - "highlights": {} -}; - -export default snippet; 
diff --git a/frontend/docs/lib/generated/snips/python/priority/worker.ts b/frontend/docs/lib/generated/snips/python/priority/worker.ts deleted file mode 100644 index 522edf7f3..000000000 --- a/frontend/docs/lib/generated/snips/python/priority/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import time\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n# > Default priority\nDEFAULT_PRIORITY = 1\nSLEEP_TIME = 0.25\n\npriority_workflow = hatchet.workflow(\n name=\"PriorityWorkflow\",\n default_priority=DEFAULT_PRIORITY,\n)\n\n\n@priority_workflow.task()\ndef priority_task(input: EmptyModel, ctx: Context) -> None:\n print(\"Priority:\", ctx.priority)\n time.sleep(SLEEP_TIME)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"priority-worker\",\n slots=1,\n workflows=[priority_workflow],\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/priority/worker.py", - "blocks": { - "default_priority": { - "start": 8, - "stop": 14 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/README.ts b/frontend/docs/lib/generated/snips/python/quickstart/README.ts deleted file mode 100644 index cdcf722fb..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/README.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "## Hatchet Python Quickstart\n\nThis is an example project demonstrating how to use Hatchet with Python. For detailed setup instructions, see the [Hatchet Setup Guide](https://docs.hatchet.run/home/setup).\n\n## Prerequisites\n\nBefore running this project, make sure you have the following:\n\n1. [Python v3.10 or higher](https://www.python.org/downloads/)\n2. [Poetry](https://python-poetry.org/docs/#installation) for dependency management\n\n## Setup\n\n1. Clone the repository:\n\n```bash\ngit clone https://github.com/hatchet-dev/hatchet-python-quickstart.git\ncd hatchet-python-quickstart\n```\n\n2. Set the required environment variable `HATCHET_CLIENT_TOKEN` created in the [Getting Started Guide](https://docs.hatchet.run/home/hatchet-cloud-quickstart).\n\n```bash\nexport HATCHET_CLIENT_TOKEN=\n```\n\n> Note: If you're self-hosting, you may need to set `HATCHET_CLIENT_TLS_STRATEGY=none` to disable TLS\n\n3. Install the project dependencies:\n\n```bash\npoetry install\n```\n\n### Running an example\n\n1. Start a Hatchet worker by running the following command:\n\n```shell\npoetry run python src/worker.py\n```\n\n2.
To run the example workflow, open a new terminal and run the following command:\n\n```shell\npoetry run python src/run.py\n```\n\nThis will trigger the workflow on the worker running in the first terminal and print the output to the second terminal.\n", - "source": "out/python/quickstart/README.md", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/__init__.ts b/frontend/docs/lib/generated/snips/python/quickstart/__init__.ts deleted file mode 100644 index be75c537d..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/__init__.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "", - "source": "out/python/quickstart/__init__.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/gitignore.ts b/frontend/docs/lib/generated/snips/python/quickstart/gitignore.ts deleted file mode 100644 index d96a97f8a..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/gitignore.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "certs/\n\n# Environments\n.env\nenv/\nvenv/\n.venv/\n__pycache__/\n*.py[cod]\n*$py.class\n.Python\n.pytest_cache/\n.coverage\nhtmlcov/\n\n# Distribution / packaging\ndist/\nbuild/\n*.egg-info/\n*.egg\n\n.DS_Store\n\nindex/index.json\n", - "source": "out/python/quickstart/.gitignore", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/hatchet_client.ts b/frontend/docs/lib/generated/snips/python/quickstart/hatchet_client.ts deleted file mode 100644 index 98ec99532..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/hatchet_client.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import Hatchet\n\n# Initialize Hatchet client\nhatchet = Hatchet()\n", - "source": "out/python/quickstart/hatchet_client.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/index.ts b/frontend/docs/lib/generated/snips/python/quickstart/index.ts deleted file mode 100644 index fa5a7dad8..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -import readme from './README'; -import __init__ from './__init__'; -import gitignore from './gitignore'; -import hatchet_client from './hatchet_client'; -import poetry from './poetry'; -import pyproject from './pyproject'; -import run from './run'; -import worker from './worker'; -import * as workflows from './workflows'; - -export { readme } -export { __init__ } -export { gitignore } -export { hatchet_client } -export { poetry } -export { pyproject } -export { run } -export { worker } -export { workflows }; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/poetry.ts b/frontend/docs/lib/generated/snips/python/quickstart/poetry.ts deleted file mode 100644 index ffccd02ca..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/poetry.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "# This 
file is automatically @generated by Poetry 2.0.0 and should not be changed by hand.\n\n[[package]]\nname = \"aiohappyeyeballs\"\nversion = \"2.6.1\"\ndescription = \"Happy Eyeballs for asyncio\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"aiohappyeyeballs-2.6.1-py3-none-any.whl\", hash = \"sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8\"},\n {file = \"aiohappyeyeballs-2.6.1.tar.gz\", hash = \"sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558\"},\n]\n\n[[package]]\nname = \"aiohttp\"\nversion = \"3.11.14\"\ndescription = \"Async http client/server framework (asyncio)\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"aiohttp-3.11.14-cp310-cp310-macosx_10_9_universal2.whl\", hash = \"sha256:e2bc827c01f75803de77b134afdbf74fa74b62970eafdf190f3244931d7a5c0d\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-macosx_10_9_x86_64.whl\", hash = \"sha256:e365034c5cf6cf74f57420b57682ea79e19eb29033399dd3f40de4d0171998fa\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-macosx_11_0_arm64.whl\", hash = \"sha256:c32593ead1a8c6aabd58f9d7ee706e48beac796bb0cb71d6b60f2c1056f0a65f\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:b4e7c7ec4146a94a307ca4f112802a8e26d969018fabed526efc340d21d3e7d0\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:c8b2df9feac55043759aa89f722a967d977d80f8b5865a4153fc41c93b957efc\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:c7571f99525c76a6280f5fe8e194eeb8cb4da55586c3c61c59c33a33f10cfce7\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:b59d096b5537ec7c85954cb97d821aae35cfccce3357a2cafe85660cc6295628\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:b42dbd097abb44b3f1156b4bf978ec5853840802d6eee2784857be11ee82c6a0\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-musllinux_1_2_aarch64.whl\", hash = \"sha256:b05774864c87210c531b48dfeb2f7659407c2dda8643104fb4ae5e2c311d12d9\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-musllinux_1_2_armv7l.whl\", hash = \"sha256:4e2e8ef37d4bc110917d038807ee3af82700a93ab2ba5687afae5271b8bc50ff\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-musllinux_1_2_i686.whl\", hash = \"sha256:e9faafa74dbb906b2b6f3eb9942352e9e9db8d583ffed4be618a89bd71a4e914\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-musllinux_1_2_ppc64le.whl\", hash = \"sha256:7e7abe865504f41b10777ac162c727af14e9f4db9262e3ed8254179053f63e6d\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-musllinux_1_2_s390x.whl\", hash = \"sha256:4848ae31ad44330b30f16c71e4f586cd5402a846b11264c412de99fa768f00f3\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-musllinux_1_2_x86_64.whl\", hash = \"sha256:2d0b46abee5b5737cb479cc9139b29f010a37b1875ee56d142aefc10686a390b\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-win32.whl\", hash = \"sha256:a0d2c04a623ab83963576548ce098baf711a18e2c32c542b62322a0b4584b990\"},\n {file = \"aiohttp-3.11.14-cp310-cp310-win_amd64.whl\", hash = \"sha256:5409a59d5057f2386bb8b8f8bbcfb6e15505cedd8b2445db510563b5d7ea1186\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-macosx_10_9_universal2.whl\", hash = \"sha256:f296d637a50bb15fb6a229fbb0eb053080e703b53dbfe55b1e4bb1c5ed25d325\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-macosx_10_9_x86_64.whl\", hash = 
\"sha256:ec6cd1954ca2bbf0970f531a628da1b1338f594bf5da7e361e19ba163ecc4f3b\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-macosx_11_0_arm64.whl\", hash = \"sha256:572def4aad0a4775af66d5a2b5923c7de0820ecaeeb7987dcbccda2a735a993f\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:1c68e41c4d576cd6aa6c6d2eddfb32b2acfb07ebfbb4f9da991da26633a3db1a\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:99b8bbfc8111826aa8363442c0fc1f5751456b008737ff053570f06a151650b3\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:4b0a200e85da5c966277a402736a96457b882360aa15416bf104ca81e6f5807b\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:d173c0ac508a2175f7c9a115a50db5fd3e35190d96fdd1a17f9cb10a6ab09aa1\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:413fe39fd929329f697f41ad67936f379cba06fcd4c462b62e5b0f8061ee4a77\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-musllinux_1_2_aarch64.whl\", hash = \"sha256:65c75b14ee74e8eeff2886321e76188cbe938d18c85cff349d948430179ad02c\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-musllinux_1_2_armv7l.whl\", hash = \"sha256:321238a42ed463848f06e291c4bbfb3d15ba5a79221a82c502da3e23d7525d06\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-musllinux_1_2_i686.whl\", hash = \"sha256:59a05cdc636431f7ce843c7c2f04772437dd816a5289f16440b19441be6511f1\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-musllinux_1_2_ppc64le.whl\", hash = \"sha256:daf20d9c3b12ae0fdf15ed92235e190f8284945563c4b8ad95b2d7a31f331cd3\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-musllinux_1_2_s390x.whl\", hash = \"sha256:05582cb2d156ac7506e68b5eac83179faedad74522ed88f88e5861b78740dc0e\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-musllinux_1_2_x86_64.whl\", hash = \"sha256:12c5869e7ddf6b4b1f2109702b3cd7515667b437da90a5a4a50ba1354fe41881\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-win32.whl\", hash = \"sha256:92868f6512714efd4a6d6cb2bfc4903b997b36b97baea85f744229f18d12755e\"},\n {file = \"aiohttp-3.11.14-cp311-cp311-win_amd64.whl\", hash = \"sha256:bccd2cb7aa5a3bfada72681bdb91637094d81639e116eac368f8b3874620a654\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-macosx_10_13_universal2.whl\", hash = \"sha256:70ab0f61c1a73d3e0342cedd9a7321425c27a7067bebeeacd509f96695b875fc\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-macosx_10_13_x86_64.whl\", hash = \"sha256:602d4db80daf4497de93cb1ce00b8fc79969c0a7cf5b67bec96fa939268d806a\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:3a8a0d127c10b8d89e69bbd3430da0f73946d839e65fec00ae48ca7916a31948\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:ca9f835cdfedcb3f5947304e85b8ca3ace31eef6346d8027a97f4de5fb687534\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:8aa5c68e1e68fff7cd3142288101deb4316b51f03d50c92de6ea5ce646e6c71f\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:3b512f1de1c688f88dbe1b8bb1283f7fbeb7a2b2b26e743bb2193cbadfa6f307\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:cc9253069158d57e27d47a8453d8a2c5a370dc461374111b5184cf2f147a3cc3\"},\n {file = 
\"aiohttp-3.11.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:0b2501f1b981e70932b4a552fc9b3c942991c7ae429ea117e8fba57718cdeed0\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-musllinux_1_2_aarch64.whl\", hash = \"sha256:28a3d083819741592685762d51d789e6155411277050d08066537c5edc4066e6\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-musllinux_1_2_armv7l.whl\", hash = \"sha256:0df3788187559c262922846087e36228b75987f3ae31dd0a1e5ee1034090d42f\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-musllinux_1_2_i686.whl\", hash = \"sha256:9e73fa341d8b308bb799cf0ab6f55fc0461d27a9fa3e4582755a3d81a6af8c09\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-musllinux_1_2_ppc64le.whl\", hash = \"sha256:51ba80d473eb780a329d73ac8afa44aa71dfb521693ccea1dea8b9b5c4df45ce\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-musllinux_1_2_s390x.whl\", hash = \"sha256:8d1dd75aa4d855c7debaf1ef830ff2dfcc33f893c7db0af2423ee761ebffd22b\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-musllinux_1_2_x86_64.whl\", hash = \"sha256:41cf0cefd9e7b5c646c2ef529c8335e7eafd326f444cc1cdb0c47b6bc836f9be\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-win32.whl\", hash = \"sha256:948abc8952aff63de7b2c83bfe3f211c727da3a33c3a5866a0e2cf1ee1aa950f\"},\n {file = \"aiohttp-3.11.14-cp312-cp312-win_amd64.whl\", hash = \"sha256:3b420d076a46f41ea48e5fcccb996f517af0d406267e31e6716f480a3d50d65c\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-macosx_10_13_universal2.whl\", hash = \"sha256:8d14e274828561db91e4178f0057a915f3af1757b94c2ca283cb34cbb6e00b50\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-macosx_10_13_x86_64.whl\", hash = \"sha256:f30fc72daf85486cdcdfc3f5e0aea9255493ef499e31582b34abadbfaafb0965\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-macosx_11_0_arm64.whl\", hash = \"sha256:4edcbe34e6dba0136e4cabf7568f5a434d89cc9de5d5155371acda275353d228\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:1a7169ded15505f55a87f8f0812c94c9412623c744227b9e51083a72a48b68a5\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:ad1f2fb9fe9b585ea4b436d6e998e71b50d2b087b694ab277b30e060c434e5db\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:20412c7cc3720e47a47e63c0005f78c0c2370020f9f4770d7fc0075f397a9fb0\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:6dd9766da617855f7e85f27d2bf9a565ace04ba7c387323cd3e651ac4329db91\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:599b66582f7276ebefbaa38adf37585e636b6a7a73382eb412f7bc0fc55fb73d\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-musllinux_1_2_aarch64.whl\", hash = \"sha256:b41693b7388324b80f9acfabd479bd1c84f0bc7e8f17bab4ecd9675e9ff9c734\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-musllinux_1_2_armv7l.whl\", hash = \"sha256:86135c32d06927339c8c5e64f96e4eee8825d928374b9b71a3c42379d7437058\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-musllinux_1_2_i686.whl\", hash = \"sha256:04eb541ce1e03edc1e3be1917a0f45ac703e913c21a940111df73a2c2db11d73\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-musllinux_1_2_ppc64le.whl\", hash = \"sha256:dc311634f6f28661a76cbc1c28ecf3b3a70a8edd67b69288ab7ca91058eb5a33\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-musllinux_1_2_s390x.whl\", hash = \"sha256:69bb252bfdca385ccabfd55f4cd740d421dd8c8ad438ded9637d81c228d0da49\"},\n {file = 
\"aiohttp-3.11.14-cp313-cp313-musllinux_1_2_x86_64.whl\", hash = \"sha256:2b86efe23684b58a88e530c4ab5b20145f102916bbb2d82942cafec7bd36a647\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-win32.whl\", hash = \"sha256:b9c60d1de973ca94af02053d9b5111c4fbf97158e139b14f1be68337be267be6\"},\n {file = \"aiohttp-3.11.14-cp313-cp313-win_amd64.whl\", hash = \"sha256:0a29be28e60e5610d2437b5b2fed61d6f3dcde898b57fb048aa5079271e7f6f3\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-macosx_10_9_universal2.whl\", hash = \"sha256:14fc03508359334edc76d35b2821832f092c8f092e4b356e74e38419dfe7b6de\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-macosx_10_9_x86_64.whl\", hash = \"sha256:92007c89a8cb7be35befa2732b0b32bf3a394c1b22ef2dff0ef12537d98a7bda\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-macosx_11_0_arm64.whl\", hash = \"sha256:6d3986112e34eaa36e280dc8286b9dd4cc1a5bcf328a7f147453e188f6fe148f\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:749f1eb10e51dbbcdba9df2ef457ec060554842eea4d23874a3e26495f9e87b1\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:781c8bd423dcc4641298c8c5a2a125c8b1c31e11f828e8d35c1d3a722af4c15a\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:997b57e38aa7dc6caab843c5e042ab557bc83a2f91b7bd302e3c3aebbb9042a1\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:3a8b0321e40a833e381d127be993b7349d1564b756910b28b5f6588a159afef3\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:8778620396e554b758b59773ab29c03b55047841d8894c5e335f12bfc45ebd28\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-musllinux_1_2_aarch64.whl\", hash = \"sha256:e906da0f2bcbf9b26cc2b144929e88cb3bf943dd1942b4e5af066056875c7618\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-musllinux_1_2_armv7l.whl\", hash = \"sha256:87f0e003fb4dd5810c7fbf47a1239eaa34cd929ef160e0a54c570883125c4831\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-musllinux_1_2_i686.whl\", hash = \"sha256:7f2dadece8b85596ac3ab1ec04b00694bdd62abc31e5618f524648d18d9dd7fa\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-musllinux_1_2_ppc64le.whl\", hash = \"sha256:fe846f0a98aa9913c2852b630cd39b4098f296e0907dd05f6c7b30d911afa4c3\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-musllinux_1_2_s390x.whl\", hash = \"sha256:ced66c5c6ad5bcaf9be54560398654779ec1c3695f1a9cf0ae5e3606694a000a\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-musllinux_1_2_x86_64.whl\", hash = \"sha256:a40087b82f83bd671cbeb5f582c233d196e9653220404a798798bfc0ee189fff\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-win32.whl\", hash = \"sha256:95d7787f2bcbf7cb46823036a8d64ccfbc2ffc7d52016b4044d901abceeba3db\"},\n {file = \"aiohttp-3.11.14-cp39-cp39-win_amd64.whl\", hash = \"sha256:22a8107896877212130c58f74e64b77f7007cb03cea8698be317272643602d45\"},\n {file = \"aiohttp-3.11.14.tar.gz\", hash = \"sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c\"},\n]\n\n[package.dependencies]\naiohappyeyeballs = \">=2.3.0\"\naiosignal = \">=1.1.2\"\nasync-timeout = {version = \">=4.0,<6.0\", markers = \"python_version < \\\"3.11\\\"\"}\nattrs = \">=17.3.0\"\nfrozenlist = \">=1.1.1\"\nmultidict = \">=4.5,<7.0\"\npropcache = \">=0.2.0\"\nyarl = \">=1.17.0,<2.0\"\n\n[package.extras]\nspeedups = [\"Brotli\", \"aiodns (>=3.2.0)\", \"brotlicffi\"]\n\n[[package]]\nname = \"aiohttp-retry\"\nversion = \"2.9.1\"\ndescription = \"Simple retry 
client for aiohttp\"\noptional = false\npython-versions = \">=3.7\"\ngroups = [\"main\"]\nfiles = [\n {file = \"aiohttp_retry-2.9.1-py3-none-any.whl\", hash = \"sha256:66d2759d1921838256a05a3f80ad7e724936f083e35be5abb5e16eed6be6dc54\"},\n {file = \"aiohttp_retry-2.9.1.tar.gz\", hash = \"sha256:8eb75e904ed4ee5c2ec242fefe85bf04240f685391c4879d8f541d6028ff01f1\"},\n]\n\n[package.dependencies]\naiohttp = \"*\"\n\n[[package]]\nname = \"aiosignal\"\nversion = \"1.3.2\"\ndescription = \"aiosignal: a list of registered asynchronous callbacks\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"aiosignal-1.3.2-py2.py3-none-any.whl\", hash = \"sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5\"},\n {file = \"aiosignal-1.3.2.tar.gz\", hash = \"sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54\"},\n]\n\n[package.dependencies]\nfrozenlist = \">=1.1.0\"\n\n[[package]]\nname = \"aiostream\"\nversion = \"0.5.2\"\ndescription = \"Generator-based operators for asynchronous iteration\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"aiostream-0.5.2-py3-none-any.whl\", hash = \"sha256:054660370be9d37f6fe3ece3851009240416bd082e469fd90cc8673d3818cf71\"},\n {file = \"aiostream-0.5.2.tar.gz\", hash = \"sha256:b71b519a2d66c38f0872403ab86417955b77352f08d9ad02ad46fc3926b389f4\"},\n]\n\n[package.dependencies]\ntyping-extensions = \"*\"\n\n[[package]]\nname = \"annotated-types\"\nversion = \"0.7.0\"\ndescription = \"Reusable constraint types to use with typing.Annotated\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"annotated_types-0.7.0-py3-none-any.whl\", hash = \"sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53\"},\n {file = \"annotated_types-0.7.0.tar.gz\", hash = \"sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89\"},\n]\n\n[[package]]\nname = \"async-timeout\"\nversion = \"5.0.1\"\ndescription = \"Timeout context manager for asyncio programs\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nmarkers = \"python_version < \\\"3.11\\\"\"\nfiles = [\n {file = \"async_timeout-5.0.1-py3-none-any.whl\", hash = \"sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c\"},\n {file = \"async_timeout-5.0.1.tar.gz\", hash = \"sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3\"},\n]\n\n[[package]]\nname = \"attrs\"\nversion = \"25.3.0\"\ndescription = \"Classes Without Boilerplate\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"attrs-25.3.0-py3-none-any.whl\", hash = \"sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3\"},\n {file = \"attrs-25.3.0.tar.gz\", hash = \"sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b\"},\n]\n\n[package.extras]\nbenchmark = [\"cloudpickle\", \"hypothesis\", \"mypy (>=1.11.1)\", \"pympler\", \"pytest (>=4.3.0)\", \"pytest-codspeed\", \"pytest-mypy-plugins\", \"pytest-xdist[psutil]\"]\ncov = [\"cloudpickle\", \"coverage[toml] (>=5.3)\", \"hypothesis\", \"mypy (>=1.11.1)\", \"pympler\", \"pytest (>=4.3.0)\", \"pytest-mypy-plugins\", \"pytest-xdist[psutil]\"]\ndev = [\"cloudpickle\", \"hypothesis\", \"mypy (>=1.11.1)\", \"pre-commit-uv\", \"pympler\", \"pytest (>=4.3.0)\", \"pytest-mypy-plugins\", \"pytest-xdist[psutil]\"]\ndocs = [\"cogapp\", \"furo\", \"myst-parser\", \"sphinx\", \"sphinx-notfound-page\", \"sphinxcontrib-towncrier\", 
\"towncrier\"]\ntests = [\"cloudpickle\", \"hypothesis\", \"mypy (>=1.11.1)\", \"pympler\", \"pytest (>=4.3.0)\", \"pytest-mypy-plugins\", \"pytest-xdist[psutil]\"]\ntests-mypy = [\"mypy (>=1.11.1)\", \"pytest-mypy-plugins\"]\n\n[[package]]\nname = \"cel-python\"\nversion = \"0.2.0\"\ndescription = \"Pure Python implementation of Google Common Expression Language\"\noptional = false\npython-versions = \"<4.0,>=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"cel_python-0.2.0-py3-none-any.whl\", hash = \"sha256:478ff73def7b39d51e6982f95d937a57c2b088c491c578fe5cecdbd79f476f60\"},\n {file = \"cel_python-0.2.0.tar.gz\", hash = \"sha256:75de72a5cf223ec690b236f0cc24da267219e667bd3e7f8f4f20595fcc1c0c0f\"},\n]\n\n[package.dependencies]\njmespath = \">=1.0.1,<2.0.0\"\nlark = \">=0.12.0,<0.13.0\"\npython-dateutil = \">=2.9.0.post0,<3.0.0\"\npyyaml = \">=6.0.1,<7.0.0\"\ntypes-python-dateutil = \">=2.9.0.20240316,<3.0.0.0\"\ntypes-pyyaml = \">=6.0.12.20240311,<7.0.0.0\"\n\n[[package]]\nname = \"frozenlist\"\nversion = \"1.5.0\"\ndescription = \"A list-like structure which implements collections.abc.MutableSequence\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl\", hash = \"sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl\", hash = \"sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl\", hash = \"sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl\", hash = \"sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl\", hash = \"sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl\", hash = \"sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl\", hash = \"sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl\", hash = \"sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5\"},\n {file = \"frozenlist-1.5.0-cp310-cp310-win32.whl\", hash = \"sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb\"},\n {file = 
\"frozenlist-1.5.0-cp310-cp310-win_amd64.whl\", hash = \"sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl\", hash = \"sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl\", hash = \"sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl\", hash = \"sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl\", hash = \"sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl\", hash = \"sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl\", hash = \"sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl\", hash = \"sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl\", hash = \"sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-win32.whl\", hash = \"sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942\"},\n {file = \"frozenlist-1.5.0-cp311-cp311-win_amd64.whl\", hash = \"sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl\", hash = \"sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl\", hash = \"sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = 
\"sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl\", hash = \"sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl\", hash = \"sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl\", hash = \"sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl\", hash = \"sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl\", hash = \"sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-win32.whl\", hash = \"sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8\"},\n {file = \"frozenlist-1.5.0-cp312-cp312-win_amd64.whl\", hash = \"sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl\", hash = \"sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl\", hash = \"sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl\", hash = \"sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl\", hash = \"sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl\", hash = \"sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl\", hash = \"sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl\", hash = 
\"sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl\", hash = \"sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-win32.whl\", hash = \"sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c\"},\n {file = \"frozenlist-1.5.0-cp313-cp313-win_amd64.whl\", hash = \"sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl\", hash = \"sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl\", hash = \"sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl\", hash = \"sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl\", hash = \"sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl\", hash = \"sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl\", hash = \"sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl\", hash = \"sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl\", hash = \"sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-win32.whl\", hash = \"sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723\"},\n {file = \"frozenlist-1.5.0-cp38-cp38-win_amd64.whl\", hash = \"sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl\", hash = \"sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl\", hash = \"sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl\", hash = \"sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = 
\"sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl\", hash = \"sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl\", hash = \"sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl\", hash = \"sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl\", hash = \"sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl\", hash = \"sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-win32.whl\", hash = \"sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3\"},\n {file = \"frozenlist-1.5.0-cp39-cp39-win_amd64.whl\", hash = \"sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0\"},\n {file = \"frozenlist-1.5.0-py3-none-any.whl\", hash = \"sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3\"},\n {file = \"frozenlist-1.5.0.tar.gz\", hash = \"sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817\"},\n]\n\n[[package]]\nname = \"grpcio\"\nversion = \"1.71.0\"\ndescription = \"HTTP/2-based RPC framework\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"grpcio-1.71.0-cp310-cp310-linux_armv7l.whl\", hash = \"sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd\"},\n {file = \"grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl\", hash = \"sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d\"},\n {file = \"grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl\", hash = \"sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea\"},\n {file = \"grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69\"},\n {file = \"grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73\"},\n {file = \"grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl\", hash = \"sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804\"},\n {file = \"grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl\", hash = \"sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6\"},\n {file = \"grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl\", hash = 
\"sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5\"},\n {file = \"grpcio-1.71.0-cp310-cp310-win32.whl\", hash = \"sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509\"},\n {file = \"grpcio-1.71.0-cp310-cp310-win_amd64.whl\", hash = \"sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a\"},\n {file = \"grpcio-1.71.0-cp311-cp311-linux_armv7l.whl\", hash = \"sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef\"},\n {file = \"grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl\", hash = \"sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7\"},\n {file = \"grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl\", hash = \"sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7\"},\n {file = \"grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7\"},\n {file = \"grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e\"},\n {file = \"grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl\", hash = \"sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b\"},\n {file = \"grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl\", hash = \"sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7\"},\n {file = \"grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl\", hash = \"sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3\"},\n {file = \"grpcio-1.71.0-cp311-cp311-win32.whl\", hash = \"sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444\"},\n {file = \"grpcio-1.71.0-cp311-cp311-win_amd64.whl\", hash = \"sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b\"},\n {file = \"grpcio-1.71.0-cp312-cp312-linux_armv7l.whl\", hash = \"sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537\"},\n {file = \"grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl\", hash = \"sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7\"},\n {file = \"grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl\", hash = \"sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec\"},\n {file = \"grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594\"},\n {file = \"grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c\"},\n {file = \"grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl\", hash = \"sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67\"},\n {file = \"grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl\", hash = \"sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db\"},\n {file = \"grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl\", hash = \"sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79\"},\n {file = \"grpcio-1.71.0-cp312-cp312-win32.whl\", hash = \"sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a\"},\n {file = \"grpcio-1.71.0-cp312-cp312-win_amd64.whl\", hash = \"sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8\"},\n {file = \"grpcio-1.71.0-cp313-cp313-linux_armv7l.whl\", hash = \"sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379\"},\n {file = 
\"grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl\", hash = \"sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3\"},\n {file = \"grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl\", hash = \"sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db\"},\n {file = \"grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29\"},\n {file = \"grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4\"},\n {file = \"grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl\", hash = \"sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3\"},\n {file = \"grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl\", hash = \"sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b\"},\n {file = \"grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl\", hash = \"sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637\"},\n {file = \"grpcio-1.71.0-cp313-cp313-win32.whl\", hash = \"sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb\"},\n {file = \"grpcio-1.71.0-cp313-cp313-win_amd64.whl\", hash = \"sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366\"},\n {file = \"grpcio-1.71.0-cp39-cp39-linux_armv7l.whl\", hash = \"sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d\"},\n {file = \"grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl\", hash = \"sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e\"},\n {file = \"grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl\", hash = \"sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033\"},\n {file = \"grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97\"},\n {file = \"grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d\"},\n {file = \"grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl\", hash = \"sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41\"},\n {file = \"grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl\", hash = \"sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3\"},\n {file = \"grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl\", hash = \"sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32\"},\n {file = \"grpcio-1.71.0-cp39-cp39-win32.whl\", hash = \"sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455\"},\n {file = \"grpcio-1.71.0-cp39-cp39-win_amd64.whl\", hash = \"sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a\"},\n {file = \"grpcio-1.71.0.tar.gz\", hash = \"sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c\"},\n]\n\n[package.extras]\nprotobuf = [\"grpcio-tools (>=1.71.0)\"]\n\n[[package]]\nname = \"grpcio-tools\"\nversion = \"1.71.0\"\ndescription = \"Protobuf code generator for gRPC\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"grpcio_tools-1.71.0-cp310-cp310-linux_armv7l.whl\", hash = \"sha256:f4ad7f0d756546902597053d70b3af2606fbd70d7972876cd75c1e241d22ae00\"},\n {file = \"grpcio_tools-1.71.0-cp310-cp310-macosx_12_0_universal2.whl\", hash = 
\"sha256:64bdb291df61cf570b5256777ad5fe2b1db6d67bc46e55dc56a0a862722ae329\"},\n {file = \"grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl\", hash = \"sha256:8dd9795e982d77a4b496f7278b943c2563d9afde2069cdee78c111a40cc4d675\"},\n {file = \"grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:c1b5860c41a36b26fec4f52998f1a451d0525a5c9a4fb06b6ea3e9211abdb925\"},\n {file = \"grpcio_tools-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:3059c14035e5dc03d462f261e5900b9a077fd1a36976c3865b8507474520bad4\"},\n {file = \"grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl\", hash = \"sha256:f360981b215b1d5aff9235b37e7e1826246e35bbac32a53e41d4e990a37b8f4c\"},\n {file = \"grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_i686.whl\", hash = \"sha256:bfe3888c3bbe16a5aa39409bc38744a31c0c3d2daa2b0095978c56e106c85b42\"},\n {file = \"grpcio_tools-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl\", hash = \"sha256:145985c0bf12131f0a1503e65763e0f060473f7f3928ed1ff3fb0e8aad5bc8ac\"},\n {file = \"grpcio_tools-1.71.0-cp310-cp310-win32.whl\", hash = \"sha256:82c430edd939bb863550ee0fecf067d78feff828908a1b529bbe33cc57f2419c\"},\n {file = \"grpcio_tools-1.71.0-cp310-cp310-win_amd64.whl\", hash = \"sha256:83e90724e3f02415c628e4ead1d6ffe063820aaaa078d9a39176793df958cd5a\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-linux_armv7l.whl\", hash = \"sha256:1f19b16b49afa5d21473f49c0966dd430c88d089cd52ac02404d8cef67134efb\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-macosx_10_14_universal2.whl\", hash = \"sha256:459c8f5e00e390aecd5b89de67deb3ec7188a274bc6cb50e43cef35ab3a3f45d\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl\", hash = \"sha256:edab7e6518de01196be37f96cb1e138c3819986bf5e2a6c9e1519b4d716b2f5a\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:8b93b9f6adc7491d4c10144c0643409db298e5e63c997106a804f6f0248dbaf4\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:6ae5f2efa9e644c10bf1021600bfc099dfbd8e02b184d2d25dc31fcd6c2bc59e\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl\", hash = \"sha256:65aa082f4435571d65d5ce07fc444f23c3eff4f3e34abef599ef8c9e1f6f360f\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_i686.whl\", hash = \"sha256:1331e726e08b7bdcbf2075fcf4b47dff07842b04845e6e220a08a4663e232d7f\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl\", hash = \"sha256:6693a7d3ba138b0e693b3d1f687cdd9db9e68976c3fa2b951c17a072fea8b583\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-win32.whl\", hash = \"sha256:6d11ed3ff7b6023b5c72a8654975324bb98c1092426ba5b481af406ff559df00\"},\n {file = \"grpcio_tools-1.71.0-cp311-cp311-win_amd64.whl\", hash = \"sha256:072b2a5805ac97e4623b3aa8f7818275f3fb087f4aa131b0fce00471065f6eaa\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-linux_armv7l.whl\", hash = \"sha256:61c0409d5bdac57a7bd0ce0ab01c1c916728fe4c8a03d77a25135ad481eb505c\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-macosx_10_14_universal2.whl\", hash = \"sha256:28784f39921d061d2164a9dcda5164a69d07bf29f91f0ea50b505958292312c9\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl\", hash = \"sha256:192808cf553cedca73f0479cc61d5684ad61f24db7a5f3c4dfe1500342425866\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = 
\"sha256:989ee9da61098230d3d4c8f8f8e27c2de796f1ff21b1c90110e636d9acd9432b\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:541a756276c8a55dec991f6c0106ae20c8c8f5ce8d0bdbfcb01e2338d1a8192b\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl\", hash = \"sha256:870c0097700d13c403e5517cb7750ab5b4a791ce3e71791c411a38c5468b64bd\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_i686.whl\", hash = \"sha256:abd57f615e88bf93c3c6fd31f923106e3beb12f8cd2df95b0d256fa07a7a0a57\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl\", hash = \"sha256:753270e2d06d37e6d7af8967d1d059ec635ad215882041a36294f4e2fd502b2e\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-win32.whl\", hash = \"sha256:0e647794bd7138b8c215e86277a9711a95cf6a03ff6f9e555d54fdf7378b9f9d\"},\n {file = \"grpcio_tools-1.71.0-cp312-cp312-win_amd64.whl\", hash = \"sha256:48debc879570972d28bfe98e4970eff25bb26da3f383e0e49829b2d2cd35ad87\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-linux_armv7l.whl\", hash = \"sha256:9a78d07d6c301a25ef5ede962920a522556a1dfee1ccc05795994ceb867f766c\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-macosx_10_14_universal2.whl\", hash = \"sha256:580ac88141c9815557e63c9c04f5b1cdb19b4db8d0cb792b573354bde1ee8b12\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl\", hash = \"sha256:f7c678e68ece0ae908ecae1c4314a0c2c7f83e26e281738b9609860cc2c82d96\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:56ecd6cc89b5e5eed1de5eb9cafce86c9c9043ee3840888cc464d16200290b53\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:e52a041afc20ab2431d756b6295d727bd7adee813b21b06a3483f4a7a15ea15f\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl\", hash = \"sha256:2a1712f12102b60c8d92779b89d0504e0d6f3a59f2b933e5622b8583f5c02992\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_i686.whl\", hash = \"sha256:41878cb7a75477e62fdd45e7e9155b3af1b7a5332844021e2511deaf99ac9e6c\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl\", hash = \"sha256:682e958b476049ccc14c71bedf3f979bced01f6e0c04852efc5887841a32ad6b\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-win32.whl\", hash = \"sha256:0ccfb837152b7b858b9f26bb110b3ae8c46675d56130f6c2f03605c4f129be13\"},\n {file = \"grpcio_tools-1.71.0-cp313-cp313-win_amd64.whl\", hash = \"sha256:ffff9bc5eacb34dd26b487194f7d44a3e64e752fc2cf049d798021bf25053b87\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-linux_armv7l.whl\", hash = \"sha256:834959b6eceb85de5217a411aba1643b5f782798680c122202d6a06177226644\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-macosx_10_14_universal2.whl\", hash = \"sha256:e3ae9556e2a1cd70e7d7b0e0459c35af71d51a7dae4cf36075068011a69f13ec\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl\", hash = \"sha256:77fe6db1334e0ce318b2cb4e70afa94e0c173ed1a533d37aea69ad9f61ae8ea9\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:57e3e2544c306b60ef2d76570bac4e977be1ad548641c9eec130c3bc47e80141\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:af39e245fa56f7f5c2fe86b7d6c1b78f395c07e54d5613cbdbb3c24769a92b6e\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl\", hash = 
\"sha256:8f987d0053351217954543b174b0bddbf51d45b3cfcf8d6de97b0a43d264d753\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_i686.whl\", hash = \"sha256:8e6cdbba4dae7b37b0d25d074614be9936fb720144420f03d9f142a80be69ba2\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl\", hash = \"sha256:d3adc8b229e60c77bab5a5d62b415667133bd5ced7d59b5f71d6317c9143631e\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-win32.whl\", hash = \"sha256:f68334d28a267fabec6e70cb5986e9999cfbfd14db654094ddf9aedd804a293a\"},\n {file = \"grpcio_tools-1.71.0-cp39-cp39-win_amd64.whl\", hash = \"sha256:1291a6136c07a86c3bb09f6c33f5cf227cc14956edd1b85cb572327a36e0aef8\"},\n {file = \"grpcio_tools-1.71.0.tar.gz\", hash = \"sha256:38dba8e0d5e0fb23a034e09644fdc6ed862be2371887eee54901999e8f6792a8\"},\n]\n\n[package.dependencies]\ngrpcio = \">=1.71.0\"\nprotobuf = \">=5.26.1,<6.0dev\"\nsetuptools = \"*\"\n\n[[package]]\nname = \"hatchet-sdk\"\nversion = \"1.0.0a1\"\ndescription = \"\"\noptional = false\npython-versions = \"<4.0,>=3.10\"\ngroups = [\"main\"]\nfiles = [\n {file = \"hatchet_sdk-1.0.0a1-py3-none-any.whl\", hash = \"sha256:bfc84358c8842cecd0d95b30645109733b7292dff0db1a776ca862785ee93d7f\"},\n {file = \"hatchet_sdk-1.0.0a1.tar.gz\", hash = \"sha256:f0272bbaac6faed75ff727826e9f7b1ac42ae597f9b590e14d392aada9c9692f\"},\n]\n\n[package.dependencies]\naiohttp = \">=3.10.5,<4.0.0\"\naiohttp-retry = \">=2.8.3,<3.0.0\"\naiostream = \">=0.5.2,<0.6.0\"\ncel-python = \">=0.2.0,<0.3.0\"\ngrpcio = [\n {version = \">=1.64.1,<1.68.dev0 || >=1.69.dev0\", markers = \"python_version < \\\"3.13\\\"\"},\n {version = \">=1.69.0\", markers = \"python_version >= \\\"3.13\\\"\"},\n]\ngrpcio-tools = [\n {version = \">=1.64.1,<1.68.dev0 || >=1.69.dev0\", markers = \"python_version < \\\"3.13\\\"\"},\n {version = \">=1.69.0\", markers = \"python_version >= \\\"3.13\\\"\"},\n]\nnest-asyncio = \">=1.6.0,<2.0.0\"\nprometheus-client = \">=0.21.1,<0.22.0\"\nprotobuf = \">=5.29.1,<6.0.0\"\npydantic = \">=2.6.3,<3.0.0\"\npydantic-settings = \">=2.7.1,<3.0.0\"\npython-dateutil = \">=2.9.0.post0,<3.0.0\"\npyyaml = \">=6.0.1,<7.0.0\"\ntenacity = \">=8.4.1\"\nurllib3 = \">=1.26.20\"\n\n[package.extras]\notel = [\"opentelemetry-api (>=1.28.0,<2.0.0)\", \"opentelemetry-distro (>=0.49b0)\", \"opentelemetry-exporter-otlp (>=1.28.0,<2.0.0)\", \"opentelemetry-exporter-otlp-proto-http (>=1.28.0,<2.0.0)\", \"opentelemetry-instrumentation (>=0.49b0)\", \"opentelemetry-sdk (>=1.28.0,<2.0.0)\"]\n\n[[package]]\nname = \"idna\"\nversion = \"3.10\"\ndescription = \"Internationalized Domain Names in Applications (IDNA)\"\noptional = false\npython-versions = \">=3.6\"\ngroups = [\"main\"]\nfiles = [\n {file = \"idna-3.10-py3-none-any.whl\", hash = \"sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3\"},\n {file = \"idna-3.10.tar.gz\", hash = \"sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9\"},\n]\n\n[package.extras]\nall = [\"flake8 (>=7.1.1)\", \"mypy (>=1.11.2)\", \"pytest (>=8.3.2)\", \"ruff (>=0.6.2)\"]\n\n[[package]]\nname = \"jmespath\"\nversion = \"1.0.1\"\ndescription = \"JSON Matching Expressions\"\noptional = false\npython-versions = \">=3.7\"\ngroups = [\"main\"]\nfiles = [\n {file = \"jmespath-1.0.1-py3-none-any.whl\", hash = \"sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980\"},\n {file = \"jmespath-1.0.1.tar.gz\", hash = \"sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe\"},\n]\n\n[[package]]\nname = \"lark\"\nversion = \"0.12.0\"\ndescription 
= \"a modern parsing library\"\noptional = false\npython-versions = \"*\"\ngroups = [\"main\"]\nfiles = [\n {file = \"lark-0.12.0-py2.py3-none-any.whl\", hash = \"sha256:ed1d891cbcf5151ead1c1d14663bf542443e579e63a76ae175b01b899bd854ca\"},\n {file = \"lark-0.12.0.tar.gz\", hash = \"sha256:7da76fcfddadabbbbfd949bbae221efd33938451d90b1fefbbc423c3cccf48ef\"},\n]\n\n[package.extras]\natomic-cache = [\"atomicwrites\"]\nnearley = [\"js2py\"]\nregex = [\"regex\"]\n\n[[package]]\nname = \"multidict\"\nversion = \"6.2.0\"\ndescription = \"multidict implementation\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"multidict-6.2.0-cp310-cp310-macosx_10_9_universal2.whl\", hash = \"sha256:b9f6392d98c0bd70676ae41474e2eecf4c7150cb419237a41f8f96043fcb81d1\"},\n {file = \"multidict-6.2.0-cp310-cp310-macosx_10_9_x86_64.whl\", hash = \"sha256:3501621d5e86f1a88521ea65d5cad0a0834c77b26f193747615b7c911e5422d2\"},\n {file = \"multidict-6.2.0-cp310-cp310-macosx_11_0_arm64.whl\", hash = \"sha256:32ed748ff9ac682eae7859790d3044b50e3076c7d80e17a44239683769ff485e\"},\n {file = \"multidict-6.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:cc826b9a8176e686b67aa60fd6c6a7047b0461cae5591ea1dc73d28f72332a8a\"},\n {file = \"multidict-6.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:214207dcc7a6221d9942f23797fe89144128a71c03632bf713d918db99bd36de\"},\n {file = \"multidict-6.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:05fefbc3cddc4e36da209a5e49f1094bbece9a581faa7f3589201fd95df40e5d\"},\n {file = \"multidict-6.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:e851e6363d0dbe515d8de81fd544a2c956fdec6f8a049739562286727d4a00c3\"},\n {file = \"multidict-6.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:32c9b4878f48be3e75808ea7e499d6223b1eea6d54c487a66bc10a1871e3dc6a\"},\n {file = \"multidict-6.2.0-cp310-cp310-musllinux_1_2_aarch64.whl\", hash = \"sha256:7243c5a6523c5cfeca76e063efa5f6a656d1d74c8b1fc64b2cd1e84e507f7e2a\"},\n {file = \"multidict-6.2.0-cp310-cp310-musllinux_1_2_i686.whl\", hash = \"sha256:0e5a644e50ef9fb87878d4d57907f03a12410d2aa3b93b3acdf90a741df52c49\"},\n {file = \"multidict-6.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl\", hash = \"sha256:0dc25a3293c50744796e87048de5e68996104d86d940bb24bc3ec31df281b191\"},\n {file = \"multidict-6.2.0-cp310-cp310-musllinux_1_2_s390x.whl\", hash = \"sha256:a49994481b99cd7dedde07f2e7e93b1d86c01c0fca1c32aded18f10695ae17eb\"},\n {file = \"multidict-6.2.0-cp310-cp310-musllinux_1_2_x86_64.whl\", hash = \"sha256:641cf2e3447c9ecff2f7aa6e9eee9eaa286ea65d57b014543a4911ff2799d08a\"},\n {file = \"multidict-6.2.0-cp310-cp310-win32.whl\", hash = \"sha256:0c383d28857f66f5aebe3e91d6cf498da73af75fbd51cedbe1adfb85e90c0460\"},\n {file = \"multidict-6.2.0-cp310-cp310-win_amd64.whl\", hash = \"sha256:a33273a541f1e1a8219b2a4ed2de355848ecc0254264915b9290c8d2de1c74e1\"},\n {file = \"multidict-6.2.0-cp311-cp311-macosx_10_9_universal2.whl\", hash = \"sha256:84e87a7d75fa36839a3a432286d719975362d230c70ebfa0948549cc38bd5b46\"},\n {file = \"multidict-6.2.0-cp311-cp311-macosx_10_9_x86_64.whl\", hash = \"sha256:8de4d42dffd5ced9117af2ce66ba8722402541a3aa98ffdf78dde92badb68932\"},\n {file = \"multidict-6.2.0-cp311-cp311-macosx_11_0_arm64.whl\", hash = \"sha256:e7d91a230c7f8af86c904a5a992b8c064b66330544693fd6759c3d6162382ecf\"},\n {file = 
\"multidict-6.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:9f6cad071960ba1914fa231677d21b1b4a3acdcce463cee41ea30bc82e6040cf\"},\n {file = \"multidict-6.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:0f74f2fc51555f4b037ef278efc29a870d327053aba5cb7d86ae572426c7cccc\"},\n {file = \"multidict-6.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:14ed9ed1bfedd72a877807c71113deac292bf485159a29025dfdc524c326f3e1\"},\n {file = \"multidict-6.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:4ac3fcf9a2d369bd075b2c2965544036a27ccd277fc3c04f708338cc57533081\"},\n {file = \"multidict-6.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:2fc6af8e39f7496047c7876314f4317736eac82bf85b54c7c76cf1a6f8e35d98\"},\n {file = \"multidict-6.2.0-cp311-cp311-musllinux_1_2_aarch64.whl\", hash = \"sha256:5f8cb1329f42fadfb40d6211e5ff568d71ab49be36e759345f91c69d1033d633\"},\n {file = \"multidict-6.2.0-cp311-cp311-musllinux_1_2_i686.whl\", hash = \"sha256:5389445f0173c197f4a3613713b5fb3f3879df1ded2a1a2e4bc4b5b9c5441b7e\"},\n {file = \"multidict-6.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl\", hash = \"sha256:94a7bb972178a8bfc4055db80c51efd24baefaced5e51c59b0d598a004e8305d\"},\n {file = \"multidict-6.2.0-cp311-cp311-musllinux_1_2_s390x.whl\", hash = \"sha256:da51d8928ad8b4244926fe862ba1795f0b6e68ed8c42cd2f822d435db9c2a8f4\"},\n {file = \"multidict-6.2.0-cp311-cp311-musllinux_1_2_x86_64.whl\", hash = \"sha256:063be88bd684782a0715641de853e1e58a2f25b76388538bd62d974777ce9bc2\"},\n {file = \"multidict-6.2.0-cp311-cp311-win32.whl\", hash = \"sha256:52b05e21ff05729fbea9bc20b3a791c3c11da61649ff64cce8257c82a020466d\"},\n {file = \"multidict-6.2.0-cp311-cp311-win_amd64.whl\", hash = \"sha256:1e2a2193d3aa5cbf5758f6d5680a52aa848e0cf611da324f71e5e48a9695cc86\"},\n {file = \"multidict-6.2.0-cp312-cp312-macosx_10_13_universal2.whl\", hash = \"sha256:437c33561edb6eb504b5a30203daf81d4a9b727e167e78b0854d9a4e18e8950b\"},\n {file = \"multidict-6.2.0-cp312-cp312-macosx_10_13_x86_64.whl\", hash = \"sha256:9f49585f4abadd2283034fc605961f40c638635bc60f5162276fec075f2e37a4\"},\n {file = \"multidict-6.2.0-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:5dd7106d064d05896ce28c97da3f46caa442fe5a43bc26dfb258e90853b39b44\"},\n {file = \"multidict-6.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:e25b11a0417475f093d0f0809a149aff3943c2c56da50fdf2c3c88d57fe3dfbd\"},\n {file = \"multidict-6.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:ac380cacdd3b183338ba63a144a34e9044520a6fb30c58aa14077157a033c13e\"},\n {file = \"multidict-6.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:61d5541f27533f803a941d3a3f8a3d10ed48c12cf918f557efcbf3cd04ef265c\"},\n {file = \"multidict-6.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:facaf11f21f3a4c51b62931feb13310e6fe3475f85e20d9c9fdce0d2ea561b87\"},\n {file = \"multidict-6.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:095a2eabe8c43041d3e6c2cb8287a257b5f1801c2d6ebd1dd877424f1e89cf29\"},\n {file = \"multidict-6.2.0-cp312-cp312-musllinux_1_2_aarch64.whl\", hash = \"sha256:a0cc398350ef31167e03f3ca7c19313d4e40a662adcb98a88755e4e861170bdd\"},\n {file = \"multidict-6.2.0-cp312-cp312-musllinux_1_2_i686.whl\", hash = 
\"sha256:7c611345bbe7cb44aabb877cb94b63e86f2d0db03e382667dbd037866d44b4f8\"},\n {file = \"multidict-6.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl\", hash = \"sha256:8cd1a0644ccaf27e9d2f6d9c9474faabee21f0578fe85225cc5af9a61e1653df\"},\n {file = \"multidict-6.2.0-cp312-cp312-musllinux_1_2_s390x.whl\", hash = \"sha256:89b3857652183b8206a891168af47bac10b970d275bba1f6ee46565a758c078d\"},\n {file = \"multidict-6.2.0-cp312-cp312-musllinux_1_2_x86_64.whl\", hash = \"sha256:125dd82b40f8c06d08d87b3510beaccb88afac94e9ed4a6f6c71362dc7dbb04b\"},\n {file = \"multidict-6.2.0-cp312-cp312-win32.whl\", hash = \"sha256:76b34c12b013d813e6cb325e6bd4f9c984db27758b16085926bbe7ceeaace626\"},\n {file = \"multidict-6.2.0-cp312-cp312-win_amd64.whl\", hash = \"sha256:0b183a959fb88ad1be201de2c4bdf52fa8e46e6c185d76201286a97b6f5ee65c\"},\n {file = \"multidict-6.2.0-cp313-cp313-macosx_10_13_universal2.whl\", hash = \"sha256:5c5e7d2e300d5cb3b2693b6d60d3e8c8e7dd4ebe27cd17c9cb57020cac0acb80\"},\n {file = \"multidict-6.2.0-cp313-cp313-macosx_10_13_x86_64.whl\", hash = \"sha256:256d431fe4583c5f1e0f2e9c4d9c22f3a04ae96009b8cfa096da3a8723db0a16\"},\n {file = \"multidict-6.2.0-cp313-cp313-macosx_11_0_arm64.whl\", hash = \"sha256:a3c0ff89fe40a152e77b191b83282c9664357dce3004032d42e68c514ceff27e\"},\n {file = \"multidict-6.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:ef7d48207926edbf8b16b336f779c557dd8f5a33035a85db9c4b0febb0706817\"},\n {file = \"multidict-6.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:1f3c099d3899b14e1ce52262eb82a5f5cb92157bb5106bf627b618c090a0eadc\"},\n {file = \"multidict-6.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:e16e7297f29a544f49340012d6fc08cf14de0ab361c9eb7529f6a57a30cbfda1\"},\n {file = \"multidict-6.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:042028348dc5a1f2be6c666437042a98a5d24cee50380f4c0902215e5ec41844\"},\n {file = \"multidict-6.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:08549895e6a799bd551cf276f6e59820aa084f0f90665c0f03dd3a50db5d3c48\"},\n {file = \"multidict-6.2.0-cp313-cp313-musllinux_1_2_aarch64.whl\", hash = \"sha256:4ccfd74957ef53fa7380aaa1c961f523d582cd5e85a620880ffabd407f8202c0\"},\n {file = \"multidict-6.2.0-cp313-cp313-musllinux_1_2_i686.whl\", hash = \"sha256:83b78c680d4b15d33042d330c2fa31813ca3974197bddb3836a5c635a5fd013f\"},\n {file = \"multidict-6.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl\", hash = \"sha256:b4c153863dd6569f6511845922c53e39c8d61f6e81f228ad5443e690fca403de\"},\n {file = \"multidict-6.2.0-cp313-cp313-musllinux_1_2_s390x.whl\", hash = \"sha256:98aa8325c7f47183b45588af9c434533196e241be0a4e4ae2190b06d17675c02\"},\n {file = \"multidict-6.2.0-cp313-cp313-musllinux_1_2_x86_64.whl\", hash = \"sha256:9e658d1373c424457ddf6d55ec1db93c280b8579276bebd1f72f113072df8a5d\"},\n {file = \"multidict-6.2.0-cp313-cp313-win32.whl\", hash = \"sha256:3157126b028c074951839233647bd0e30df77ef1fedd801b48bdcad242a60f4e\"},\n {file = \"multidict-6.2.0-cp313-cp313-win_amd64.whl\", hash = \"sha256:2e87f1926e91855ae61769ba3e3f7315120788c099677e0842e697b0bfb659f2\"},\n {file = \"multidict-6.2.0-cp313-cp313t-macosx_10_13_universal2.whl\", hash = \"sha256:2529ddbdaa424b2c6c2eb668ea684dd6b75b839d0ad4b21aad60c168269478d7\"},\n {file = \"multidict-6.2.0-cp313-cp313t-macosx_10_13_x86_64.whl\", hash = \"sha256:13551d0e2d7201f0959725a6a769b6f7b9019a168ed96006479c9ac33fe4096b\"},\n {file = 
\"multidict-6.2.0-cp313-cp313t-macosx_11_0_arm64.whl\", hash = \"sha256:d1996ee1330e245cd3aeda0887b4409e3930524c27642b046e4fae88ffa66c5e\"},\n {file = \"multidict-6.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:c537da54ce4ff7c15e78ab1292e5799d0d43a2108e006578a57f531866f64025\"},\n {file = \"multidict-6.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:0f249badb360b0b4d694307ad40f811f83df4da8cef7b68e429e4eea939e49dd\"},\n {file = \"multidict-6.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:48d39b1824b8d6ea7de878ef6226efbe0773f9c64333e1125e0efcfdd18a24c7\"},\n {file = \"multidict-6.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:b99aac6bb2c37db336fa03a39b40ed4ef2818bf2dfb9441458165ebe88b793af\"},\n {file = \"multidict-6.2.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:07bfa8bc649783e703263f783f73e27fef8cd37baaad4389816cf6a133141331\"},\n {file = \"multidict-6.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl\", hash = \"sha256:b2c00ad31fbc2cbac85d7d0fcf90853b2ca2e69d825a2d3f3edb842ef1544a2c\"},\n {file = \"multidict-6.2.0-cp313-cp313t-musllinux_1_2_i686.whl\", hash = \"sha256:0d57a01a2a9fa00234aace434d8c131f0ac6e0ac6ef131eda5962d7e79edfb5b\"},\n {file = \"multidict-6.2.0-cp313-cp313t-musllinux_1_2_ppc64le.whl\", hash = \"sha256:abf5b17bc0cf626a8a497d89ac691308dbd825d2ac372aa990b1ca114e470151\"},\n {file = \"multidict-6.2.0-cp313-cp313t-musllinux_1_2_s390x.whl\", hash = \"sha256:f7716f7e7138252d88607228ce40be22660d6608d20fd365d596e7ca0738e019\"},\n {file = \"multidict-6.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl\", hash = \"sha256:d5a36953389f35f0a4e88dc796048829a2f467c9197265504593f0e420571547\"},\n {file = \"multidict-6.2.0-cp313-cp313t-win32.whl\", hash = \"sha256:e653d36b1bf48fa78c7fcebb5fa679342e025121ace8c87ab05c1cefd33b34fc\"},\n {file = \"multidict-6.2.0-cp313-cp313t-win_amd64.whl\", hash = \"sha256:ca23db5fb195b5ef4fd1f77ce26cadefdf13dba71dab14dadd29b34d457d7c44\"},\n {file = \"multidict-6.2.0-cp39-cp39-macosx_10_9_universal2.whl\", hash = \"sha256:b4f3d66dd0354b79761481fc15bdafaba0b9d9076f1f42cc9ce10d7fcbda205a\"},\n {file = \"multidict-6.2.0-cp39-cp39-macosx_10_9_x86_64.whl\", hash = \"sha256:6e2a2d6749e1ff2c9c76a72c6530d5baa601205b14e441e6d98011000f47a7ac\"},\n {file = \"multidict-6.2.0-cp39-cp39-macosx_11_0_arm64.whl\", hash = \"sha256:cca83a629f77402cfadd58352e394d79a61c8015f1694b83ab72237ec3941f88\"},\n {file = \"multidict-6.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:781b5dd1db18c9e9eacc419027b0acb5073bdec9de1675c0be25ceb10e2ad133\"},\n {file = \"multidict-6.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:cf8d370b2fea27fb300825ec3984334f7dd54a581bde6456799ba3776915a656\"},\n {file = \"multidict-6.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:25bb96338512e2f46f615a2bb7c6012fe92a4a5ebd353e5020836a7e33120349\"},\n {file = \"multidict-6.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:19e2819b0b468174de25c0ceed766606a07cedeab132383f1e83b9a4e96ccb4f\"},\n {file = \"multidict-6.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:6aed763b6a1b28c46c055692836879328f0b334a6d61572ee4113a5d0c859872\"},\n {file = \"multidict-6.2.0-cp39-cp39-musllinux_1_2_aarch64.whl\", hash = 
\"sha256:a1133414b771619aa3c3000701c11b2e4624a7f492f12f256aedde97c28331a2\"},\n {file = \"multidict-6.2.0-cp39-cp39-musllinux_1_2_i686.whl\", hash = \"sha256:639556758c36093b35e2e368ca485dada6afc2bd6a1b1207d85ea6dfc3deab27\"},\n {file = \"multidict-6.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl\", hash = \"sha256:163f4604e76639f728d127293d24c3e208b445b463168af3d031b92b0998bb90\"},\n {file = \"multidict-6.2.0-cp39-cp39-musllinux_1_2_s390x.whl\", hash = \"sha256:2325105e16d434749e1be8022f942876a936f9bece4ec41ae244e3d7fae42aaf\"},\n {file = \"multidict-6.2.0-cp39-cp39-musllinux_1_2_x86_64.whl\", hash = \"sha256:e4371591e621579cb6da8401e4ea405b33ff25a755874a3567c4075ca63d56e2\"},\n {file = \"multidict-6.2.0-cp39-cp39-win32.whl\", hash = \"sha256:d1175b0e0d6037fab207f05774a176d71210ebd40b1c51f480a04b65ec5c786d\"},\n {file = \"multidict-6.2.0-cp39-cp39-win_amd64.whl\", hash = \"sha256:ad81012b24b88aad4c70b2cbc2dad84018783221b7f923e926f4690ff8569da3\"},\n {file = \"multidict-6.2.0-py3-none-any.whl\", hash = \"sha256:5d26547423e5e71dcc562c4acdc134b900640a39abd9066d7326a7cc2324c530\"},\n {file = \"multidict-6.2.0.tar.gz\", hash = \"sha256:0085b0afb2446e57050140240a8595846ed64d1cbd26cef936bfab3192c673b8\"},\n]\n\n[package.dependencies]\ntyping-extensions = {version = \">=4.1.0\", markers = \"python_version < \\\"3.11\\\"\"}\n\n[[package]]\nname = \"nest-asyncio\"\nversion = \"1.6.0\"\ndescription = \"Patch asyncio to allow nested event loops\"\noptional = false\npython-versions = \">=3.5\"\ngroups = [\"main\"]\nfiles = [\n {file = \"nest_asyncio-1.6.0-py3-none-any.whl\", hash = \"sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c\"},\n {file = \"nest_asyncio-1.6.0.tar.gz\", hash = \"sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe\"},\n]\n\n[[package]]\nname = \"prometheus-client\"\nversion = \"0.21.1\"\ndescription = \"Python client for the Prometheus monitoring system.\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"prometheus_client-0.21.1-py3-none-any.whl\", hash = \"sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301\"},\n {file = \"prometheus_client-0.21.1.tar.gz\", hash = \"sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb\"},\n]\n\n[package.extras]\ntwisted = [\"twisted\"]\n\n[[package]]\nname = \"propcache\"\nversion = \"0.3.0\"\ndescription = \"Accelerated property cache\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"propcache-0.3.0-cp310-cp310-macosx_10_9_universal2.whl\", hash = \"sha256:efa44f64c37cc30c9f05932c740a8b40ce359f51882c70883cc95feac842da4d\"},\n {file = \"propcache-0.3.0-cp310-cp310-macosx_10_9_x86_64.whl\", hash = \"sha256:2383a17385d9800b6eb5855c2f05ee550f803878f344f58b6e194de08b96352c\"},\n {file = \"propcache-0.3.0-cp310-cp310-macosx_11_0_arm64.whl\", hash = \"sha256:d3e7420211f5a65a54675fd860ea04173cde60a7cc20ccfbafcccd155225f8bc\"},\n {file = \"propcache-0.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:3302c5287e504d23bb0e64d2a921d1eb4a03fb93a0a0aa3b53de059f5a5d737d\"},\n {file = \"propcache-0.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:7e2e068a83552ddf7a39a99488bcba05ac13454fb205c847674da0352602082f\"},\n {file = \"propcache-0.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:2d913d36bdaf368637b4f88d554fb9cb9d53d6920b9c5563846555938d5450bf\"},\n {file = 
\"propcache-0.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:8ee1983728964d6070ab443399c476de93d5d741f71e8f6e7880a065f878e0b9\"},\n {file = \"propcache-0.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:36ca5e9a21822cc1746023e88f5c0af6fce3af3b85d4520efb1ce4221bed75cc\"},\n {file = \"propcache-0.3.0-cp310-cp310-musllinux_1_2_aarch64.whl\", hash = \"sha256:9ecde3671e62eeb99e977f5221abcf40c208f69b5eb986b061ccec317c82ebd0\"},\n {file = \"propcache-0.3.0-cp310-cp310-musllinux_1_2_armv7l.whl\", hash = \"sha256:d383bf5e045d7f9d239b38e6acadd7b7fdf6c0087259a84ae3475d18e9a2ae8b\"},\n {file = \"propcache-0.3.0-cp310-cp310-musllinux_1_2_i686.whl\", hash = \"sha256:8cb625bcb5add899cb8ba7bf716ec1d3e8f7cdea9b0713fa99eadf73b6d4986f\"},\n {file = \"propcache-0.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl\", hash = \"sha256:5fa159dcee5dba00c1def3231c249cf261185189205073bde13797e57dd7540a\"},\n {file = \"propcache-0.3.0-cp310-cp310-musllinux_1_2_s390x.whl\", hash = \"sha256:a7080b0159ce05f179cfac592cda1a82898ca9cd097dacf8ea20ae33474fbb25\"},\n {file = \"propcache-0.3.0-cp310-cp310-musllinux_1_2_x86_64.whl\", hash = \"sha256:ed7161bccab7696a473fe7ddb619c1d75963732b37da4618ba12e60899fefe4f\"},\n {file = \"propcache-0.3.0-cp310-cp310-win32.whl\", hash = \"sha256:bf0d9a171908f32d54f651648c7290397b8792f4303821c42a74e7805bfb813c\"},\n {file = \"propcache-0.3.0-cp310-cp310-win_amd64.whl\", hash = \"sha256:42924dc0c9d73e49908e35bbdec87adedd651ea24c53c29cac103ede0ea1d340\"},\n {file = \"propcache-0.3.0-cp311-cp311-macosx_10_9_universal2.whl\", hash = \"sha256:9ddd49258610499aab83b4f5b61b32e11fce873586282a0e972e5ab3bcadee51\"},\n {file = \"propcache-0.3.0-cp311-cp311-macosx_10_9_x86_64.whl\", hash = \"sha256:2578541776769b500bada3f8a4eeaf944530516b6e90c089aa368266ed70c49e\"},\n {file = \"propcache-0.3.0-cp311-cp311-macosx_11_0_arm64.whl\", hash = \"sha256:d8074c5dd61c8a3e915fa8fc04754fa55cfa5978200d2daa1e2d4294c1f136aa\"},\n {file = \"propcache-0.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:b58229a844931bca61b3a20efd2be2a2acb4ad1622fc026504309a6883686fbf\"},\n {file = \"propcache-0.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:e45377d5d6fefe1677da2a2c07b024a6dac782088e37c0b1efea4cfe2b1be19b\"},\n {file = \"propcache-0.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:ec5060592d83454e8063e487696ac3783cc48c9a329498bafae0d972bc7816c9\"},\n {file = \"propcache-0.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:15010f29fbed80e711db272909a074dc79858c6d28e2915704cfc487a8ac89c6\"},\n {file = \"propcache-0.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:a254537b9b696ede293bfdbc0a65200e8e4507bc9f37831e2a0318a9b333c85c\"},\n {file = \"propcache-0.3.0-cp311-cp311-musllinux_1_2_aarch64.whl\", hash = \"sha256:2b975528998de037dfbc10144b8aed9b8dd5a99ec547f14d1cb7c5665a43f075\"},\n {file = \"propcache-0.3.0-cp311-cp311-musllinux_1_2_armv7l.whl\", hash = \"sha256:19d36bb351ad5554ff20f2ae75f88ce205b0748c38b146c75628577020351e3c\"},\n {file = \"propcache-0.3.0-cp311-cp311-musllinux_1_2_i686.whl\", hash = \"sha256:6032231d4a5abd67c7f71168fd64a47b6b451fbcb91c8397c2f7610e67683810\"},\n {file = \"propcache-0.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl\", hash = \"sha256:6985a593417cdbc94c7f9c3403747335e450c1599da1647a5af76539672464d3\"},\n {file = 
\"propcache-0.3.0-cp311-cp311-musllinux_1_2_s390x.whl\", hash = \"sha256:6a1948df1bb1d56b5e7b0553c0fa04fd0e320997ae99689488201f19fa90d2e7\"},\n {file = \"propcache-0.3.0-cp311-cp311-musllinux_1_2_x86_64.whl\", hash = \"sha256:8319293e85feadbbfe2150a5659dbc2ebc4afdeaf7d98936fb9a2f2ba0d4c35c\"},\n {file = \"propcache-0.3.0-cp311-cp311-win32.whl\", hash = \"sha256:63f26258a163c34542c24808f03d734b338da66ba91f410a703e505c8485791d\"},\n {file = \"propcache-0.3.0-cp311-cp311-win_amd64.whl\", hash = \"sha256:cacea77ef7a2195f04f9279297684955e3d1ae4241092ff0cfcef532bb7a1c32\"},\n {file = \"propcache-0.3.0-cp312-cp312-macosx_10_13_universal2.whl\", hash = \"sha256:e53d19c2bf7d0d1e6998a7e693c7e87300dd971808e6618964621ccd0e01fe4e\"},\n {file = \"propcache-0.3.0-cp312-cp312-macosx_10_13_x86_64.whl\", hash = \"sha256:a61a68d630e812b67b5bf097ab84e2cd79b48c792857dc10ba8a223f5b06a2af\"},\n {file = \"propcache-0.3.0-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:fb91d20fa2d3b13deea98a690534697742029f4fb83673a3501ae6e3746508b5\"},\n {file = \"propcache-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:67054e47c01b7b349b94ed0840ccae075449503cf1fdd0a1fdd98ab5ddc2667b\"},\n {file = \"propcache-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:997e7b8f173a391987df40f3b52c423e5850be6f6df0dcfb5376365440b56667\"},\n {file = \"propcache-0.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:8d663fd71491dde7dfdfc899d13a067a94198e90695b4321084c6e450743b8c7\"},\n {file = \"propcache-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:8884ba1a0fe7210b775106b25850f5e5a9dc3c840d1ae9924ee6ea2eb3acbfe7\"},\n {file = \"propcache-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:aa806bbc13eac1ab6291ed21ecd2dd426063ca5417dd507e6be58de20e58dfcf\"},\n {file = \"propcache-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl\", hash = \"sha256:6f4d7a7c0aff92e8354cceca6fe223973ddf08401047920df0fcb24be2bd5138\"},\n {file = \"propcache-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl\", hash = \"sha256:9be90eebc9842a93ef8335291f57b3b7488ac24f70df96a6034a13cb58e6ff86\"},\n {file = \"propcache-0.3.0-cp312-cp312-musllinux_1_2_i686.whl\", hash = \"sha256:bf15fc0b45914d9d1b706f7c9c4f66f2b7b053e9517e40123e137e8ca8958b3d\"},\n {file = \"propcache-0.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl\", hash = \"sha256:5a16167118677d94bb48bfcd91e420088854eb0737b76ec374b91498fb77a70e\"},\n {file = \"propcache-0.3.0-cp312-cp312-musllinux_1_2_s390x.whl\", hash = \"sha256:41de3da5458edd5678b0f6ff66691507f9885f5fe6a0fb99a5d10d10c0fd2d64\"},\n {file = \"propcache-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl\", hash = \"sha256:728af36011bb5d344c4fe4af79cfe186729efb649d2f8b395d1572fb088a996c\"},\n {file = \"propcache-0.3.0-cp312-cp312-win32.whl\", hash = \"sha256:6b5b7fd6ee7b54e01759f2044f936dcf7dea6e7585f35490f7ca0420fe723c0d\"},\n {file = \"propcache-0.3.0-cp312-cp312-win_amd64.whl\", hash = \"sha256:2d15bc27163cd4df433e75f546b9ac31c1ba7b0b128bfb1b90df19082466ff57\"},\n {file = \"propcache-0.3.0-cp313-cp313-macosx_10_13_universal2.whl\", hash = \"sha256:a2b9bf8c79b660d0ca1ad95e587818c30ccdb11f787657458d6f26a1ea18c568\"},\n {file = \"propcache-0.3.0-cp313-cp313-macosx_10_13_x86_64.whl\", hash = \"sha256:b0c1a133d42c6fc1f5fbcf5c91331657a1ff822e87989bf4a6e2e39b818d0ee9\"},\n {file = \"propcache-0.3.0-cp313-cp313-macosx_11_0_arm64.whl\", hash = 
\"sha256:bb2f144c6d98bb5cbc94adeb0447cfd4c0f991341baa68eee3f3b0c9c0e83767\"},\n {file = \"propcache-0.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:d1323cd04d6e92150bcc79d0174ce347ed4b349d748b9358fd2e497b121e03c8\"},\n {file = \"propcache-0.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:3b812b3cb6caacd072276ac0492d249f210006c57726b6484a1e1805b3cfeea0\"},\n {file = \"propcache-0.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:742840d1d0438eb7ea4280f3347598f507a199a35a08294afdcc560c3739989d\"},\n {file = \"propcache-0.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:7c6e7e4f9167fddc438cd653d826f2222222564daed4116a02a184b464d3ef05\"},\n {file = \"propcache-0.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:a94ffc66738da99232ddffcf7910e0f69e2bbe3a0802e54426dbf0714e1c2ffe\"},\n {file = \"propcache-0.3.0-cp313-cp313-musllinux_1_2_aarch64.whl\", hash = \"sha256:3c6ec957025bf32b15cbc6b67afe233c65b30005e4c55fe5768e4bb518d712f1\"},\n {file = \"propcache-0.3.0-cp313-cp313-musllinux_1_2_armv7l.whl\", hash = \"sha256:549722908de62aa0b47a78b90531c022fa6e139f9166be634f667ff45632cc92\"},\n {file = \"propcache-0.3.0-cp313-cp313-musllinux_1_2_i686.whl\", hash = \"sha256:5d62c4f6706bff5d8a52fd51fec6069bef69e7202ed481486c0bc3874912c787\"},\n {file = \"propcache-0.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl\", hash = \"sha256:24c04f8fbf60094c531667b8207acbae54146661657a1b1be6d3ca7773b7a545\"},\n {file = \"propcache-0.3.0-cp313-cp313-musllinux_1_2_s390x.whl\", hash = \"sha256:7c5f5290799a3f6539cc5e6f474c3e5c5fbeba74a5e1e5be75587746a940d51e\"},\n {file = \"propcache-0.3.0-cp313-cp313-musllinux_1_2_x86_64.whl\", hash = \"sha256:4fa0e7c9c3cf7c276d4f6ab9af8adddc127d04e0fcabede315904d2ff76db626\"},\n {file = \"propcache-0.3.0-cp313-cp313-win32.whl\", hash = \"sha256:ee0bd3a7b2e184e88d25c9baa6a9dc609ba25b76daae942edfb14499ac7ec374\"},\n {file = \"propcache-0.3.0-cp313-cp313-win_amd64.whl\", hash = \"sha256:1c8f7d896a16da9455f882870a507567d4f58c53504dc2d4b1e1d386dfe4588a\"},\n {file = \"propcache-0.3.0-cp313-cp313t-macosx_10_13_universal2.whl\", hash = \"sha256:e560fd75aaf3e5693b91bcaddd8b314f4d57e99aef8a6c6dc692f935cc1e6bbf\"},\n {file = \"propcache-0.3.0-cp313-cp313t-macosx_10_13_x86_64.whl\", hash = \"sha256:65a37714b8ad9aba5780325228598a5b16c47ba0f8aeb3dc0514701e4413d7c0\"},\n {file = \"propcache-0.3.0-cp313-cp313t-macosx_11_0_arm64.whl\", hash = \"sha256:07700939b2cbd67bfb3b76a12e1412405d71019df00ca5697ce75e5ef789d829\"},\n {file = \"propcache-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:7c0fdbdf6983526e269e5a8d53b7ae3622dd6998468821d660d0daf72779aefa\"},\n {file = \"propcache-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:794c3dd744fad478b6232289c866c25406ecdfc47e294618bdf1697e69bd64a6\"},\n {file = \"propcache-0.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:4544699674faf66fb6b4473a1518ae4999c1b614f0b8297b1cef96bac25381db\"},\n {file = \"propcache-0.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:fddb8870bdb83456a489ab67c6b3040a8d5a55069aa6f72f9d872235fbc52f54\"},\n {file = \"propcache-0.3.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = 
\"sha256:f857034dc68d5ceb30fb60afb6ff2103087aea10a01b613985610e007053a121\"},\n {file = \"propcache-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl\", hash = \"sha256:02df07041e0820cacc8f739510078f2aadcfd3fc57eaeeb16d5ded85c872c89e\"},\n {file = \"propcache-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl\", hash = \"sha256:f47d52fd9b2ac418c4890aad2f6d21a6b96183c98021f0a48497a904199f006e\"},\n {file = \"propcache-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl\", hash = \"sha256:9ff4e9ecb6e4b363430edf2c6e50173a63e0820e549918adef70515f87ced19a\"},\n {file = \"propcache-0.3.0-cp313-cp313t-musllinux_1_2_ppc64le.whl\", hash = \"sha256:ecc2920630283e0783c22e2ac94427f8cca29a04cfdf331467d4f661f4072dac\"},\n {file = \"propcache-0.3.0-cp313-cp313t-musllinux_1_2_s390x.whl\", hash = \"sha256:c441c841e82c5ba7a85ad25986014be8d7849c3cfbdb6004541873505929a74e\"},\n {file = \"propcache-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl\", hash = \"sha256:6c929916cbdb540d3407c66f19f73387f43e7c12fa318a66f64ac99da601bcdf\"},\n {file = \"propcache-0.3.0-cp313-cp313t-win32.whl\", hash = \"sha256:0c3e893c4464ebd751b44ae76c12c5f5c1e4f6cbd6fbf67e3783cd93ad221863\"},\n {file = \"propcache-0.3.0-cp313-cp313t-win_amd64.whl\", hash = \"sha256:75e872573220d1ee2305b35c9813626e620768248425f58798413e9c39741f46\"},\n {file = \"propcache-0.3.0-cp39-cp39-macosx_10_9_universal2.whl\", hash = \"sha256:03c091bb752349402f23ee43bb2bff6bd80ccab7c9df6b88ad4322258d6960fc\"},\n {file = \"propcache-0.3.0-cp39-cp39-macosx_10_9_x86_64.whl\", hash = \"sha256:46ed02532cb66612d42ae5c3929b5e98ae330ea0f3900bc66ec5f4862069519b\"},\n {file = \"propcache-0.3.0-cp39-cp39-macosx_11_0_arm64.whl\", hash = \"sha256:11ae6a8a01b8a4dc79093b5d3ca2c8a4436f5ee251a9840d7790dccbd96cb649\"},\n {file = \"propcache-0.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:df03cd88f95b1b99052b52b1bb92173229d7a674df0ab06d2b25765ee8404bce\"},\n {file = \"propcache-0.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:03acd9ff19021bd0567582ac88f821b66883e158274183b9e5586f678984f8fe\"},\n {file = \"propcache-0.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:cd54895e4ae7d32f1e3dd91261df46ee7483a735017dc6f987904f194aa5fd14\"},\n {file = \"propcache-0.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:26a67e5c04e3119594d8cfae517f4b9330c395df07ea65eab16f3d559b7068fe\"},\n {file = \"propcache-0.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:ee25f1ac091def37c4b59d192bbe3a206298feeb89132a470325bf76ad122a1e\"},\n {file = \"propcache-0.3.0-cp39-cp39-musllinux_1_2_aarch64.whl\", hash = \"sha256:58e6d2a5a7cb3e5f166fd58e71e9a4ff504be9dc61b88167e75f835da5764d07\"},\n {file = \"propcache-0.3.0-cp39-cp39-musllinux_1_2_armv7l.whl\", hash = \"sha256:be90c94570840939fecedf99fa72839aed70b0ced449b415c85e01ae67422c90\"},\n {file = \"propcache-0.3.0-cp39-cp39-musllinux_1_2_i686.whl\", hash = \"sha256:49ea05212a529c2caffe411e25a59308b07d6e10bf2505d77da72891f9a05641\"},\n {file = \"propcache-0.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl\", hash = \"sha256:119e244ab40f70a98c91906d4c1f4c5f2e68bd0b14e7ab0a06922038fae8a20f\"},\n {file = \"propcache-0.3.0-cp39-cp39-musllinux_1_2_s390x.whl\", hash = \"sha256:507c5357a8d8b4593b97fb669c50598f4e6cccbbf77e22fa9598aba78292b4d7\"},\n {file = \"propcache-0.3.0-cp39-cp39-musllinux_1_2_x86_64.whl\", hash = \"sha256:8526b0941ec5a40220fc4dfde76aed58808e2b309c03e9fa8e2260083ef7157f\"},\n {file = 
\"propcache-0.3.0-cp39-cp39-win32.whl\", hash = \"sha256:7cedd25e5f678f7738da38037435b340694ab34d424938041aa630d8bac42663\"},\n {file = \"propcache-0.3.0-cp39-cp39-win_amd64.whl\", hash = \"sha256:bf4298f366ca7e1ad1d21bbb58300a6985015909964077afd37559084590c929\"},\n {file = \"propcache-0.3.0-py3-none-any.whl\", hash = \"sha256:67dda3c7325691c2081510e92c561f465ba61b975f481735aefdfc845d2cd043\"},\n {file = \"propcache-0.3.0.tar.gz\", hash = \"sha256:a8fd93de4e1d278046345f49e2238cdb298589325849b2645d4a94c53faeffc5\"},\n]\n\n[[package]]\nname = \"protobuf\"\nversion = \"5.29.4\"\ndescription = \"\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"protobuf-5.29.4-cp310-abi3-win32.whl\", hash = \"sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7\"},\n {file = \"protobuf-5.29.4-cp310-abi3-win_amd64.whl\", hash = \"sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d\"},\n {file = \"protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl\", hash = \"sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0\"},\n {file = \"protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl\", hash = \"sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e\"},\n {file = \"protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl\", hash = \"sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922\"},\n {file = \"protobuf-5.29.4-cp38-cp38-win32.whl\", hash = \"sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de\"},\n {file = \"protobuf-5.29.4-cp38-cp38-win_amd64.whl\", hash = \"sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68\"},\n {file = \"protobuf-5.29.4-cp39-cp39-win32.whl\", hash = \"sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe\"},\n {file = \"protobuf-5.29.4-cp39-cp39-win_amd64.whl\", hash = \"sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812\"},\n {file = \"protobuf-5.29.4-py3-none-any.whl\", hash = \"sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862\"},\n {file = \"protobuf-5.29.4.tar.gz\", hash = \"sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99\"},\n]\n\n[[package]]\nname = \"pydantic\"\nversion = \"2.10.6\"\ndescription = \"Data validation using Python type hints\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"pydantic-2.10.6-py3-none-any.whl\", hash = \"sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584\"},\n {file = \"pydantic-2.10.6.tar.gz\", hash = \"sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236\"},\n]\n\n[package.dependencies]\nannotated-types = \">=0.6.0\"\npydantic-core = \"2.27.2\"\ntyping-extensions = \">=4.12.2\"\n\n[package.extras]\nemail = [\"email-validator (>=2.0.0)\"]\ntimezone = [\"tzdata\"]\n\n[[package]]\nname = \"pydantic-core\"\nversion = \"2.27.2\"\ndescription = \"Core functionality for Pydantic validation and serialization\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl\", hash = \"sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl\", hash = \"sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = 
\"sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl\", hash = \"sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl\", hash = \"sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl\", hash = \"sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl\", hash = \"sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-win32.whl\", hash = \"sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4\"},\n {file = \"pydantic_core-2.27.2-cp310-cp310-win_amd64.whl\", hash = \"sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl\", hash = \"sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl\", hash = \"sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl\", hash = \"sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl\", hash = \"sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl\", hash = \"sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl\", hash = 
\"sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-win32.whl\", hash = \"sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-win_amd64.whl\", hash = \"sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc\"},\n {file = \"pydantic_core-2.27.2-cp311-cp311-win_arm64.whl\", hash = \"sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl\", hash = \"sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl\", hash = \"sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl\", hash = \"sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl\", hash = \"sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl\", hash = \"sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-win32.whl\", hash = \"sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-win_amd64.whl\", hash = \"sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9\"},\n {file = \"pydantic_core-2.27.2-cp312-cp312-win_arm64.whl\", hash = \"sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl\", hash = \"sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl\", hash = \"sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl\", hash = \"sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9\"},\n {file = 
\"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl\", hash = \"sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl\", hash = \"sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl\", hash = \"sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-win32.whl\", hash = \"sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-win_amd64.whl\", hash = \"sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee\"},\n {file = \"pydantic_core-2.27.2-cp313-cp313-win_arm64.whl\", hash = \"sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl\", hash = \"sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl\", hash = \"sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl\", hash = \"sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl\", hash = \"sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl\", hash = \"sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl\", hash = \"sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-win32.whl\", hash = 
\"sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b\"},\n {file = \"pydantic_core-2.27.2-cp38-cp38-win_amd64.whl\", hash = \"sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl\", hash = \"sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl\", hash = \"sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl\", hash = \"sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl\", hash = \"sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl\", hash = \"sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl\", hash = \"sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-win32.whl\", hash = \"sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e\"},\n {file = \"pydantic_core-2.27.2-cp39-cp39-win_amd64.whl\", hash = \"sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67\"},\n {file = \"pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl\", hash = \"sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e\"},\n {file = \"pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl\", hash = \"sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8\"},\n {file = \"pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3\"},\n {file = \"pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f\"},\n {file = \"pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133\"},\n {file = \"pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl\", hash = \"sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc\"},\n {file = \"pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl\", hash = 
\"sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50\"},\n {file = \"pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl\", hash = \"sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9\"},\n {file = \"pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl\", hash = \"sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151\"},\n {file = \"pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl\", hash = \"sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656\"},\n {file = \"pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl\", hash = \"sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278\"},\n {file = \"pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb\"},\n {file = \"pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd\"},\n {file = \"pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl\", hash = \"sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc\"},\n {file = \"pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl\", hash = \"sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b\"},\n {file = \"pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl\", hash = \"sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b\"},\n {file = \"pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl\", hash = \"sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2\"},\n {file = \"pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl\", hash = \"sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35\"},\n {file = \"pydantic_core-2.27.2.tar.gz\", hash = \"sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39\"},\n]\n\n[package.dependencies]\ntyping-extensions = \">=4.6.0,<4.7.0 || >4.7.0\"\n\n[[package]]\nname = \"pydantic-settings\"\nversion = \"2.8.1\"\ndescription = \"Settings management using Pydantic\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"pydantic_settings-2.8.1-py3-none-any.whl\", hash = \"sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c\"},\n {file = \"pydantic_settings-2.8.1.tar.gz\", hash = \"sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585\"},\n]\n\n[package.dependencies]\npydantic = \">=2.7.0\"\npython-dotenv = \">=0.21.0\"\n\n[package.extras]\nazure-key-vault = [\"azure-identity (>=1.16.0)\", \"azure-keyvault-secrets (>=4.8.0)\"]\ntoml = [\"tomli (>=2.0.1)\"]\nyaml = [\"pyyaml (>=6.0.1)\"]\n\n[[package]]\nname = \"python-dateutil\"\nversion = \"2.9.0.post0\"\ndescription = \"Extensions to the standard Python datetime module\"\noptional = false\npython-versions = \"!=3.0.*,!=3.1.*,!=3.2.*,>=2.7\"\ngroups = [\"main\"]\nfiles = [\n {file = \"python-dateutil-2.9.0.post0.tar.gz\", hash = \"sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3\"},\n {file = \"python_dateutil-2.9.0.post0-py2.py3-none-any.whl\", hash = \"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427\"},\n]\n\n[package.dependencies]\nsix = \">=1.5\"\n\n[[package]]\nname = \"python-dotenv\"\nversion = \"1.0.1\"\ndescription = \"Read key-value pairs from a .env file and set them as 
environment variables\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"python-dotenv-1.0.1.tar.gz\", hash = \"sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca\"},\n {file = \"python_dotenv-1.0.1-py3-none-any.whl\", hash = \"sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a\"},\n]\n\n[package.extras]\ncli = [\"click (>=5.0)\"]\n\n[[package]]\nname = \"pyyaml\"\nversion = \"6.0.2\"\ndescription = \"YAML parser and emitter for Python\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl\", hash = \"sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086\"},\n {file = \"PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl\", hash = \"sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf\"},\n {file = \"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237\"},\n {file = \"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b\"},\n {file = \"PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed\"},\n {file = \"PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl\", hash = \"sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180\"},\n {file = \"PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl\", hash = \"sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68\"},\n {file = \"PyYAML-6.0.2-cp310-cp310-win32.whl\", hash = \"sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99\"},\n {file = \"PyYAML-6.0.2-cp310-cp310-win_amd64.whl\", hash = \"sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e\"},\n {file = \"PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl\", hash = \"sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774\"},\n {file = \"PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl\", hash = \"sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee\"},\n {file = \"PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c\"},\n {file = \"PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317\"},\n {file = \"PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85\"},\n {file = \"PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl\", hash = \"sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4\"},\n {file = \"PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl\", hash = \"sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e\"},\n {file = \"PyYAML-6.0.2-cp311-cp311-win32.whl\", hash = \"sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5\"},\n {file = \"PyYAML-6.0.2-cp311-cp311-win_amd64.whl\", hash = \"sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44\"},\n {file = \"PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl\", hash = \"sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab\"},\n {file = 
\"PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725\"},\n {file = \"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5\"},\n {file = \"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425\"},\n {file = \"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476\"},\n {file = \"PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl\", hash = \"sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48\"},\n {file = \"PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl\", hash = \"sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b\"},\n {file = \"PyYAML-6.0.2-cp312-cp312-win32.whl\", hash = \"sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4\"},\n {file = \"PyYAML-6.0.2-cp312-cp312-win_amd64.whl\", hash = \"sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8\"},\n {file = \"PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl\", hash = \"sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba\"},\n {file = \"PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl\", hash = \"sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1\"},\n {file = \"PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133\"},\n {file = \"PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484\"},\n {file = \"PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5\"},\n {file = \"PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl\", hash = \"sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc\"},\n {file = \"PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl\", hash = \"sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652\"},\n {file = \"PyYAML-6.0.2-cp313-cp313-win32.whl\", hash = \"sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183\"},\n {file = \"PyYAML-6.0.2-cp313-cp313-win_amd64.whl\", hash = \"sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563\"},\n {file = \"PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl\", hash = \"sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a\"},\n {file = \"PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5\"},\n {file = \"PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d\"},\n {file = \"PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083\"},\n {file = \"PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl\", hash = \"sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706\"},\n {file = \"PyYAML-6.0.2-cp38-cp38-win32.whl\", hash = \"sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a\"},\n 
{file = \"PyYAML-6.0.2-cp38-cp38-win_amd64.whl\", hash = \"sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff\"},\n {file = \"PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl\", hash = \"sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d\"},\n {file = \"PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl\", hash = \"sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f\"},\n {file = \"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290\"},\n {file = \"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12\"},\n {file = \"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19\"},\n {file = \"PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl\", hash = \"sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e\"},\n {file = \"PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl\", hash = \"sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725\"},\n {file = \"PyYAML-6.0.2-cp39-cp39-win32.whl\", hash = \"sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631\"},\n {file = \"PyYAML-6.0.2-cp39-cp39-win_amd64.whl\", hash = \"sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8\"},\n {file = \"pyyaml-6.0.2.tar.gz\", hash = \"sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e\"},\n]\n\n[[package]]\nname = \"setuptools\"\nversion = \"78.0.2\"\ndescription = \"Easily download, build, install, upgrade, and uninstall Python packages\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"setuptools-78.0.2-py3-none-any.whl\", hash = \"sha256:4a612c80e1f1d71b80e4906ce730152e8dec23df439f82731d9d0b608d7b700d\"},\n {file = \"setuptools-78.0.2.tar.gz\", hash = \"sha256:137525e6afb9022f019d6e884a319017f9bf879a0d8783985d32cbc8683cab93\"},\n]\n\n[package.extras]\ncheck = [\"pytest-checkdocs (>=2.4)\", \"pytest-ruff (>=0.2.1)\", \"ruff (>=0.8.0)\"]\ncore = [\"importlib_metadata (>=6)\", \"jaraco.functools (>=4)\", \"jaraco.text (>=3.7)\", \"more_itertools\", \"more_itertools (>=8.8)\", \"packaging (>=24.2)\", \"platformdirs (>=4.2.2)\", \"tomli (>=2.0.1)\", \"wheel (>=0.43.0)\"]\ncover = [\"pytest-cov\"]\ndoc = [\"furo\", \"jaraco.packaging (>=9.3)\", \"jaraco.tidelift (>=1.4)\", \"pygments-github-lexers (==0.0.5)\", \"pyproject-hooks (!=1.1)\", \"rst.linker (>=1.9)\", \"sphinx (>=3.5)\", \"sphinx-favicon\", \"sphinx-inline-tabs\", \"sphinx-lint\", \"sphinx-notfound-page (>=1,<2)\", \"sphinx-reredirects\", \"sphinxcontrib-towncrier\", \"towncrier (<24.7)\"]\nenabler = [\"pytest-enabler (>=2.2)\"]\ntest = [\"build[virtualenv] (>=1.0.3)\", \"filelock (>=3.4.0)\", \"ini2toml[lite] (>=0.14)\", \"jaraco.develop (>=7.21)\", \"jaraco.envs (>=2.2)\", \"jaraco.path (>=3.7.2)\", \"jaraco.test (>=5.5)\", \"packaging (>=24.2)\", \"pip (>=19.1)\", \"pyproject-hooks (!=1.1)\", \"pytest (>=6,!=8.1.*)\", \"pytest-home (>=0.5)\", \"pytest-perf\", \"pytest-subprocess\", \"pytest-timeout\", \"pytest-xdist (>=3)\", \"tomli-w (>=1.0.0)\", \"virtualenv (>=13.0.0)\", \"wheel (>=0.44.0)\"]\ntype = [\"importlib_metadata (>=7.0.2)\", \"jaraco.develop (>=7.21)\", \"mypy (==1.14.*)\", \"pytest-mypy\"]\n\n[[package]]\nname = \"six\"\nversion = \"1.17.0\"\ndescription = \"Python 2 
and 3 compatibility utilities\"\noptional = false\npython-versions = \"!=3.0.*,!=3.1.*,!=3.2.*,>=2.7\"\ngroups = [\"main\"]\nfiles = [\n {file = \"six-1.17.0-py2.py3-none-any.whl\", hash = \"sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274\"},\n {file = \"six-1.17.0.tar.gz\", hash = \"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81\"},\n]\n\n[[package]]\nname = \"tenacity\"\nversion = \"9.0.0\"\ndescription = \"Retry code until it succeeds\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"tenacity-9.0.0-py3-none-any.whl\", hash = \"sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539\"},\n {file = \"tenacity-9.0.0.tar.gz\", hash = \"sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b\"},\n]\n\n[package.extras]\ndoc = [\"reno\", \"sphinx\"]\ntest = [\"pytest\", \"tornado (>=4.5)\", \"typeguard\"]\n\n[[package]]\nname = \"types-python-dateutil\"\nversion = \"2.9.0.20241206\"\ndescription = \"Typing stubs for python-dateutil\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"types_python_dateutil-2.9.0.20241206-py3-none-any.whl\", hash = \"sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53\"},\n {file = \"types_python_dateutil-2.9.0.20241206.tar.gz\", hash = \"sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb\"},\n]\n\n[[package]]\nname = \"types-pyyaml\"\nversion = \"6.0.12.20241230\"\ndescription = \"Typing stubs for PyYAML\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"types_PyYAML-6.0.12.20241230-py3-none-any.whl\", hash = \"sha256:fa4d32565219b68e6dee5f67534c722e53c00d1cfc09c435ef04d7353e1e96e6\"},\n {file = \"types_pyyaml-6.0.12.20241230.tar.gz\", hash = \"sha256:7f07622dbd34bb9c8b264fe860a17e0efcad00d50b5f27e93984909d9363498c\"},\n]\n\n[[package]]\nname = \"typing-extensions\"\nversion = \"4.12.2\"\ndescription = \"Backported and Experimental Type Hints for Python 3.8+\"\noptional = false\npython-versions = \">=3.8\"\ngroups = [\"main\"]\nfiles = [\n {file = \"typing_extensions-4.12.2-py3-none-any.whl\", hash = \"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d\"},\n {file = \"typing_extensions-4.12.2.tar.gz\", hash = \"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8\"},\n]\n\n[[package]]\nname = \"urllib3\"\nversion = \"2.3.0\"\ndescription = \"HTTP library with thread-safe connection pooling, file post, and more.\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"urllib3-2.3.0-py3-none-any.whl\", hash = \"sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df\"},\n {file = \"urllib3-2.3.0.tar.gz\", hash = \"sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d\"},\n]\n\n[package.extras]\nbrotli = [\"brotli (>=1.0.9)\", \"brotlicffi (>=0.8.0)\"]\nh2 = [\"h2 (>=4,<5)\"]\nsocks = [\"pysocks (>=1.5.6,!=1.5.7,<2.0)\"]\nzstd = [\"zstandard (>=0.18.0)\"]\n\n[[package]]\nname = \"yarl\"\nversion = \"1.18.3\"\ndescription = \"Yet another URL library\"\noptional = false\npython-versions = \">=3.9\"\ngroups = [\"main\"]\nfiles = [\n {file = \"yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl\", hash = \"sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34\"},\n {file = \"yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl\", hash = 
\"sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7\"},\n {file = \"yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl\", hash = \"sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed\"},\n {file = \"yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde\"},\n {file = \"yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b\"},\n {file = \"yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5\"},\n {file = \"yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc\"},\n {file = \"yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd\"},\n {file = \"yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl\", hash = \"sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990\"},\n {file = \"yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl\", hash = \"sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db\"},\n {file = \"yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl\", hash = \"sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62\"},\n {file = \"yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl\", hash = \"sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760\"},\n {file = \"yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl\", hash = \"sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b\"},\n {file = \"yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl\", hash = \"sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690\"},\n {file = \"yarl-1.18.3-cp310-cp310-win32.whl\", hash = \"sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6\"},\n {file = \"yarl-1.18.3-cp310-cp310-win_amd64.whl\", hash = \"sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8\"},\n {file = \"yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl\", hash = \"sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069\"},\n {file = \"yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl\", hash = \"sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193\"},\n {file = \"yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl\", hash = \"sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889\"},\n {file = \"yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8\"},\n {file = \"yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca\"},\n {file = \"yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8\"},\n {file = \"yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae\"},\n {file = \"yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = 
\"sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3\"},\n {file = \"yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl\", hash = \"sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb\"},\n {file = \"yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl\", hash = \"sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e\"},\n {file = \"yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl\", hash = \"sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59\"},\n {file = \"yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl\", hash = \"sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d\"},\n {file = \"yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl\", hash = \"sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e\"},\n {file = \"yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl\", hash = \"sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a\"},\n {file = \"yarl-1.18.3-cp311-cp311-win32.whl\", hash = \"sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1\"},\n {file = \"yarl-1.18.3-cp311-cp311-win_amd64.whl\", hash = \"sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5\"},\n {file = \"yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl\", hash = \"sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50\"},\n {file = \"yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl\", hash = \"sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576\"},\n {file = \"yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl\", hash = \"sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640\"},\n {file = \"yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2\"},\n {file = \"yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75\"},\n {file = \"yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512\"},\n {file = \"yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba\"},\n {file = \"yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb\"},\n {file = \"yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl\", hash = \"sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272\"},\n {file = \"yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl\", hash = \"sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6\"},\n {file = \"yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl\", hash = \"sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e\"},\n {file = \"yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl\", hash = \"sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb\"},\n {file = \"yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl\", hash = \"sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393\"},\n {file = \"yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl\", hash = \"sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285\"},\n {file = \"yarl-1.18.3-cp312-cp312-win32.whl\", hash = 
\"sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2\"},\n {file = \"yarl-1.18.3-cp312-cp312-win_amd64.whl\", hash = \"sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477\"},\n {file = \"yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl\", hash = \"sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb\"},\n {file = \"yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl\", hash = \"sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa\"},\n {file = \"yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl\", hash = \"sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782\"},\n {file = \"yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0\"},\n {file = \"yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482\"},\n {file = \"yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186\"},\n {file = \"yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58\"},\n {file = \"yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53\"},\n {file = \"yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl\", hash = \"sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2\"},\n {file = \"yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl\", hash = \"sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8\"},\n {file = \"yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl\", hash = \"sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1\"},\n {file = \"yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl\", hash = \"sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a\"},\n {file = \"yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl\", hash = \"sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10\"},\n {file = \"yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl\", hash = \"sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8\"},\n {file = \"yarl-1.18.3-cp313-cp313-win32.whl\", hash = \"sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d\"},\n {file = \"yarl-1.18.3-cp313-cp313-win_amd64.whl\", hash = \"sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c\"},\n {file = \"yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl\", hash = \"sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04\"},\n {file = \"yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl\", hash = \"sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719\"},\n {file = \"yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl\", hash = \"sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e\"},\n {file = \"yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl\", hash = \"sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee\"},\n {file = \"yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl\", hash = \"sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789\"},\n {file = 
\"yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl\", hash = \"sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8\"},\n {file = \"yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl\", hash = \"sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c\"},\n {file = \"yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl\", hash = \"sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5\"},\n {file = \"yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl\", hash = \"sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1\"},\n {file = \"yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl\", hash = \"sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24\"},\n {file = \"yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl\", hash = \"sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318\"},\n {file = \"yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl\", hash = \"sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985\"},\n {file = \"yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl\", hash = \"sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910\"},\n {file = \"yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl\", hash = \"sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1\"},\n {file = \"yarl-1.18.3-cp39-cp39-win32.whl\", hash = \"sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5\"},\n {file = \"yarl-1.18.3-cp39-cp39-win_amd64.whl\", hash = \"sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9\"},\n {file = \"yarl-1.18.3-py3-none-any.whl\", hash = \"sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b\"},\n {file = \"yarl-1.18.3.tar.gz\", hash = \"sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1\"},\n]\n\n[package.dependencies]\nidna = \">=2.0\"\nmultidict = \">=4.0\"\npropcache = \">=0.2.0\"\n\n[metadata]\nlock-version = \"2.1\"\npython-versions = \"^3.10\"\ncontent-hash = \"74c12e499aa797ca5c8559af579f1212b0e4e3a77f068f9385db39d70ba304e0\"\n", - "source": "out/python/quickstart/poetry.lock", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/pyproject.ts b/frontend/docs/lib/generated/snips/python/quickstart/pyproject.ts deleted file mode 100644 index 3df3a7167..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/pyproject.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "[tool.poetry]\nname = \"hatchet-python-quickstart\"\nversion = \"0.1.0\"\ndescription = \"Simple Setup to Run Hatchet Workflows\"\nauthors = [\"gabriel ruttner \"]\nreadme = \"README.md\"\npackage-mode = false\n\n[tool.poetry.dependencies]\npython = \"^3.10\"\nhatchet-sdk = \"1.0.0a1\"\n\n\n[build-system]\nrequires = [\"poetry-core\"]\nbuild-backend = \"poetry.core.masonry.api\"\n\n[tool.poetry.scripts]\nsimple = \"src.run:main\"\nworker = \"src.worker:main\"\n", - "source": "out/python/quickstart/pyproject.toml", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/run.ts b/frontend/docs/lib/generated/snips/python/quickstart/run.ts deleted file mode 100644 index faffcfc16..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/run.ts +++ 
/dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\n\nfrom .workflows.first_task import SimpleInput, first_task\n\n\nasync def main() -> None:\n result = await first_task.aio_run(SimpleInput(message=\"Hello World!\"))\n\n print(\n \"Finished running task, and got the transformed message! The transformed message is:\",\n result.transformed_message,\n )\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/quickstart/run.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/worker.ts b/frontend/docs/lib/generated/snips/python/quickstart/worker.ts deleted file mode 100644 index a6c76f600..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from .hatchet_client import hatchet\nfrom .workflows.first_task import first_task\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"first-worker\",\n slots=10,\n workflows=[first_task],\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/quickstart/worker.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/workflows/__init__.ts b/frontend/docs/lib/generated/snips/python/quickstart/workflows/__init__.ts deleted file mode 100644 index 48c1784a7..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/workflows/__init__.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "", - "source": "out/python/quickstart/workflows/__init__.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/workflows/first_task.ts b/frontend/docs/lib/generated/snips/python/quickstart/workflows/first_task.ts deleted file mode 100644 index a24ccbe56..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/workflows/first_task.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from pydantic import BaseModel\n\nfrom hatchet_sdk import Context\n\nfrom ..hatchet_client import hatchet\n\n\nclass SimpleInput(BaseModel):\n message: str\n\n\nclass SimpleOutput(BaseModel):\n transformed_message: str\n\n\n# Declare the task to run\n@hatchet.task(name=\"first-task\", input_validator=SimpleInput)\ndef first_task(input: SimpleInput, ctx: Context) -> SimpleOutput:\n print(\"first-task task called\")\n\n return SimpleOutput(transformed_message=input.message.lower())\n", - "source": "out/python/quickstart/workflows/first_task.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/quickstart/workflows/index.ts b/frontend/docs/lib/generated/snips/python/quickstart/workflows/index.ts deleted file mode 100644 index 99c1c721e..000000000 --- a/frontend/docs/lib/generated/snips/python/quickstart/workflows/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import __init__ from './__init__'; -import first_task from './first_task'; - -export { __init__ } -export { first_task } diff --git 
a/frontend/docs/lib/generated/snips/python/rate_limit/dynamic.ts b/frontend/docs/lib/generated/snips/python/rate_limit/dynamic.ts deleted file mode 100644 index ffb384a20..000000000 --- a/frontend/docs/lib/generated/snips/python/rate_limit/dynamic.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\nfrom hatchet_sdk.rate_limit import RateLimit\n\nhatchet = Hatchet(debug=True)\n\n\nclass DynamicRateLimitInput(BaseModel):\n group: str\n units: int\n limit: int\n\n\ndynamic_rate_limit_workflow = hatchet.workflow(\n name=\"DynamicRateLimitWorkflow\", input_validator=DynamicRateLimitInput\n)\n\n\n@dynamic_rate_limit_workflow.task(\n rate_limits=[\n RateLimit(\n dynamic_key='\"LIMIT:\"+input.group',\n units=\"input.units\",\n limit=\"input.limit\",\n )\n ]\n)\ndef step1(input: DynamicRateLimitInput, ctx: Context) -> None:\n print(\"executed step1\")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"rate-limit-worker\", slots=10, workflows=[dynamic_rate_limit_workflow]\n )\n worker.start()\n", - "source": "out/python/rate_limit/dynamic.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/rate_limit/index.ts b/frontend/docs/lib/generated/snips/python/rate_limit/index.ts deleted file mode 100644 index 19ada7351..000000000 --- a/frontend/docs/lib/generated/snips/python/rate_limit/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import dynamic from './dynamic'; -import trigger from './trigger'; -import worker from './worker'; - -export { dynamic } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/rate_limit/trigger.ts b/frontend/docs/lib/generated/snips/python/rate_limit/trigger.ts deleted file mode 100644 index 2ad8e52ca..000000000 --- a/frontend/docs/lib/generated/snips/python/rate_limit/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.rate_limit.worker import rate_limit_workflow\nfrom hatchet_sdk.hatchet import Hatchet\n\nhatchet = Hatchet(debug=True)\n\nrate_limit_workflow.run()\nrate_limit_workflow.run()\nrate_limit_workflow.run()\n", - "source": "out/python/rate_limit/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/rate_limit/worker.ts b/frontend/docs/lib/generated/snips/python/rate_limit/worker.ts deleted file mode 100644 index 232477de6..000000000 --- a/frontend/docs/lib/generated/snips/python/rate_limit/worker.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\nfrom hatchet_sdk.rate_limit import RateLimit, RateLimitDuration\n\nhatchet = Hatchet(debug=True)\n\n\n# > Workflow\nclass RateLimitInput(BaseModel):\n user_id: str\n\n\nrate_limit_workflow = hatchet.workflow(\n name=\"RateLimitWorkflow\", input_validator=RateLimitInput\n)\n\n\n\n# > Static\nRATE_LIMIT_KEY = \"test-limit\"\n\n\n@rate_limit_workflow.task(rate_limits=[RateLimit(static_key=RATE_LIMIT_KEY, units=1)])\ndef step_1(input: RateLimitInput, ctx: Context) -> None:\n print(\"executed step_1\")\n\n\n\n# > Dynamic\n\n\n@rate_limit_workflow.task(\n 
rate_limits=[\n RateLimit(\n dynamic_key=\"input.user_id\",\n units=1,\n limit=10,\n duration=RateLimitDuration.MINUTE,\n )\n ]\n)\ndef step_2(input: RateLimitInput, ctx: Context) -> None:\n print(\"executed step_2\")\n\n\n\n\ndef main() -> None:\n hatchet.rate_limits.put(RATE_LIMIT_KEY, 2, RateLimitDuration.SECOND)\n\n worker = hatchet.worker(\n \"rate-limit-worker\", slots=10, workflows=[rate_limit_workflow]\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/rate_limit/worker.py", - "blocks": { - "workflow": { - "start": 10, - "stop": 17 - }, - "static": { - "start": 21, - "stop": 28 - }, - "dynamic": { - "start": 31, - "stop": 46 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/retries/index.ts b/frontend/docs/lib/generated/snips/python/retries/index.ts deleted file mode 100644 index 30b1adbeb..000000000 --- a/frontend/docs/lib/generated/snips/python/retries/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import worker from './worker'; - -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/retries/worker.ts b/frontend/docs/lib/generated/snips/python/retries/worker.ts deleted file mode 100644 index bc3978a8e..000000000 --- a/frontend/docs/lib/generated/snips/python/retries/worker.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\nsimple_workflow = hatchet.workflow(name=\"SimpleRetryWorkflow\")\nbackoff_workflow = hatchet.workflow(name=\"BackoffWorkflow\")\n\n\n# > Simple Step Retries\n@simple_workflow.task(retries=3)\ndef always_fail(input: EmptyModel, ctx: Context) -> dict[str, str]:\n raise Exception(\"simple task failed\")\n\n\n\n\n# > Retries with Count\n@simple_workflow.task(retries=3)\ndef fail_twice(input: EmptyModel, ctx: Context) -> dict[str, str]:\n if ctx.retry_count < 2:\n raise Exception(\"simple task failed\")\n\n return {\"status\": \"success\"}\n\n\n\n\n# > Retries with Backoff\n@backoff_workflow.task(\n retries=10,\n # 👀 Maximum number of seconds to wait between retries\n backoff_max_seconds=10,\n # 👀 Factor to increase the wait time between retries.\n # This sequence will be 2s, 4s, 8s, 10s, 10s, 10s... 
due to the maxSeconds limit\n backoff_factor=2.0,\n)\ndef backoff_task(input: EmptyModel, ctx: Context) -> dict[str, str]:\n if ctx.retry_count < 3:\n raise Exception(\"backoff task failed\")\n\n return {\"status\": \"success\"}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\"backoff-worker\", slots=4, workflows=[backoff_workflow])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/retries/worker.py", - "blocks": { - "simple_step_retries": { - "start": 10, - "stop": 14 - }, - "retries_with_count": { - "start": 18, - "stop": 25 - }, - "retries_with_backoff": { - "start": 29, - "stop": 43 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/return_exceptions/index.ts b/frontend/docs/lib/generated/snips/python/return_exceptions/index.ts deleted file mode 100644 index be3bd6a32..000000000 --- a/frontend/docs/lib/generated/snips/python/return_exceptions/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import test_return_exceptions from './test_return_exceptions'; -import worker from './worker'; - -export { test_return_exceptions } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/return_exceptions/test_return_exceptions.ts b/frontend/docs/lib/generated/snips/python/return_exceptions/test_return_exceptions.ts deleted file mode 100644 index 592103ae2..000000000 --- a/frontend/docs/lib/generated/snips/python/return_exceptions/test_return_exceptions.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\n\nimport pytest\n\nfrom examples.return_exceptions.worker import Input, return_exceptions_task\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_return_exceptions_async() -> None:\n results = await return_exceptions_task.aio_run_many(\n [\n return_exceptions_task.create_bulk_run_item(input=Input(index=i))\n for i in range(10)\n ],\n return_exceptions=True,\n )\n\n for i, result in enumerate(results):\n if i % 2 == 0:\n assert isinstance(result, Exception)\n assert f\"error in task with index {i}\" in str(result)\n else:\n assert result == {\"message\": \"this is a successful task.\"}\n\n\ndef test_return_exceptions_sync() -> None:\n results = return_exceptions_task.run_many(\n [\n return_exceptions_task.create_bulk_run_item(input=Input(index=i))\n for i in range(10)\n ],\n return_exceptions=True,\n )\n\n for i, result in enumerate(results):\n if i % 2 == 0:\n assert isinstance(result, Exception)\n assert f\"error in task with index {i}\" in str(result)\n else:\n assert result == {\"message\": \"this is a successful task.\"}\n", - "source": "out/python/return_exceptions/test_return_exceptions.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/return_exceptions/worker.ts b/frontend/docs/lib/generated/snips/python/return_exceptions/worker.ts deleted file mode 100644 index 70612f9d5..000000000 --- a/frontend/docs/lib/generated/snips/python/return_exceptions/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet()\n\n\nclass Input(EmptyModel):\n index: int\n\n\n@hatchet.task(input_validator=Input)\nasync def return_exceptions_task(input: Input, ctx: Context) -> 
dict[str, str]:\n if input.index % 2 == 0:\n raise ValueError(f\"error in task with index {input.index}\")\n\n return {\"message\": \"this is a successful task.\"}\n", - "source": "out/python/return_exceptions/worker.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/scheduled/index.ts b/frontend/docs/lib/generated/snips/python/scheduled/index.ts deleted file mode 100644 index ed1b5d617..000000000 --- a/frontend/docs/lib/generated/snips/python/scheduled/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import programatic_async from './programatic-async'; -import programatic_sync from './programatic-sync'; - -export { programatic_async } -export { programatic_sync } diff --git a/frontend/docs/lib/generated/snips/python/scheduled/programatic-async.ts b/frontend/docs/lib/generated/snips/python/scheduled/programatic-async.ts deleted file mode 100644 index 0291cdf59..000000000 --- a/frontend/docs/lib/generated/snips/python/scheduled/programatic-async.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from datetime import datetime, timedelta, timezone\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n\nasync def create_scheduled() -> None:\n # > Create\n scheduled_run = await hatchet.scheduled.aio_create(\n workflow_name=\"simple-workflow\",\n trigger_at=datetime.now(tz=timezone.utc) + timedelta(seconds=10),\n input={\n \"data\": \"simple-workflow-data\",\n },\n additional_metadata={\n \"customer_id\": \"customer-a\",\n },\n )\n\n scheduled_run.metadata.id # the id of the scheduled run trigger\n\n # > Delete\n await hatchet.scheduled.aio_delete(scheduled_id=scheduled_run.metadata.id)\n\n # > List\n await hatchet.scheduled.aio_list()\n\n # > Get\n scheduled_run = await hatchet.scheduled.aio_get(\n scheduled_id=scheduled_run.metadata.id\n )\n", - "source": "out/python/scheduled/programatic-async.py", - "blocks": { - "create": { - "start": 10, - "stop": 21 - }, - "delete": { - "start": 24, - "stop": 24 - }, - "list": { - "start": 27, - "stop": 27 - }, - "get": { - "start": 30, - "stop": 32 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/scheduled/programatic-sync.ts b/frontend/docs/lib/generated/snips/python/scheduled/programatic-sync.ts deleted file mode 100644 index 1740b807c..000000000 --- a/frontend/docs/lib/generated/snips/python/scheduled/programatic-sync.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from datetime import datetime, timedelta, timezone\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet()\n\n# > Create\nscheduled_run = hatchet.scheduled.create(\n workflow_name=\"simple-workflow\",\n trigger_at=datetime.now(tz=timezone.utc) + timedelta(seconds=10),\n input={\n \"data\": \"simple-workflow-data\",\n },\n additional_metadata={\n \"customer_id\": \"customer-a\",\n },\n)\n\nid = scheduled_run.metadata.id # the id of the scheduled run trigger\n\n# > Delete\nhatchet.scheduled.delete(scheduled_id=scheduled_run.metadata.id)\n\n# > List\nscheduled_runs = hatchet.scheduled.list()\n\n# > Get\nscheduled_run = hatchet.scheduled.get(scheduled_id=scheduled_run.metadata.id)\n", - "source": "out/python/scheduled/programatic-sync.py", - "blocks": { - "create": { - "start": 8, - "stop": 19 - }, - "delete": { - "start": 22, - "stop": 22 - }, - 
"list": { - "start": 25, - "stop": 25 - }, - "get": { - "start": 28, - "stop": 28 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/simple/index.ts b/frontend/docs/lib/generated/snips/python/simple/index.ts deleted file mode 100644 index 8bdb03230..000000000 --- a/frontend/docs/lib/generated/snips/python/simple/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_simple_workflow from './test_simple_workflow'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_simple_workflow } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/simple/test_simple_workflow.ts b/frontend/docs/lib/generated/snips/python/simple/test_simple_workflow.ts deleted file mode 100644 index 9fe37955a..000000000 --- a/frontend/docs/lib/generated/snips/python/simple/test_simple_workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import pytest\n\nfrom examples.simple.worker import simple, simple_durable\nfrom hatchet_sdk import EmptyModel\nfrom hatchet_sdk.runnables.workflow import Standalone\n\n\n@pytest.mark.parametrize(\"task\", [simple, simple_durable])\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_simple_workflow_running_options(\n task: Standalone[EmptyModel, dict[str, str]],\n) -> None:\n x1 = task.run()\n x2 = await task.aio_run()\n\n x3 = task.run_many([task.create_bulk_run_item()])[0]\n x4 = (await task.aio_run_many([task.create_bulk_run_item()]))[0]\n\n x5 = task.run_no_wait().result()\n x6 = (await task.aio_run_no_wait()).result()\n x7 = [x.result() for x in task.run_many_no_wait([task.create_bulk_run_item()])][0]\n x8 = [\n x.result()\n for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n\n x9 = await task.run_no_wait().aio_result()\n x10 = await (await task.aio_run_no_wait()).aio_result()\n x11 = [\n await x.aio_result()\n for x in task.run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n x12 = [\n await x.aio_result()\n for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n\n assert all(\n x == {\"result\": \"Hello, world!\"}\n for x in [x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12]\n )\n", - "source": "out/python/simple/test_simple_workflow.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/simple/trigger.ts b/frontend/docs/lib/generated/snips/python/simple/trigger.ts deleted file mode 100644 index 3927c5988..000000000 --- a/frontend/docs/lib/generated/snips/python/simple/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.simple.worker import simple\n\nsimple.run()\n", - "source": "out/python/simple/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/simple/worker.ts b/frontend/docs/lib/generated/snips/python/simple/worker.ts deleted file mode 100644 index 4789ffbab..000000000 --- a/frontend/docs/lib/generated/snips/python/simple/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "# > Simple\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = 
Hatchet(debug=True)\n\n\n@hatchet.task()\ndef simple(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {\"result\": \"Hello, world!\"}\n\n\n@hatchet.durable_task()\ndef simple_durable(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {\"result\": \"Hello, world!\"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\"test-worker\", workflows=[simple, simple_durable])\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/simple/worker.py", - "blocks": { - "simple": { - "start": 2, - "stop": 22 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/sticky_workers/event.ts b/frontend/docs/lib/generated/snips/python/sticky_workers/event.ts deleted file mode 100644 index 5a80a8d2e..000000000 --- a/frontend/docs/lib/generated/snips/python/sticky_workers/event.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.sticky_workers.worker import sticky_workflow\nfrom hatchet_sdk import TriggerWorkflowOptions\n\nsticky_workflow.run(\n options=TriggerWorkflowOptions(additional_metadata={\"hello\": \"moon\"}),\n)\n", - "source": "out/python/sticky_workers/event.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/sticky_workers/index.ts b/frontend/docs/lib/generated/snips/python/sticky_workers/index.ts deleted file mode 100644 index 6c44274b5..000000000 --- a/frontend/docs/lib/generated/snips/python/sticky_workers/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import event from './event'; -import worker from './worker'; - -export { event } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/sticky_workers/worker.ts b/frontend/docs/lib/generated/snips/python/sticky_workers/worker.ts deleted file mode 100644 index 5a4956c70..000000000 --- a/frontend/docs/lib/generated/snips/python/sticky_workers/worker.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from hatchet_sdk import (\n Context,\n EmptyModel,\n Hatchet,\n StickyStrategy,\n TriggerWorkflowOptions,\n)\n\nhatchet = Hatchet(debug=True)\n\n# > StickyWorker\n\n\nsticky_workflow = hatchet.workflow(\n name=\"StickyWorkflow\",\n # 👀 Specify a sticky strategy when declaring the workflow\n sticky=StickyStrategy.SOFT,\n)\n\n\n@sticky_workflow.task()\ndef step1a(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n return {\"worker\": ctx.worker.id()}\n\n\n@sticky_workflow.task()\ndef step1b(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n return {\"worker\": ctx.worker.id()}\n\n\n\n# > StickyChild\n\nsticky_child_workflow = hatchet.workflow(\n name=\"StickyChildWorkflow\", sticky=StickyStrategy.SOFT\n)\n\n\n@sticky_workflow.task(parents=[step1a, step1b])\nasync def step2(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n ref = await sticky_child_workflow.aio_run_no_wait(\n options=TriggerWorkflowOptions(sticky=True)\n )\n\n await ref.aio_result()\n\n return {\"worker\": ctx.worker.id()}\n\n\n@sticky_child_workflow.task()\ndef child(input: EmptyModel, ctx: Context) -> dict[str, str | None]:\n return {\"worker\": ctx.worker.id()}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"sticky-worker\", slots=10, workflows=[sticky_workflow, sticky_child_workflow]\n )\n worker.start()\n\n\nif __name__ == 
\"__main__\":\n main()\n", - "source": "out/python/sticky_workers/worker.py", - "blocks": { - "stickyworker": { - "start": 12, - "stop": 30 - }, - "stickychild": { - "start": 33, - "stop": 54 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/streaming/async_stream.ts b/frontend/docs/lib/generated/snips/python/streaming/async_stream.ts deleted file mode 100644 index 2c5ba4f0f..000000000 --- a/frontend/docs/lib/generated/snips/python/streaming/async_stream.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\n\nfrom examples.streaming.worker import hatchet, stream_task\nfrom hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType\n\n\nasync def main() -> None:\n # > Consume\n ref = await stream_task.aio_run_no_wait()\n\n async for chunk in hatchet.runs.subscribe_to_stream(ref.workflow_run_id):\n print(chunk, flush=True, end=\"\")\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n", - "source": "out/python/streaming/async_stream.py", - "blocks": { - "consume": { - "start": 9, - "stop": 12 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/streaming/fastapi_proxy.ts b/frontend/docs/lib/generated/snips/python/streaming/fastapi_proxy.ts deleted file mode 100644 index 3bb384190..000000000 --- a/frontend/docs/lib/generated/snips/python/streaming/fastapi_proxy.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from typing import AsyncGenerator\n\nfrom fastapi import FastAPI\nfrom fastapi.responses import StreamingResponse\n\nfrom examples.streaming.worker import stream_task\nfrom hatchet_sdk import Hatchet\n\n# > FastAPI Proxy\nhatchet = Hatchet()\napp = FastAPI()\n\n\n@app.get(\"/stream\")\nasync def stream() -> StreamingResponse:\n ref = await stream_task.aio_run_no_wait()\n\n return StreamingResponse(\n hatchet.runs.subscribe_to_stream(ref.workflow_run_id), media_type=\"text/plain\"\n )\n\n\n\nif __name__ == \"__main__\":\n import uvicorn\n\n uvicorn.run(app, host=\"0.0.0.0\", port=8000)\n", - "source": "out/python/streaming/fastapi_proxy.py", - "blocks": { - "fastapi_proxy": { - "start": 10, - "stop": 22 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/streaming/index.ts b/frontend/docs/lib/generated/snips/python/streaming/index.ts deleted file mode 100644 index b790f0003..000000000 --- a/frontend/docs/lib/generated/snips/python/streaming/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import async_stream from './async_stream'; -import fastapi_proxy from './fastapi_proxy'; -import test_streaming from './test_streaming'; -import worker from './worker'; - -export { async_stream } -export { fastapi_proxy } -export { test_streaming } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/streaming/test_streaming.ts b/frontend/docs/lib/generated/snips/python/streaming/test_streaming.ts deleted file mode 100644 index 5360bd230..000000000 --- a/frontend/docs/lib/generated/snips/python/streaming/test_streaming.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from subprocess import Popen\nfrom typing import Any\n\nimport pytest\n\nfrom 
examples.streaming.worker import chunks, stream_task\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType\n\n\n@pytest.mark.parametrize(\n \"on_demand_worker\",\n [\n (\n [\"poetry\", \"run\", \"python\", \"examples/streaming/worker.py\", \"--slots\", \"1\"],\n 8008,\n )\n ],\n indirect=True,\n)\n@pytest.mark.parametrize(\"execution_number\", range(5)) # run test multiple times\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_streaming_ordering_and_completeness(\n execution_number: int,\n hatchet: Hatchet,\n on_demand_worker: Popen[Any],\n) -> None:\n ref = await stream_task.aio_run_no_wait()\n\n ix = 0\n anna_karenina = \"\"\n\n async for chunk in hatchet.runs.subscribe_to_stream(ref.workflow_run_id):\n assert chunks[ix] == chunk\n ix += 1\n anna_karenina += chunk\n\n assert ix == len(chunks)\n assert anna_karenina == \"\".join(chunks)\n\n await ref.aio_result()\n", - "source": "out/python/streaming/test_streaming.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/streaming/worker.ts b/frontend/docs/lib/generated/snips/python/streaming/worker.ts deleted file mode 100644 index 8216deca8..000000000 --- a/frontend/docs/lib/generated/snips/python/streaming/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nfrom typing import Generator\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=False)\n\n# > Streaming\n\nanna_karenina = \"\"\"\nHappy families are all alike; every unhappy family is unhappy in its own way.\n\nEverything was in confusion in the Oblonskys' house. 
The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him.\n\"\"\"\n\n\ndef create_chunks(content: str, n: int) -> Generator[str, None, None]:\n for i in range(0, len(content), n):\n yield content[i : i + n]\n\n\nchunks = list(create_chunks(anna_karenina, 10))\n\n\n@hatchet.task()\nasync def stream_task(input: EmptyModel, ctx: Context) -> None:\n # 👀 Sleeping to avoid race conditions\n await asyncio.sleep(2)\n\n for chunk in chunks:\n await ctx.aio_put_stream(chunk)\n await asyncio.sleep(0.20)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\"test-worker\", workflows=[stream_task])\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/streaming/worker.py", - "blocks": { - "streaming": { - "start": 9, - "stop": 34 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/timeout/index.ts b/frontend/docs/lib/generated/snips/python/timeout/index.ts deleted file mode 100644 index c7c15bf85..000000000 --- a/frontend/docs/lib/generated/snips/python/timeout/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import test_timeout from './test_timeout'; -import trigger from './trigger'; -import worker from './worker'; - -export { test_timeout } -export { trigger } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/timeout/test_timeout.ts b/frontend/docs/lib/generated/snips/python/timeout/test_timeout.ts deleted file mode 100644 index c63ae84e7..000000000 --- a/frontend/docs/lib/generated/snips/python/timeout/test_timeout.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import pytest\n\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_execution_timeout() -> None:\n run = timeout_wf.run_no_wait()\n\n with pytest.raises(\n Exception,\n match=\"(Task exceeded timeout|TIMED_OUT|Workflow run .* failed with multiple errors)\",\n ):\n await run.aio_result()\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_run_refresh_timeout() -> None:\n result = await refresh_timeout_wf.aio_run()\n\n assert result[\"refresh_task\"][\"status\"] == \"success\"\n", - "source": "out/python/timeout/test_timeout.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/timeout/trigger.ts b/frontend/docs/lib/generated/snips/python/timeout/trigger.ts deleted file mode 100644 index af046a225..000000000 --- a/frontend/docs/lib/generated/snips/python/timeout/trigger.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.timeout.worker import refresh_timeout_wf, timeout_wf\n\ntimeout_wf.run()\nrefresh_timeout_wf.run()\n", - "source": "out/python/timeout/trigger.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/timeout/worker.ts b/frontend/docs/lib/generated/snips/python/timeout/worker.ts deleted file mode 100644 index 832c82fbc..000000000 --- a/frontend/docs/lib/generated/snips/python/timeout/worker.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: 
Snippet = { - "language": "python", - "content": "import time\nfrom datetime import timedelta\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet, TaskDefaults\n\nhatchet = Hatchet(debug=True)\n\n# > ScheduleTimeout\ntimeout_wf = hatchet.workflow(\n name=\"TimeoutWorkflow\",\n task_defaults=TaskDefaults(execution_timeout=timedelta(minutes=2)),\n)\n\n\n# > ExecutionTimeout\n# 👀 Specify an execution timeout on a task\n@timeout_wf.task(\n execution_timeout=timedelta(seconds=5), schedule_timeout=timedelta(minutes=10)\n)\ndef timeout_task(input: EmptyModel, ctx: Context) -> dict[str, str]:\n time.sleep(30)\n return {\"status\": \"success\"}\n\n\n\nrefresh_timeout_wf = hatchet.workflow(name=\"RefreshTimeoutWorkflow\")\n\n\n# > RefreshTimeout\n@refresh_timeout_wf.task(execution_timeout=timedelta(seconds=4))\ndef refresh_task(input: EmptyModel, ctx: Context) -> dict[str, str]:\n ctx.refresh_timeout(timedelta(seconds=10))\n time.sleep(5)\n\n return {\"status\": \"success\"}\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"timeout-worker\", slots=4, workflows=[timeout_wf, refresh_timeout_wf]\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/timeout/worker.py", - "blocks": { - "scheduletimeout": { - "start": 9, - "stop": 12 - }, - "executiontimeout": { - "start": 16, - "stop": 24 - }, - "refreshtimeout": { - "start": 30, - "stop": 37 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/unit_testing/index.ts b/frontend/docs/lib/generated/snips/python/unit_testing/index.ts deleted file mode 100644 index f497eecd5..000000000 --- a/frontend/docs/lib/generated/snips/python/unit_testing/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import test_unit from './test_unit'; -import workflows from './workflows'; - -export { test_unit } -export { workflows } diff --git a/frontend/docs/lib/generated/snips/python/unit_testing/test_unit.ts b/frontend/docs/lib/generated/snips/python/unit_testing/test_unit.ts deleted file mode 100644 index 99eb7594b..000000000 --- a/frontend/docs/lib/generated/snips/python/unit_testing/test_unit.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import pytest\n\nfrom examples.unit_testing.workflows import (\n Lifespan,\n UnitTestInput,\n UnitTestOutput,\n async_complex_workflow,\n async_simple_workflow,\n async_standalone,\n durable_async_complex_workflow,\n durable_async_simple_workflow,\n durable_async_standalone,\n durable_sync_complex_workflow,\n durable_sync_simple_workflow,\n durable_sync_standalone,\n start,\n sync_complex_workflow,\n sync_simple_workflow,\n sync_standalone,\n)\nfrom hatchet_sdk import Task\n\n\n@pytest.mark.parametrize(\n \"func\",\n [\n sync_standalone,\n durable_sync_standalone,\n sync_simple_workflow,\n durable_sync_simple_workflow,\n sync_complex_workflow,\n durable_sync_complex_workflow,\n ],\n)\ndef test_simple_unit_sync(func: Task[UnitTestInput, UnitTestOutput]) -> None:\n input = UnitTestInput(key=\"test_key\", number=42)\n additional_metadata = {\"meta_key\": \"meta_value\"}\n lifespan = Lifespan(mock_db_url=\"sqlite:///:memory:\")\n retry_count = 1\n\n expected_output = UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=additional_metadata,\n retry_count=retry_count,\n mock_db_url=lifespan.mock_db_url,\n )\n\n assert (\n func.mock_run(\n input=input,\n additional_metadata=additional_metadata,\n 
lifespan=lifespan,\n retry_count=retry_count,\n parent_outputs={start.name: expected_output.model_dump()},\n )\n == expected_output\n )\n\n\n@pytest.mark.parametrize(\n \"func\",\n [\n async_standalone,\n durable_async_standalone,\n async_simple_workflow,\n durable_async_simple_workflow,\n async_complex_workflow,\n durable_async_complex_workflow,\n ],\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_simple_unit_async(func: Task[UnitTestInput, UnitTestOutput]) -> None:\n input = UnitTestInput(key=\"test_key\", number=42)\n additional_metadata = {\"meta_key\": \"meta_value\"}\n lifespan = Lifespan(mock_db_url=\"sqlite:///:memory:\")\n retry_count = 1\n\n expected_output = UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=additional_metadata,\n retry_count=retry_count,\n mock_db_url=lifespan.mock_db_url,\n )\n\n assert (\n await func.aio_mock_run(\n input=input,\n additional_metadata=additional_metadata,\n lifespan=lifespan,\n retry_count=retry_count,\n parent_outputs={start.name: expected_output.model_dump()},\n )\n == expected_output\n )\n", - "source": "out/python/unit_testing/test_unit.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/unit_testing/workflows.ts b/frontend/docs/lib/generated/snips/python/unit_testing/workflows.ts deleted file mode 100644 index be793957e..000000000 --- a/frontend/docs/lib/generated/snips/python/unit_testing/workflows.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from typing import cast\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, DurableContext, EmptyModel, Hatchet\n\n\nclass UnitTestInput(BaseModel):\n key: str\n number: int\n\n\nclass Lifespan(BaseModel):\n mock_db_url: str\n\n\nclass UnitTestOutput(UnitTestInput, Lifespan):\n additional_metadata: dict[str, str]\n retry_count: int\n\n\nhatchet = Hatchet()\n\n\n@hatchet.task(input_validator=UnitTestInput)\ndef sync_standalone(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@hatchet.task(input_validator=UnitTestInput)\nasync def async_standalone(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@hatchet.durable_task(input_validator=UnitTestInput)\ndef durable_sync_standalone(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@hatchet.durable_task(input_validator=UnitTestInput)\nasync def durable_async_standalone(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\nsimple_workflow = hatchet.workflow(\n name=\"simple-unit-test-workflow\", 
input_validator=UnitTestInput\n)\n\n\n@simple_workflow.task()\ndef sync_simple_workflow(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@simple_workflow.task()\nasync def async_simple_workflow(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@simple_workflow.durable_task()\ndef durable_sync_simple_workflow(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@simple_workflow.durable_task()\nasync def durable_async_simple_workflow(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\ncomplex_workflow = hatchet.workflow(\n name=\"complex-unit-test-workflow\", input_validator=UnitTestInput\n)\n\n\n@complex_workflow.task()\nasync def start(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return UnitTestOutput(\n key=input.key,\n number=input.number,\n additional_metadata=ctx.additional_metadata,\n retry_count=ctx.retry_count,\n mock_db_url=cast(Lifespan, ctx.lifespan).mock_db_url,\n )\n\n\n@complex_workflow.task(\n parents=[start],\n)\ndef sync_complex_workflow(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return ctx.task_output(start)\n\n\n@complex_workflow.task(\n parents=[start],\n)\nasync def async_complex_workflow(input: UnitTestInput, ctx: Context) -> UnitTestOutput:\n return ctx.task_output(start)\n\n\n@complex_workflow.durable_task(\n parents=[start],\n)\ndef durable_sync_complex_workflow(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return ctx.task_output(start)\n\n\n@complex_workflow.durable_task(\n parents=[start],\n)\nasync def durable_async_complex_workflow(\n input: UnitTestInput, ctx: DurableContext\n) -> UnitTestOutput:\n return ctx.task_output(start)\n", - "source": "out/python/unit_testing/workflows.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/webhooks/index.ts b/frontend/docs/lib/generated/snips/python/webhooks/index.ts deleted file mode 100644 index a43e7cad0..000000000 --- a/frontend/docs/lib/generated/snips/python/webhooks/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import test_webhooks from './test_webhooks'; -import worker from './worker'; - -export { test_webhooks } -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/webhooks/test_webhooks.ts b/frontend/docs/lib/generated/snips/python/webhooks/test_webhooks.ts deleted file mode 100644 index 7743b1df9..000000000 --- a/frontend/docs/lib/generated/snips/python/webhooks/test_webhooks.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nimport base64\nimport hashlib\nimport hmac\nimport json\nfrom collections.abc 
import AsyncGenerator\nfrom contextlib import asynccontextmanager\nfrom datetime import datetime, timezone\nfrom typing import Any\nfrom uuid import uuid4\n\nimport aiohttp\nimport pytest\n\nfrom examples.webhooks.worker import WebhookInput\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.api.webhook_api import WebhookApi\nfrom hatchet_sdk.clients.rest.models.v1_create_webhook_request import (\n V1CreateWebhookRequest,\n)\nfrom hatchet_sdk.clients.rest.models.v1_create_webhook_request_api_key import (\n V1CreateWebhookRequestAPIKey,\n)\nfrom hatchet_sdk.clients.rest.models.v1_create_webhook_request_basic_auth import (\n V1CreateWebhookRequestBasicAuth,\n)\nfrom hatchet_sdk.clients.rest.models.v1_create_webhook_request_hmac import (\n V1CreateWebhookRequestHMAC,\n)\nfrom hatchet_sdk.clients.rest.models.v1_event import V1Event\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\nfrom hatchet_sdk.clients.rest.models.v1_webhook import V1Webhook\nfrom hatchet_sdk.clients.rest.models.v1_webhook_api_key_auth import V1WebhookAPIKeyAuth\nfrom hatchet_sdk.clients.rest.models.v1_webhook_basic_auth import V1WebhookBasicAuth\nfrom hatchet_sdk.clients.rest.models.v1_webhook_hmac_algorithm import (\n V1WebhookHMACAlgorithm,\n)\nfrom hatchet_sdk.clients.rest.models.v1_webhook_hmac_auth import V1WebhookHMACAuth\nfrom hatchet_sdk.clients.rest.models.v1_webhook_hmac_encoding import (\n V1WebhookHMACEncoding,\n)\nfrom hatchet_sdk.clients.rest.models.v1_webhook_source_name import V1WebhookSourceName\n\nTEST_BASIC_USERNAME = \"test_user\"\nTEST_BASIC_PASSWORD = \"test_password\"\nTEST_API_KEY_HEADER = \"X-API-Key\"\nTEST_API_KEY_VALUE = \"test_api_key_123\"\nTEST_HMAC_SIGNATURE_HEADER = \"X-Signature\"\nTEST_HMAC_SECRET = \"test_hmac_secret\"\n\n\n@pytest.fixture\ndef webhook_body() -> WebhookInput:\n return WebhookInput(type=\"test\", message=\"Hello, world!\")\n\n\n@pytest.fixture\ndef test_run_id() -> str:\n return str(uuid4())\n\n\n@pytest.fixture\ndef test_start() -> datetime:\n return datetime.now(timezone.utc)\n\n\ndef create_hmac_signature(\n payload: bytes,\n secret: str,\n algorithm: V1WebhookHMACAlgorithm = V1WebhookHMACAlgorithm.SHA256,\n encoding: V1WebhookHMACEncoding = V1WebhookHMACEncoding.HEX,\n) -> str:\n algorithm_map = {\n V1WebhookHMACAlgorithm.SHA1: hashlib.sha1,\n V1WebhookHMACAlgorithm.SHA256: hashlib.sha256,\n V1WebhookHMACAlgorithm.SHA512: hashlib.sha512,\n V1WebhookHMACAlgorithm.MD5: hashlib.md5,\n }\n\n hash_func = algorithm_map[algorithm]\n signature = hmac.new(secret.encode(), payload, hash_func).digest()\n\n if encoding == V1WebhookHMACEncoding.HEX:\n return signature.hex()\n if encoding == V1WebhookHMACEncoding.BASE64:\n return base64.b64encode(signature).decode()\n if encoding == V1WebhookHMACEncoding.BASE64URL:\n return base64.urlsafe_b64encode(signature).decode()\n\n raise ValueError(f\"Unsupported encoding: {encoding}\")\n\n\nasync def send_webhook_request(\n url: str,\n body: WebhookInput,\n auth_type: str,\n auth_data: dict[str, Any] | None = None,\n headers: dict[str, str] | None = None,\n) -> aiohttp.ClientResponse:\n request_headers = headers or {}\n auth = None\n\n if auth_type == \"BASIC\" and auth_data:\n auth = aiohttp.BasicAuth(auth_data[\"username\"], auth_data[\"password\"])\n elif auth_type == \"API_KEY\" and auth_data:\n request_headers[auth_data[\"header_name\"]] = auth_data[\"api_key\"]\n elif auth_type == \"HMAC\" and auth_data:\n payload = 
json.dumps(body.model_dump()).encode()\n signature = create_hmac_signature(\n payload,\n auth_data[\"secret\"],\n auth_data.get(\"algorithm\", V1WebhookHMACAlgorithm.SHA256),\n auth_data.get(\"encoding\", V1WebhookHMACEncoding.HEX),\n )\n request_headers[auth_data[\"header_name\"]] = signature\n\n async with aiohttp.ClientSession() as session:\n return await session.post(\n url, json=body.model_dump(), auth=auth, headers=request_headers\n )\n\n\nasync def wait_for_event(\n hatchet: Hatchet,\n webhook_name: str,\n test_start: datetime,\n) -> V1Event | None:\n await asyncio.sleep(5)\n\n events = await hatchet.event.aio_list(since=test_start)\n\n if events.rows is None:\n return None\n\n return next(\n (\n event\n for event in events.rows\n if event.triggering_webhook_name == webhook_name\n ),\n None,\n )\n\n\nasync def wait_for_workflow_run(\n hatchet: Hatchet, event_id: str, test_start: datetime\n) -> V1TaskSummary | None:\n await asyncio.sleep(5)\n\n runs = await hatchet.runs.aio_list(\n since=test_start,\n additional_metadata={\n \"hatchet__event_id\": event_id,\n },\n )\n\n if len(runs.rows) == 0:\n return None\n\n return runs.rows[0]\n\n\n@asynccontextmanager\nasync def basic_auth_webhook(\n hatchet: Hatchet,\n test_run_id: str,\n username: str = TEST_BASIC_USERNAME,\n password: str = TEST_BASIC_PASSWORD,\n source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC,\n) -> AsyncGenerator[V1Webhook, None]:\n ## Hack to get the API client\n client = hatchet.metrics.client()\n webhook_api = WebhookApi(client)\n\n webhook_request = V1CreateWebhookRequestBasicAuth(\n sourceName=source_name,\n name=f\"test-webhook-basic-{test_run_id}\",\n eventKeyExpression=f\"'{hatchet.config.apply_namespace('webhook')}:' + input.type\",\n authType=\"BASIC\",\n auth=V1WebhookBasicAuth(\n username=username,\n password=password,\n ),\n )\n\n incoming_webhook = webhook_api.v1_webhook_create(\n tenant=hatchet.tenant_id,\n v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),\n )\n\n try:\n yield incoming_webhook\n finally:\n webhook_api.v1_webhook_delete(\n tenant=hatchet.tenant_id,\n v1_webhook=incoming_webhook.name,\n )\n\n\n@asynccontextmanager\nasync def api_key_webhook(\n hatchet: Hatchet,\n test_run_id: str,\n header_name: str = TEST_API_KEY_HEADER,\n api_key: str = TEST_API_KEY_VALUE,\n source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC,\n) -> AsyncGenerator[V1Webhook, None]:\n client = hatchet.metrics.client()\n webhook_api = WebhookApi(client)\n\n webhook_request = V1CreateWebhookRequestAPIKey(\n sourceName=source_name,\n name=f\"test-webhook-apikey-{test_run_id}\",\n eventKeyExpression=f\"'{hatchet.config.apply_namespace('webhook')}:' + input.type\",\n authType=\"API_KEY\",\n auth=V1WebhookAPIKeyAuth(\n headerName=header_name,\n apiKey=api_key,\n ),\n )\n\n incoming_webhook = webhook_api.v1_webhook_create(\n tenant=hatchet.tenant_id,\n v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),\n )\n\n try:\n yield incoming_webhook\n finally:\n webhook_api.v1_webhook_delete(\n tenant=hatchet.tenant_id,\n v1_webhook=incoming_webhook.name,\n )\n\n\n@asynccontextmanager\nasync def hmac_webhook(\n hatchet: Hatchet,\n test_run_id: str,\n signature_header_name: str = TEST_HMAC_SIGNATURE_HEADER,\n signing_secret: str = TEST_HMAC_SECRET,\n algorithm: V1WebhookHMACAlgorithm = V1WebhookHMACAlgorithm.SHA256,\n encoding: V1WebhookHMACEncoding = V1WebhookHMACEncoding.HEX,\n source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC,\n) -> AsyncGenerator[V1Webhook, None]:\n 
client = hatchet.metrics.client()\n webhook_api = WebhookApi(client)\n\n webhook_request = V1CreateWebhookRequestHMAC(\n sourceName=source_name,\n name=f\"test-webhook-hmac-{test_run_id}\",\n eventKeyExpression=f\"'{hatchet.config.apply_namespace('webhook')}:' + input.type\",\n authType=\"HMAC\",\n auth=V1WebhookHMACAuth(\n algorithm=algorithm,\n encoding=encoding,\n signatureHeaderName=signature_header_name,\n signingSecret=signing_secret,\n ),\n )\n\n incoming_webhook = webhook_api.v1_webhook_create(\n tenant=hatchet.tenant_id,\n v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),\n )\n\n try:\n yield incoming_webhook\n finally:\n webhook_api.v1_webhook_delete(\n tenant=hatchet.tenant_id,\n v1_webhook=incoming_webhook.name,\n )\n\n\ndef url(tenant_id: str, webhook_name: str) -> str:\n return f\"http://localhost:8080/api/v1/stable/tenants/{tenant_id}/webhooks/{webhook_name}\"\n\n\nasync def assert_has_runs(\n hatchet: Hatchet,\n test_start: datetime,\n webhook_body: WebhookInput,\n incoming_webhook: V1Webhook,\n) -> None:\n triggered_event = await wait_for_event(hatchet, incoming_webhook.name, test_start)\n assert triggered_event is not None\n assert (\n triggered_event.key\n == f\"{hatchet.config.apply_namespace('webhook')}:{webhook_body.type}\"\n )\n assert triggered_event.payload == webhook_body.model_dump()\n\n workflow_run = await wait_for_workflow_run(\n hatchet, triggered_event.metadata.id, test_start\n )\n assert workflow_run is not None\n assert workflow_run.status == V1TaskStatus.COMPLETED\n assert workflow_run.additional_metadata is not None\n\n assert (\n workflow_run.additional_metadata[\"hatchet__event_id\"]\n == triggered_event.metadata.id\n )\n assert workflow_run.additional_metadata[\"hatchet__event_key\"] == triggered_event.key\n assert workflow_run.status == V1TaskStatus.COMPLETED\n\n\nasync def assert_event_not_created(\n hatchet: Hatchet,\n test_start: datetime,\n incoming_webhook: V1Webhook,\n) -> None:\n triggered_event = await wait_for_event(hatchet, incoming_webhook.name, test_start)\n assert triggered_event is None\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_basic_auth_success(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with basic_auth_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n \"BASIC\",\n {\"username\": TEST_BASIC_USERNAME, \"password\": TEST_BASIC_PASSWORD},\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {\"message\": \"ok\"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n \"username,password\",\n [\n (\"test_user\", \"incorrect_password\"),\n (\"incorrect_user\", \"test_password\"),\n (\"incorrect_user\", \"incorrect_password\"),\n (\"\", \"\"),\n ],\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_basic_auth_failure(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n username: str,\n password: str,\n) -> None:\n \"\"\"Test basic authentication failures.\"\"\"\n async with basic_auth_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n \"BASIC\",\n {\"username\": username, \"password\": password},\n ) as response:\n assert response.status == 403\n\n 
await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_basic_auth_missing_credentials(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with basic_auth_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name), webhook_body, \"NONE\"\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_api_key_success(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with api_key_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n \"API_KEY\",\n {\"header_name\": TEST_API_KEY_HEADER, \"api_key\": TEST_API_KEY_VALUE},\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {\"message\": \"ok\"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n \"api_key\",\n [\n \"incorrect_api_key\",\n \"\",\n \"partial_key\",\n ],\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_api_key_failure(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n api_key: str,\n) -> None:\n async with api_key_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n \"API_KEY\",\n {\"header_name\": TEST_API_KEY_HEADER, \"api_key\": api_key},\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_api_key_missing_header(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with api_key_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name), webhook_body, \"NONE\"\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_hmac_success(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with hmac_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n \"HMAC\",\n {\n \"header_name\": TEST_HMAC_SIGNATURE_HEADER,\n \"secret\": TEST_HMAC_SECRET,\n \"algorithm\": V1WebhookHMACAlgorithm.SHA256,\n \"encoding\": V1WebhookHMACEncoding.HEX,\n },\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {\"message\": \"ok\"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n \"algorithm,encoding\",\n [\n (V1WebhookHMACAlgorithm.SHA1, V1WebhookHMACEncoding.HEX),\n (V1WebhookHMACAlgorithm.SHA256, V1WebhookHMACEncoding.BASE64),\n (V1WebhookHMACAlgorithm.SHA512, V1WebhookHMACEncoding.BASE64URL),\n (V1WebhookHMACAlgorithm.MD5, 
V1WebhookHMACEncoding.HEX),\n ],\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_hmac_different_algorithms_and_encodings(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n algorithm: V1WebhookHMACAlgorithm,\n encoding: V1WebhookHMACEncoding,\n) -> None:\n async with hmac_webhook(\n hatchet, test_run_id, algorithm=algorithm, encoding=encoding\n ) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n \"HMAC\",\n {\n \"header_name\": TEST_HMAC_SIGNATURE_HEADER,\n \"secret\": TEST_HMAC_SECRET,\n \"algorithm\": algorithm,\n \"encoding\": encoding,\n },\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {\"message\": \"ok\"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n \"secret\",\n [\n \"incorrect_secret\",\n \"\",\n \"partial_secret\",\n ],\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_hmac_signature_failure(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n secret: str,\n) -> None:\n async with hmac_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n \"HMAC\",\n {\n \"header_name\": TEST_HMAC_SIGNATURE_HEADER,\n \"secret\": secret,\n \"algorithm\": V1WebhookHMACAlgorithm.SHA256,\n \"encoding\": V1WebhookHMACEncoding.HEX,\n },\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_hmac_missing_signature_header(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n) -> None:\n async with hmac_webhook(hatchet, test_run_id) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name), webhook_body, \"NONE\"\n ) as response:\n assert response.status == 403\n\n await assert_event_not_created(\n hatchet,\n test_start,\n incoming_webhook,\n )\n\n\n@pytest.mark.parametrize(\n \"source_name\",\n [\n V1WebhookSourceName.GENERIC,\n V1WebhookSourceName.GITHUB,\n ],\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_different_source_types(\n hatchet: Hatchet,\n test_run_id: str,\n test_start: datetime,\n webhook_body: WebhookInput,\n source_name: V1WebhookSourceName,\n) -> None:\n async with basic_auth_webhook(\n hatchet, test_run_id, source_name=source_name\n ) as incoming_webhook:\n async with await send_webhook_request(\n url(hatchet.tenant_id, incoming_webhook.name),\n webhook_body,\n \"BASIC\",\n {\"username\": TEST_BASIC_USERNAME, \"password\": TEST_BASIC_PASSWORD},\n ) as response:\n assert response.status == 200\n data = await response.json()\n assert data == {\"message\": \"ok\"}\n\n await assert_has_runs(\n hatchet,\n test_start,\n webhook_body,\n incoming_webhook,\n )\n", - "source": "out/python/webhooks/test_webhooks.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/webhooks/worker.ts b/frontend/docs/lib/generated/snips/python/webhooks/worker.ts deleted file mode 100644 index f060c334b..000000000 --- a/frontend/docs/lib/generated/snips/python/webhooks/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - 
-const snippet: Snippet = { - "language": "python", - "content": "# > Webhooks\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass WebhookInput(BaseModel):\n type: str\n message: str\n\n\n@hatchet.task(input_validator=WebhookInput, on_events=[\"webhook:test\"])\ndef webhook(input: WebhookInput, ctx: Context) -> dict[str, str]:\n return input.model_dump()\n\n\ndef main() -> None:\n worker = hatchet.worker(\"webhook-worker\", workflows=[webhook])\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/webhooks/worker.py", - "blocks": { - "webhooks": { - "start": 2, - "stop": 24 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/worker.ts b/frontend/docs/lib/generated/snips/python/worker.ts deleted file mode 100644 index 3ba9e8d7a..000000000 --- a/frontend/docs/lib/generated/snips/python/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "from examples.affinity_workers.worker import affinity_worker_workflow\nfrom examples.bulk_fanout.worker import bulk_child_wf, bulk_parent_wf\nfrom examples.bulk_operations.worker import (\n bulk_replay_test_1,\n bulk_replay_test_2,\n bulk_replay_test_3,\n)\nfrom examples.cancellation.worker import cancellation_workflow\nfrom examples.concurrency_limit.worker import concurrency_limit_workflow\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow\nfrom examples.concurrency_workflow_level.worker import (\n concurrency_workflow_level_workflow,\n)\nfrom examples.conditions.worker import task_condition_workflow\nfrom examples.dag.worker import dag_workflow\nfrom examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf\nfrom examples.dependency_injection.worker import (\n async_task_with_dependencies,\n di_workflow,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n sync_task_with_dependencies,\n)\nfrom examples.durable.worker import durable_workflow, wait_for_sleep_twice\nfrom examples.events.worker import event_workflow\nfrom examples.fanout.worker import child_wf, parent_wf\nfrom examples.fanout_sync.worker import sync_fanout_child, sync_fanout_parent\nfrom examples.lifespans.simple import lifespan, lifespan_task\nfrom examples.logger.workflow import logging_workflow\nfrom examples.non_retryable.worker import non_retryable_workflow\nfrom examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details\nfrom examples.return_exceptions.worker import return_exceptions_task\nfrom examples.simple.worker import simple, simple_durable\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\nfrom examples.webhooks.worker import webhook\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"e2e-test-worker\",\n slots=100,\n workflows=[\n affinity_worker_workflow,\n bulk_child_wf,\n bulk_parent_wf,\n concurrency_limit_workflow,\n concurrency_limit_rr_workflow,\n concurrency_multiple_keys_workflow,\n dag_workflow,\n dedupe_child_wf,\n dedupe_parent_wf,\n durable_workflow,\n child_wf,\n event_workflow,\n parent_wf,\n on_failure_wf,\n on_failure_wf_with_details,\n logging_workflow,\n timeout_wf,\n refresh_timeout_wf,\n task_condition_workflow,\n 
cancellation_workflow,\n sync_fanout_parent,\n sync_fanout_child,\n non_retryable_workflow,\n concurrency_workflow_level_workflow,\n di_workflow,\n lifespan_task,\n simple,\n simple_durable,\n bulk_replay_test_1,\n bulk_replay_test_2,\n bulk_replay_test_3,\n webhook,\n return_exceptions_task,\n wait_for_sleep_twice,\n async_task_with_dependencies,\n sync_task_with_dependencies,\n durable_async_task_with_dependencies,\n durable_sync_task_with_dependencies,\n ],\n lifespan=lifespan,\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/worker.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/worker_existing_loop/index.ts b/frontend/docs/lib/generated/snips/python/worker_existing_loop/index.ts deleted file mode 100644 index 30b1adbeb..000000000 --- a/frontend/docs/lib/generated/snips/python/worker_existing_loop/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import worker from './worker'; - -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/worker_existing_loop/worker.ts b/frontend/docs/lib/generated/snips/python/worker_existing_loop/worker.ts deleted file mode 100644 index 9586792df..000000000 --- a/frontend/docs/lib/generated/snips/python/worker_existing_loop/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "import asyncio\nfrom contextlib import suppress\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\nexisting_loop_worker = hatchet.workflow(name=\"WorkerExistingLoopWorkflow\")\n\n\n@existing_loop_worker.task()\nasync def task(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\"started\")\n await asyncio.sleep(10)\n print(\"finished\")\n return {\"result\": \"returned result\"}\n\n\nasync def async_main() -> None:\n worker = None\n try:\n worker = hatchet.worker(\n \"test-worker\", slots=1, workflows=[existing_loop_worker]\n )\n worker.start()\n\n ref = existing_loop_worker.run_no_wait()\n print(await ref.aio_result())\n while True:\n await asyncio.sleep(1)\n finally:\n if worker:\n await worker.exit_gracefully()\n\n\ndef main() -> None:\n with suppress(KeyboardInterrupt):\n asyncio.run(async_main())\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/worker_existing_loop/worker.py", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/workflow_registration/index.ts b/frontend/docs/lib/generated/snips/python/workflow_registration/index.ts deleted file mode 100644 index 30b1adbeb..000000000 --- a/frontend/docs/lib/generated/snips/python/workflow_registration/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import worker from './worker'; - -export { worker } diff --git a/frontend/docs/lib/generated/snips/python/workflow_registration/worker.ts b/frontend/docs/lib/generated/snips/python/workflow_registration/worker.ts deleted file mode 100644 index 4f5378746..000000000 --- a/frontend/docs/lib/generated/snips/python/workflow_registration/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "python", - "content": "# > WorkflowRegistration\n\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\nwf_one = hatchet.workflow(name=\"wf_one\")\nwf_two = hatchet.workflow(name=\"wf_two\")\nwf_three = 
hatchet.workflow(name=\"wf_three\")\nwf_four = hatchet.workflow(name=\"wf_four\")\nwf_five = hatchet.workflow(name=\"wf_five\")\n\n# define tasks here\n\n\ndef main() -> None:\n # 👀 Register workflows directly when instantiating the worker\n worker = hatchet.worker(\"test-worker\", slots=1, workflows=[wf_one, wf_two])\n\n # 👀 Register a single workflow after instantiating the worker\n worker.register_workflow(wf_three)\n\n # 👀 Register multiple workflows in bulk after instantiating the worker\n worker.register_workflows([wf_four, wf_five])\n\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n", - "source": "out/python/workflow_registration/worker.py", - "blocks": { - "workflowregistration": { - "start": 2, - "stop": 28 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/types.ts b/frontend/docs/lib/generated/snips/types.ts deleted file mode 100644 index b85269595..000000000 --- a/frontend/docs/lib/generated/snips/types.ts +++ /dev/null @@ -1,31 +0,0 @@ -export type Highlight = { - lines: number[]; - strings: string[]; -}; - -export type Block = { - start: number; - stop: number; -}; - -// Types for snippets -export type Snippet = { - content: string; - language: string; - source: string; - blocks?: { - [key: string]: Block; - }; - highlights?: { - [key: string]: Highlight; - }; -}; - -export const LANGUAGE_MAP = { - ts: 'typescript ', - py: 'python', - go: 'go', - unknown: 'unknown', -}; - -export default {}; diff --git a/frontend/docs/lib/generated/snips/typescript/cancellations/index.ts b/frontend/docs/lib/generated/snips/typescript/cancellations/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/cancellations/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/cancellations/run.ts b/frontend/docs/lib/generated/snips/typescript/cancellations/run.ts deleted file mode 100644 index a87a9ffc8..000000000 --- a/frontend/docs/lib/generated/snips/typescript/cancellations/run.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Running a Task with Results\nimport sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { cancellation } from './workflow';\nimport { hatchet } from '../hatchet-client';\n// ...\nasync function main() {\n const run = await cancellation.runNoWait({});\n const run1 = await cancellation.runNoWait({});\n\n await sleep(1000);\n\n await run.cancel();\n\n const res = await run.output;\n const res1 = await run1.output;\n\n console.log('canceled', res);\n console.log('completed', res1);\n\n await sleep(1000);\n\n await run.replay();\n\n const resReplay = await run.output;\n\n console.log(resReplay);\n\n const run2 = await cancellation.runNoWait({}, { additionalMetadata: { test: 'abc' } });\n const run4 = await cancellation.runNoWait({}, { additionalMetadata: { test: 'test' } });\n\n await sleep(1000);\n\n await hatchet.runs.cancel({\n filters: {\n since: new Date(Date.now() - 60 * 60),\n additionalMetadata: { test: 'test' },\n },\n });\n\n const res3 = await Promise.all([run2.output, run4.output]);\n console.log(res3);\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": 
"out/typescript/cancellations/run.ts", - "blocks": { - "running_a_task_with_results": { - "start": 2, - "stop": 41 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/cancellations/worker.ts b/frontend/docs/lib/generated/snips/typescript/cancellations/worker.ts deleted file mode 100644 index dfbd23f91..000000000 --- a/frontend/docs/lib/generated/snips/typescript/cancellations/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { cancellation } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('cancellation-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [cancellation],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 100,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/cancellations/worker.ts", - "blocks": { - "declaring_a_worker": { - "start": 2, - "stop": 18 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/cancellations/workflow.ts b/frontend/docs/lib/generated/snips/typescript/cancellations/workflow.ts deleted file mode 100644 index 42f8d1e0e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/cancellations/workflow.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport axios from 'axios';\nimport { hatchet } from '../hatchet-client';\n\n// > Declaring a Task\nexport const cancellation = hatchet.task({\n name: 'cancellation',\n fn: async (_, ctx) => {\n await sleep(10 * 1000);\n\n if (ctx.cancelled) {\n throw new Error('Task was cancelled');\n }\n\n return {\n Completed: true,\n };\n },\n});\n\n// > Abort Signal\nexport const abortSignal = hatchet.task({\n name: 'abort-signal',\n fn: async (_, { abortController }) => {\n try {\n const response = await axios.get('https://api.example.com/data', {\n signal: abortController.signal,\n });\n // Handle the response\n } catch (error) {\n if (axios.isCancel(error)) {\n // Request was canceled\n console.log('Request canceled');\n } else {\n // Handle other errors\n }\n }\n },\n});\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - "source": "out/typescript/cancellations/workflow.ts", - "blocks": { - "declaring_a_task": { - "start": 6, - "stop": 19 - }, - "abort_signal": { - "start": 22, - "stop": 39 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/child_workflows/index.ts b/frontend/docs/lib/generated/snips/typescript/child_workflows/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/child_workflows/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/child_workflows/run.ts b/frontend/docs/lib/generated/snips/typescript/child_workflows/run.ts deleted file mode 100644 index b8b724aa1..000000000 --- 
a/frontend/docs/lib/generated/snips/typescript/child_workflows/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { parent } from './workflow';\n\nasync function main() {\n const res = await parent.run({\n N: 10,\n });\n\n console.log(res.Result);\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - "source": "out/typescript/child_workflows/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/child_workflows/worker.ts b/frontend/docs/lib/generated/snips/typescript/child_workflows/worker.ts deleted file mode 100644 index e02431114..000000000 --- a/frontend/docs/lib/generated/snips/typescript/child_workflows/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { parent, child } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('child-workflow-worker', {\n workflows: [parent, child],\n slots: 100,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/child_workflows/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/child_workflows/workflow.ts b/frontend/docs/lib/generated/snips/typescript/child_workflows/workflow.ts deleted file mode 100644 index b506fa54b..000000000 --- a/frontend/docs/lib/generated/snips/typescript/child_workflows/workflow.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Declaring a Child\nimport { hatchet } from '../hatchet-client';\n\ntype ChildInput = {\n N: number;\n};\n\nexport const child = hatchet.task({\n name: 'child',\n fn: (input: ChildInput) => {\n return {\n Value: input.N,\n };\n },\n});\n\n// > Declaring a Parent\n\ntype ParentInput = {\n N: number;\n};\n\nexport const parent = hatchet.task({\n name: 'parent',\n fn: async (input: ParentInput, ctx) => {\n const n = input.N;\n const promises = [];\n\n for (let i = 0; i < n; i++) {\n promises.push(child.run({ N: i }));\n }\n\n const childRes = await Promise.all(promises);\n const sum = childRes.reduce((acc, curr) => acc + curr.Value, 0);\n\n return {\n Result: sum,\n };\n },\n});\n", - "source": "out/typescript/child_workflows/workflow.ts", - "blocks": { - "declaring_a_child": { - "start": 2, - "stop": 15 - }, - "declaring_a_parent": { - "start": 18, - "stop": 40 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/index.ts b/frontend/docs/lib/generated/snips/typescript/concurrency-rr/index.ts deleted file mode 100644 index 4a96a22ef..000000000 --- a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import load from './load'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { load } -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/load.ts 
b/frontend/docs/lib/generated/snips/typescript/concurrency-rr/load.ts deleted file mode 100644 index e6f21edb2..000000000 --- a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/load.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { simpleConcurrency } from './workflow';\n\nfunction generateRandomString(length: number): string {\n const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';\n let result = '';\n for (let i = 0; i < length; i++) {\n result += characters.charAt(Math.floor(Math.random() * characters.length));\n }\n return result;\n}\n\nasync function main() {\n const groupCount = 2;\n const runsPerGroup = 20_000;\n const BATCH_SIZE = 400;\n\n const workflowRuns = [];\n for (let i = 0; i < groupCount; i++) {\n for (let j = 0; j < runsPerGroup; j++) {\n workflowRuns.push({\n workflowName: simpleConcurrency.definition.name,\n input: {\n Message: generateRandomString(10),\n GroupKey: `group-${i}`,\n },\n });\n }\n }\n\n // Shuffle the workflow runs array\n for (let i = workflowRuns.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [workflowRuns[i], workflowRuns[j]] = [workflowRuns[j], workflowRuns[i]];\n }\n\n // Process workflows in batches\n for (let i = 0; i < workflowRuns.length; i += BATCH_SIZE) {\n const batch = workflowRuns.slice(i, i + BATCH_SIZE);\n await hatchet.admin.runWorkflows(batch);\n }\n}\n\nif (require.main === module) {\n main().then(() => process.exit(0));\n}\n", - "source": "out/typescript/concurrency-rr/load.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/run.ts b/frontend/docs/lib/generated/snips/typescript/concurrency-rr/run.ts deleted file mode 100644 index 6a73c8dd5..000000000 --- a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { simpleConcurrency } from './workflow';\n\nasync function main() {\n const res = await simpleConcurrency.run([\n {\n Message: 'Hello World',\n GroupKey: 'A',\n },\n {\n Message: 'Goodbye Moon',\n GroupKey: 'A',\n },\n {\n Message: 'Hello World B',\n GroupKey: 'B',\n },\n ]);\n\n console.log(res[0]['to-lower'].TransformedMessage);\n console.log(res[1]['to-lower'].TransformedMessage);\n console.log(res[2]['to-lower'].TransformedMessage);\n}\n\nif (require.main === module) {\n main().then(() => process.exit(0));\n}\n", - "source": "out/typescript/concurrency-rr/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/worker.ts b/frontend/docs/lib/generated/snips/typescript/concurrency-rr/worker.ts deleted file mode 100644 index cac801352..000000000 --- a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { simpleConcurrency } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('simple-concurrency-worker', {\n workflows: [simpleConcurrency],\n });\n\n await worker.start();\n}\n\nif 
(require.main === module) {\n main();\n}\n", - "source": "out/typescript/concurrency-rr/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/workflow.ts b/frontend/docs/lib/generated/snips/typescript/concurrency-rr/workflow.ts deleted file mode 100644 index 5a72ce1f6..000000000 --- a/frontend/docs/lib/generated/snips/typescript/concurrency-rr/workflow.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/workflow';\nimport { hatchet } from '../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n GroupKey: string;\n};\n\ntype SimpleOutput = {\n 'to-lower': {\n TransformedMessage: string;\n };\n};\n\nconst sleep = (ms: number) =>\n new Promise((resolve) => {\n setTimeout(resolve, ms);\n });\n\n// > Concurrency Strategy With Key\nexport const simpleConcurrency = hatchet.workflow({\n name: 'simple-concurrency',\n concurrency: {\n maxRuns: 1,\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.GroupKey',\n },\n});\n\nsimpleConcurrency.task({\n name: 'to-lower',\n fn: async (input) => {\n await sleep(Math.floor(Math.random() * (1000 - 200 + 1)) + 200);\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\n// > Multiple Concurrency Keys\nexport const multipleConcurrencyKeys = hatchet.workflow({\n name: 'simple-concurrency',\n concurrency: [\n {\n maxRuns: 1,\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.Tier',\n },\n {\n maxRuns: 1,\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.Account',\n },\n ],\n});\n\nmultipleConcurrencyKeys.task({\n name: 'to-lower',\n fn: async (input) => {\n await sleep(Math.floor(Math.random() * (1000 - 200 + 1)) + 200);\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - "source": "out/typescript/concurrency-rr/workflow.ts", - "blocks": { - "concurrency_strategy_with_key": { - "start": 21, - "stop": 28 - }, - "multiple_concurrency_keys": { - "start": 41, - "stop": 55 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/dag/index.ts b/frontend/docs/lib/generated/snips/typescript/dag/index.ts deleted file mode 100644 index ef0a1d259..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import interface_workflow from './interface-workflow'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { interface_workflow } -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/dag/interface-workflow.ts b/frontend/docs/lib/generated/snips/typescript/dag/interface-workflow.ts deleted file mode 100644 index 68927e162..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag/interface-workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { WorkflowInputType, WorkflowOutputType } from '@hatchet-dev/typescript-sdk/v1';\nimport { hatchet } from '../hatchet-client';\n\ninterface DagInput extends WorkflowInputType {\n Message: string;\n}\n\ninterface DagOutput extends WorkflowOutputType {\n reverse: 
{\n Original: string;\n Transformed: string;\n };\n}\n\n// > Declaring a DAG Workflow\n// First, we declare the workflow\nexport const dag = hatchet.workflow({\n name: 'simple',\n});\n\nconst reverse = dag.task({\n name: 'reverse',\n fn: (input) => {\n return {\n Original: input.Message,\n Transformed: input.Message.split('').reverse().join(''),\n };\n },\n});\n\ndag.task({\n name: 'to-lower',\n parents: [reverse],\n fn: async (input, ctx) => {\n const r = await ctx.parentOutput(reverse);\n\n return {\n reverse: {\n Original: r.Transformed,\n Transformed: r.Transformed.toLowerCase(),\n },\n };\n },\n});\n", - "source": "out/typescript/dag/interface-workflow.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/dag/run.ts b/frontend/docs/lib/generated/snips/typescript/dag/run.ts deleted file mode 100644 index a44a2a257..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { dag } from './workflow';\n\nasync function main() {\n const res = await dag.run({\n Message: 'hello world',\n });\n\n console.log(res.reverse.Transformed);\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/dag/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/dag/worker.ts b/frontend/docs/lib/generated/snips/typescript/dag/worker.ts deleted file mode 100644 index 8222ca7da..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { dag } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('dag-worker', {\n workflows: [dag],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/dag/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/dag/workflow.ts b/frontend/docs/lib/generated/snips/typescript/dag/workflow.ts deleted file mode 100644 index e59d34701..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\ntype DagInput = {\n Message: string;\n};\n\ntype DagOutput = {\n reverse: {\n Original: string;\n Transformed: string;\n };\n};\n\n// > Declaring a DAG Workflow\n// First, we declare the workflow\nexport const dag = hatchet.workflow({\n name: 'simple',\n});\n\n// Next, we declare the tasks bound to the workflow\nconst toLower = dag.task({\n name: 'to-lower',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\n// Next, we declare the tasks bound to the workflow\ndag.task({\n name: 'reverse',\n parents: [toLower],\n fn: async (input, ctx) => {\n const lower = await ctx.parentOutput(toLower);\n return {\n Original: input.Message,\n Transformed: lower.TransformedMessage.split('').reverse().join(''),\n };\n },\n});\n", - "source": "out/typescript/dag/workflow.ts", - "blocks": { - 
"declaring_a_dag_workflow": { - "start": 15, - "stop": 41 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/complex-workflow.ts b/frontend/docs/lib/generated/snips/typescript/dag_match_condition/complex-workflow.ts deleted file mode 100644 index e7cfa88be..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/complex-workflow.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Create a workflow\nimport { Or, SleepCondition, UserEventCondition } from '@hatchet-dev/typescript-sdk/v1/conditions';\nimport { ParentCondition } from '@hatchet-dev/typescript-sdk/v1/conditions/parent-condition';\nimport { Context } from '@hatchet-dev/typescript-sdk/v1/client/worker/context';\nimport { hatchet } from '../hatchet-client';\n\nexport const taskConditionWorkflow = hatchet.workflow({\n name: 'TaskConditionWorkflow',\n});\n\n// > Add base task\nconst start = taskConditionWorkflow.task({\n name: 'start',\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add wait for sleep\nconst waitForSleep = taskConditionWorkflow.task({\n name: 'waitForSleep',\n parents: [start],\n waitFor: [new SleepCondition('10s')],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add skip on event\nconst skipOnEvent = taskConditionWorkflow.task({\n name: 'skipOnEvent',\n parents: [start],\n waitFor: [new SleepCondition('10s')],\n skipIf: [new UserEventCondition('skip_on_event:skip', 'true')],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add branching\nconst leftBranch = taskConditionWorkflow.task({\n name: 'leftBranch',\n parents: [waitForSleep],\n skipIf: [new ParentCondition(waitForSleep, 'output.randomNumber > 50')],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\nconst rightBranch = taskConditionWorkflow.task({\n name: 'rightBranch',\n parents: [waitForSleep],\n skipIf: [new ParentCondition(waitForSleep, 'output.randomNumber <= 50')],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add wait for event\nconst waitForEvent = taskConditionWorkflow.task({\n name: 'waitForEvent',\n parents: [start],\n waitFor: [Or(new SleepCondition('1m'), new UserEventCondition('wait_for_event:start', 'true'))],\n fn: () => {\n return {\n randomNumber: Math.floor(Math.random() * 100) + 1,\n };\n },\n});\n\n// > Add sum\ntaskConditionWorkflow.task({\n name: 'sum',\n parents: [start, waitForSleep, waitForEvent, skipOnEvent, leftBranch, rightBranch],\n fn: async (_, ctx: Context) => {\n const one = (await ctx.parentOutput(start)).randomNumber;\n const two = (await ctx.parentOutput(waitForEvent)).randomNumber;\n const three = (await ctx.parentOutput(waitForSleep)).randomNumber;\n const four = (await ctx.parentOutput(skipOnEvent))?.randomNumber || 0;\n const five = (await ctx.parentOutput(leftBranch))?.randomNumber || 0;\n const six = (await ctx.parentOutput(rightBranch))?.randomNumber || 0;\n\n return {\n sum: one + two + three + four + five + six,\n };\n },\n});\n", - "source": "out/typescript/dag_match_condition/complex-workflow.ts", - "blocks": { - "create_a_workflow": { - "start": 2, - "stop": 9 - }, - "add_base_task": { - "start": 12, - "stop": 19 - }, - 
"add_wait_for_sleep": { - "start": 22, - "stop": 31 - }, - "add_skip_on_event": { - "start": 34, - "stop": 44 - }, - "add_branching": { - "start": 47, - "stop": 67 - }, - "add_wait_for_event": { - "start": 70, - "stop": 79 - }, - "add_sum": { - "start": 82, - "stop": 97 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/event.ts b/frontend/docs/lib/generated/snips/typescript/dag_match_condition/event.ts deleted file mode 100644 index fa3798a99..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/event.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\nasync function main() {\n const event = await hatchet.events.push('user:event', {\n Data: { Hello: 'World' },\n });\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - "source": "out/typescript/dag_match_condition/event.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/index.ts b/frontend/docs/lib/generated/snips/typescript/dag_match_condition/index.ts deleted file mode 100644 index 293111487..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import complex_workflow from './complex-workflow'; -import event from './event'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { complex_workflow } -export { event } -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/run.ts b/frontend/docs/lib/generated/snips/typescript/dag_match_condition/run.ts deleted file mode 100644 index 844957140..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { dagWithConditions } from './workflow';\n\nasync function main() {\n const res = await dagWithConditions.run({});\n\n console.log(res['first-task'].Completed);\n console.log(res['second-task'].Completed);\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": "out/typescript/dag_match_condition/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/worker.ts b/frontend/docs/lib/generated/snips/typescript/dag_match_condition/worker.ts deleted file mode 100644 index ca4f1d5ab..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { dagWithConditions } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('dag-worker', {\n workflows: [dagWithConditions],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": 
"out/typescript/dag_match_condition/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/workflow.ts b/frontend/docs/lib/generated/snips/typescript/dag_match_condition/workflow.ts deleted file mode 100644 index 2044957f4..000000000 --- a/frontend/docs/lib/generated/snips/typescript/dag_match_condition/workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { Or } from '@hatchet-dev/typescript-sdk/v1/conditions';\nimport { hatchet } from '../hatchet-client';\n\ntype DagInput = {};\n\ntype DagOutput = {\n 'first-task': {\n Completed: boolean;\n };\n 'second-task': {\n Completed: boolean;\n };\n};\n\nexport const dagWithConditions = hatchet.workflow({\n name: 'simple',\n});\n\nconst firstTask = dagWithConditions.task({\n name: 'first-task',\n fn: async () => {\n await sleep(2000);\n return {\n Completed: true,\n };\n },\n});\n\ndagWithConditions.task({\n name: 'second-task',\n parents: [firstTask],\n waitFor: Or({ eventKey: 'user:event' }, { sleepFor: '10s' }),\n fn: async (_, ctx) => {\n console.log('triggered by condition', ctx.triggers());\n\n return {\n Completed: true,\n };\n },\n});\n", - "source": "out/typescript/dag_match_condition/workflow.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/deep/index.ts b/frontend/docs/lib/generated/snips/typescript/deep/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/deep/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/deep/run.ts b/frontend/docs/lib/generated/snips/typescript/deep/run.ts deleted file mode 100644 index 612b50114..000000000 --- a/frontend/docs/lib/generated/snips/typescript/deep/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { parent } from './workflow';\n\nasync function main() {\n const res = await parent.run({\n Message: 'hello',\n N: 5,\n });\n\n console.log(res.parent.Sum);\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": "out/typescript/deep/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/deep/worker.ts b/frontend/docs/lib/generated/snips/typescript/deep/worker.ts deleted file mode 100644 index 81c0bab4f..000000000 --- a/frontend/docs/lib/generated/snips/typescript/deep/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { parent, child1, child2, child3, child4, child5 } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('simple-worker', {\n workflows: [parent, child1, child2, child3, child4, child5],\n slots: 5000,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": 
"out/typescript/deep/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/deep/workflow.ts b/frontend/docs/lib/generated/snips/typescript/deep/workflow.ts deleted file mode 100644 index 0ff5704e0..000000000 --- a/frontend/docs/lib/generated/snips/typescript/deep/workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n N: number;\n};\n\ntype Output = {\n transformer: {\n Sum: number;\n };\n};\n\nexport const child1 = hatchet.workflow({\n name: 'child1',\n});\n\nchild1.task({\n name: 'transformer',\n fn: () => {\n sleep(15);\n return {\n Sum: 1,\n };\n },\n});\n\nexport const child2 = hatchet.workflow({\n name: 'child2',\n});\n\nchild2.task({\n name: 'transformer',\n fn: async (input, ctx) => {\n const count = input.N;\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child1, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n sleep(15);\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n\nexport const child3 = hatchet.workflow({\n name: 'child3',\n});\n\nchild3.task({\n name: 'transformer',\n fn: async (input, ctx) => {\n const count = input.N;\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child2, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n\nexport const child4 = hatchet.workflow({\n name: 'child4',\n});\n\nchild4.task({\n name: 'transformer',\n fn: async (input, ctx) => {\n const count = input.N;\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child3, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n\nexport const child5 = hatchet.workflow({\n name: 'child5',\n});\n\nchild5.task({\n name: 'transformer',\n fn: async (input, ctx) => {\n const count = input.N;\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child4, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n\nexport const parent = hatchet.workflow({\n name: 'parent',\n});\n\nparent.task({\n name: 'parent',\n fn: async (input, ctx) => {\n const count = input.N; // Random number between 2-4\n const promises = Array(count)\n .fill(null)\n .map(() => ({ workflow: child5, input }));\n\n const results = await ctx.bulkRunChildren(promises);\n\n return {\n Sum: results.reduce((acc, r) => acc + r.transformer.Sum, 0),\n };\n },\n});\n", - "source": "out/typescript/deep/workflow.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/durable-event/event.ts b/frontend/docs/lib/generated/snips/typescript/durable-event/event.ts deleted file mode 100644 index 1d38b6bf3..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-event/event.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from 
'../hatchet-client';\n\nasync function main() {\n const event = await hatchet.events.push('user:update', {\n userId: '1234',\n });\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - "source": "out/typescript/durable-event/event.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/durable-event/index.ts b/frontend/docs/lib/generated/snips/typescript/durable-event/index.ts deleted file mode 100644 index 56ecba865..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-event/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import event from './event'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { event } -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/durable-event/run.ts b/frontend/docs/lib/generated/snips/typescript/durable-event/run.ts deleted file mode 100644 index 6b82013bc..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-event/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { durableEvent } from './workflow';\n\nasync function main() {\n const timeStart = Date.now();\n const res = await durableEvent.run({});\n const timeEnd = Date.now();\n console.log(`Time taken: ${timeEnd - timeStart}ms`);\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - "source": "out/typescript/durable-event/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/durable-event/worker.ts b/frontend/docs/lib/generated/snips/typescript/durable-event/worker.ts deleted file mode 100644 index 936adca8e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-event/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { durableEvent } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('durable-event-worker', {\n workflows: [durableEvent],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/durable-event/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/durable-event/workflow.ts b/frontend/docs/lib/generated/snips/typescript/durable-event/workflow.ts deleted file mode 100644 index 935bd9ada..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-event/workflow.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\n// > Durable Event\nexport const durableEvent = hatchet.durableTask({\n name: 'durable-event',\n executionTimeout: '10m',\n fn: async (_, ctx) => {\n const res = ctx.waitFor({\n eventKey: 'user:update',\n });\n\n console.log('res', res);\n\n return {\n Value: 'done',\n };\n 
},\n});\n\nexport const durableEventWithFilter = hatchet.durableTask({\n name: 'durable-event-with-filter',\n executionTimeout: '10m',\n fn: async (_, ctx) => {\n // > Durable Event With Filter\n const res = ctx.waitFor({\n eventKey: 'user:update',\n expression: \"input.userId == '1234'\",\n });\n\n console.log('res', res);\n\n return {\n Value: 'done',\n };\n },\n});\n", - "source": "out/typescript/durable-event/workflow.ts", - "blocks": { - "durable_event": { - "start": 5, - "stop": 19 - }, - "durable_event_with_filter": { - "start": 26, - "stop": 29 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/durable-sleep/event.ts b/frontend/docs/lib/generated/snips/typescript/durable-sleep/event.ts deleted file mode 100644 index f429a1d1f..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-sleep/event.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\nasync function main() {\n const event = await hatchet.events.push('user:event', {\n Data: { Hello: 'World' },\n });\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - "source": "out/typescript/durable-sleep/event.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/durable-sleep/index.ts b/frontend/docs/lib/generated/snips/typescript/durable-sleep/index.ts deleted file mode 100644 index 56ecba865..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-sleep/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import event from './event'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { event } -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/durable-sleep/run.ts b/frontend/docs/lib/generated/snips/typescript/durable-sleep/run.ts deleted file mode 100644 index c685b7211..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-sleep/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { durableSleep } from './workflow';\n\nasync function main() {\n const timeStart = Date.now();\n const res = await durableSleep.run({});\n const timeEnd = Date.now();\n console.log(`Time taken: ${timeEnd - timeStart}ms`);\n}\n\nif (require.main === module) {\n main()\n .then(() => process.exit(0))\n .catch((error) => {\n console.error('Error:', error);\n process.exit(1);\n });\n}\n", - "source": "out/typescript/durable-sleep/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/durable-sleep/worker.ts b/frontend/docs/lib/generated/snips/typescript/durable-sleep/worker.ts deleted file mode 100644 index fd4935585..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-sleep/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { durableSleep } from './workflow';\n\nasync function main() {\n const worker = await 
hatchet.worker('sleep-worker', {\n workflows: [durableSleep],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/durable-sleep/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/durable-sleep/workflow.ts b/frontend/docs/lib/generated/snips/typescript/durable-sleep/workflow.ts deleted file mode 100644 index 8a00a8aa8..000000000 --- a/frontend/docs/lib/generated/snips/typescript/durable-sleep/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// import sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\nexport const durableSleep = hatchet.workflow({\n name: 'durable-sleep',\n});\n\n// > Durable Sleep\ndurableSleep.durableTask({\n name: 'durable-sleep',\n executionTimeout: '10m',\n fn: async (_, ctx) => {\n console.log('sleeping for 5s');\n const sleepRes = await ctx.sleepFor('5s');\n console.log('done sleeping for 5s', sleepRes);\n\n return {\n Value: 'done',\n };\n },\n});\n", - "source": "out/typescript/durable-sleep/workflow.ts", - "blocks": { - "durable_sleep": { - "start": 9, - "stop": 21 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/hatchet-client.ts b/frontend/docs/lib/generated/snips/typescript/hatchet-client.ts deleted file mode 100644 index 334fd7d0b..000000000 --- a/frontend/docs/lib/generated/snips/typescript/hatchet-client.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { HatchetClient } from '@hatchet-dev/typescript-sdk/v1';\n\nexport const hatchet = HatchetClient.init();\n", - "source": "out/typescript/hatchet-client.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/high-memory/child-worker.ts b/frontend/docs/lib/generated/snips/typescript/high-memory/child-worker.ts deleted file mode 100644 index b8390d6eb..000000000 --- a/frontend/docs/lib/generated/snips/typescript/high-memory/child-worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// ❓ Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { child } from './workflow-with-child';\n\nasync function main() {\n const worker = await hatchet.worker('child-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [child],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 1000,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/high-memory/child-worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/high-memory/index.ts b/frontend/docs/lib/generated/snips/typescript/high-memory/index.ts deleted file mode 100644 index 55adbb2a9..000000000 --- a/frontend/docs/lib/generated/snips/typescript/high-memory/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import child_worker from './child-worker'; -import parent_worker from './parent-worker'; -import run from './run'; -import workflow_with_child from './workflow-with-child'; - -export { 
child_worker } -export { parent_worker } -export { run } -export { workflow_with_child } diff --git a/frontend/docs/lib/generated/snips/typescript/high-memory/parent-worker.ts b/frontend/docs/lib/generated/snips/typescript/high-memory/parent-worker.ts deleted file mode 100644 index fc7e7bb0a..000000000 --- a/frontend/docs/lib/generated/snips/typescript/high-memory/parent-worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// ❓ Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { parent } from './workflow-with-child';\n\nasync function main() {\n const worker = await hatchet.worker('parent-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [parent],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 20,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/high-memory/parent-worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/high-memory/run.ts b/frontend/docs/lib/generated/snips/typescript/high-memory/run.ts deleted file mode 100644 index 9865b4afd..000000000 --- a/frontend/docs/lib/generated/snips/typescript/high-memory/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { parent } from './workflow-with-child';\n\nasync function main() {\n // ❓ Running a Task\n const res = await parent.run({\n Message: 'HeLlO WoRlD',\n });\n\n // 👀 Access the results of the Task\n console.log(res.TransformedMessage);\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/high-memory/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/high-memory/workflow-with-child.ts b/frontend/docs/lib/generated/snips/typescript/high-memory/workflow-with-child.ts deleted file mode 100644 index 93f80486c..000000000 --- a/frontend/docs/lib/generated/snips/typescript/high-memory/workflow-with-child.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// ❓ Declaring a Task\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type ChildInput = {\n Message: string;\n};\n\nexport type ParentInput = {\n Message: string;\n};\n\nexport const child = hatchet.task({\n name: 'child',\n fn: (input: ChildInput) => {\n const largePayload = new Array(1024 * 1024).fill('a').join('');\n\n return {\n TransformedMessage: largePayload,\n };\n },\n});\n\nexport const parent = hatchet.task({\n name: 'parent',\n timeout: '10m',\n fn: async (input: ParentInput, ctx) => {\n // lets generate large payload 1 mb\n const largePayload = new Array(1024 * 1024).fill('a').join('');\n\n // Send the large payload 100 times\n const num = 1000;\n\n const children = [];\n for (let i = 0; i < num; i += 1) {\n children.push({\n workflow: child,\n input: {\n Message: `Iteration ${i + 1}: ${largePayload}`,\n },\n });\n }\n\n await ctx.bulkRunNoWaitChildren(children);\n\n return {\n TransformedMessage: 'done',\n };\n },\n});\n\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - "source": 
"out/typescript/high-memory/workflow-with-child.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/index.ts b/frontend/docs/lib/generated/snips/typescript/index.ts deleted file mode 100644 index 51d8870cf..000000000 --- a/frontend/docs/lib/generated/snips/typescript/index.ts +++ /dev/null @@ -1,59 +0,0 @@ -import hatchet_client from './hatchet-client'; -import * as cancellations from './cancellations'; -import * as child_workflows from './child_workflows'; -import * as concurrency_rr from './concurrency-rr'; -import * as dag from './dag'; -import * as dag_match_condition from './dag_match_condition'; -import * as deep from './deep'; -import * as durable_event from './durable-event'; -import * as durable_sleep from './durable-sleep'; -import * as high_memory from './high-memory'; -import * as inferred_typing from './inferred-typing'; -import * as landing_page from './landing_page'; -import * as legacy from './legacy'; -import * as migration_guides from './migration-guides'; -import * as multiple_wf_concurrency from './multiple_wf_concurrency'; -import * as non_retryable from './non_retryable'; -import * as on_cron from './on_cron'; -import * as on_event from './on_event'; -import * as on_failure from './on_failure'; -import * as on_success from './on_success'; -import * as priority from './priority'; -import * as quickstart from './quickstart'; -import * as rate_limit from './rate_limit'; -import * as retries from './retries'; -import * as simple from './simple'; -import * as sticky from './sticky'; -import * as streaming from './streaming'; -import * as timeouts from './timeouts'; -import * as with_timeouts from './with_timeouts'; - -export { hatchet_client } -export { cancellations }; -export { child_workflows }; -export { concurrency_rr }; -export { dag }; -export { dag_match_condition }; -export { deep }; -export { durable_event }; -export { durable_sleep }; -export { high_memory }; -export { inferred_typing }; -export { landing_page }; -export { legacy }; -export { migration_guides }; -export { multiple_wf_concurrency }; -export { non_retryable }; -export { on_cron }; -export { on_event }; -export { on_failure }; -export { on_success }; -export { priority }; -export { quickstart }; -export { rate_limit }; -export { retries }; -export { simple }; -export { sticky }; -export { streaming }; -export { timeouts }; -export { with_timeouts }; diff --git a/frontend/docs/lib/generated/snips/typescript/inferred-typing/index.ts b/frontend/docs/lib/generated/snips/typescript/inferred-typing/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/inferred-typing/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/inferred-typing/run.ts b/frontend/docs/lib/generated/snips/typescript/inferred-typing/run.ts deleted file mode 100644 index db360438e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/inferred-typing/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { crazyWorkflow, declaredType, inferredType, inferredTypeDurable } from './workflow';\n\nasync function main() {\n const declaredTypeRun = declaredType.run({\n Message: 'hello',\n 
});\n\n const inferredTypeRun = inferredType.run({\n Message: 'hello',\n });\n\n const crazyWorkflowRun = crazyWorkflow.run({\n Message: 'hello',\n });\n\n const inferredTypeDurableRun = inferredTypeDurable.run({\n Message: 'Durable Task',\n });\n\n const [declaredTypeResult, inferredTypeResult, inferredTypeDurableResult, crazyWorkflowResult] =\n await Promise.all([declaredTypeRun, inferredTypeRun, inferredTypeDurableRun, crazyWorkflowRun]);\n\n console.log('declaredTypeResult', declaredTypeResult);\n console.log('inferredTypeResult', inferredTypeResult);\n console.log('inferredTypeDurableResult', inferredTypeDurableResult);\n console.log('crazyWorkflowResult', crazyWorkflowResult);\n console.log('declaredTypeResult.TransformedMessage', declaredTypeResult.TransformedMessage);\n console.log('inferredTypeResult.TransformedMessage', inferredTypeResult.TransformedMessage);\n console.log(\n 'inferredTypeDurableResult.TransformedMessage',\n inferredTypeDurableResult.TransformedMessage\n );\n console.log('crazyWorkflowResult.TransformedMessage', crazyWorkflowResult.TransformedMessage);\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/inferred-typing/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/inferred-typing/worker.ts b/frontend/docs/lib/generated/snips/typescript/inferred-typing/worker.ts deleted file mode 100644 index 04f553f89..000000000 --- a/frontend/docs/lib/generated/snips/typescript/inferred-typing/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { declaredType, inferredType, inferredTypeDurable, crazyWorkflow } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('simple-worker', {\n workflows: [declaredType, inferredType, inferredTypeDurable, crazyWorkflow],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/inferred-typing/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/inferred-typing/workflow.ts b/frontend/docs/lib/generated/snips/typescript/inferred-typing/workflow.ts deleted file mode 100644 index cc9b29599..000000000 --- a/frontend/docs/lib/generated/snips/typescript/inferred-typing/workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n};\n\ntype SimpleOutput = {\n TransformedMessage: string;\n};\n\nexport const declaredType = hatchet.task({\n name: 'declared-type',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nexport const inferredType = hatchet.task({\n name: 'inferred-type',\n fn: (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n\nexport const inferredTypeDurable = hatchet.durableTask({\n name: 'inferred-type-durable',\n fn: async (input: SimpleInput, ctx) => {\n // await ctx.sleepFor('5s');\n\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n\nexport const crazyWorkflow = hatchet.workflow({\n name: 'crazy-workflow',\n});\n\nconst step1 = 
crazyWorkflow.task(declaredType);\n// crazyWorkflow.task(inferredTypeDurable);\n\ncrazyWorkflow.task({\n parents: [step1],\n ...inferredType.taskDef,\n});\n", - "source": "out/typescript/inferred-typing/workflow.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/landing_page/durable-excution.ts b/frontend/docs/lib/generated/snips/typescript/landing_page/durable-excution.ts deleted file mode 100644 index 5a4e9cd2d..000000000 --- a/frontend/docs/lib/generated/snips/typescript/landing_page/durable-excution.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { Or } from '@hatchet-dev/typescript-sdk/v1/conditions';\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\nasync function main() {\n // > Declaring a Durable Task\n const simple = hatchet.durableTask({\n name: 'simple',\n fn: async (input: SimpleInput, ctx) => {\n await ctx.waitFor(\n Or(\n {\n eventKey: 'user:pay',\n expression: 'input.Status == \"PAID\"',\n },\n {\n sleepFor: '24h',\n }\n )\n );\n\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n });\n\n // > Running a Task\n const result = await simple.run({ Message: 'Hello, World!' });\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/landing_page/durable-excution.ts", - "blocks": { - "declaring_a_durable_task": { - "start": 10, - "stop": 29 - }, - "running_a_task": { - "start": 32, - "stop": 32 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/landing_page/event-signaling.ts b/frontend/docs/lib/generated/snips/typescript/landing_page/event-signaling.ts deleted file mode 100644 index 31c684bbd..000000000 --- a/frontend/docs/lib/generated/snips/typescript/landing_page/event-signaling.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// > Trigger on an event\nexport const simple = hatchet.task({\n name: 'simple',\n onEvents: ['user:created'],\n fn: (input: SimpleInput) => {\n // ...\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - "source": "out/typescript/landing_page/event-signaling.ts", - "blocks": { - "trigger_on_an_event": { - "start": 9, - "stop": 18 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/landing_page/flow-control.ts b/frontend/docs/lib/generated/snips/typescript/landing_page/flow-control.ts deleted file mode 100644 index 40b925873..000000000 --- a/frontend/docs/lib/generated/snips/typescript/landing_page/flow-control.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/protoc/v1/workflows';\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// > Process what you can handle\nexport const simple = 
hatchet.task({\n name: 'simple',\n concurrency: {\n expression: 'input.user_id',\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n maxRuns: 1,\n },\n rateLimits: [\n {\n key: 'api_throttle',\n units: 1,\n },\n ],\n fn: (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - "source": "out/typescript/landing_page/flow-control.ts", - "blocks": { - "process_what_you_can_handle": { - "start": 10, - "stop": 28 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/landing_page/index.ts b/frontend/docs/lib/generated/snips/typescript/landing_page/index.ts deleted file mode 100644 index 7aeaa39e5..000000000 --- a/frontend/docs/lib/generated/snips/typescript/landing_page/index.ts +++ /dev/null @@ -1,13 +0,0 @@ -import durable_excution from './durable-excution'; -import event_signaling from './event-signaling'; -import flow_control from './flow-control'; -import queues from './queues'; -import scheduling from './scheduling'; -import task_routing from './task-routing'; - -export { durable_excution } -export { event_signaling } -export { flow_control } -export { queues } -export { scheduling } -export { task_routing } diff --git a/frontend/docs/lib/generated/snips/typescript/landing_page/queues.ts b/frontend/docs/lib/generated/snips/typescript/landing_page/queues.ts deleted file mode 100644 index 0e81f4f93..000000000 --- a/frontend/docs/lib/generated/snips/typescript/landing_page/queues.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\nasync function main() {\n // > Declaring a Task\n const simple = hatchet.task({\n name: 'simple',\n fn: (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n });\n\n // > Running a Task\n const result = await simple.run({ Message: 'Hello, World!' 
});\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/landing_page/queues.ts", - "blocks": { - "declaring_a_task": { - "start": 9, - "stop": 16 - }, - "running_a_task": { - "start": 19, - "stop": 19 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/landing_page/scheduling.ts b/frontend/docs/lib/generated/snips/typescript/landing_page/scheduling.ts deleted file mode 100644 index de8772a3a..000000000 --- a/frontend/docs/lib/generated/snips/typescript/landing_page/scheduling.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { simple } from './flow-control';\n\n// > Schedules and Crons\nconst tomorrow = new Date(Date.now() + 1000 * 60 * 60 * 24);\nconst scheduled = simple.schedule(tomorrow, {\n Message: 'Hello, World!',\n});\n\nconst cron = simple.cron('every-day', '0 0 * * *', {\n Message: 'Hello, World!',\n});\n", - "source": "out/typescript/landing_page/scheduling.ts", - "blocks": { - "schedules_and_crons": { - "start": 4, - "stop": 11 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/landing_page/task-routing.ts b/frontend/docs/lib/generated/snips/typescript/landing_page/task-routing.ts deleted file mode 100644 index 0ee69781b..000000000 --- a/frontend/docs/lib/generated/snips/typescript/landing_page/task-routing.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// > Route tasks to workers with matching labels\nexport const simple = hatchet.task({\n name: 'simple',\n desiredWorkerLabels: {\n cpu: {\n value: '2x',\n },\n },\n fn: (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nhatchet.worker('task-routing-worker', {\n workflows: [simple],\n labels: {\n cpu: process.env.CPU_LABEL,\n },\n});\n", - "source": "out/typescript/landing_page/task-routing.ts", - "blocks": { - "route_tasks_to_workers_with_matching_labels": { - "start": 9, - "stop": 28 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/legacy/index.ts b/frontend/docs/lib/generated/snips/typescript/legacy/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/legacy/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/legacy/run.ts b/frontend/docs/lib/generated/snips/typescript/legacy/run.ts deleted file mode 100644 index 6b6a53195..000000000 --- a/frontend/docs/lib/generated/snips/typescript/legacy/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n const res = await hatchet.run<{ Message: string }, { step2: string }>(simple, {\n Message: 'hello',\n });\n\n console.log(res.step2);\n}\n\nif 
(require.main === module) {\n main();\n}\n", - "source": "out/typescript/legacy/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/legacy/worker.ts b/frontend/docs/lib/generated/snips/typescript/legacy/worker.ts deleted file mode 100644 index facf872d3..000000000 --- a/frontend/docs/lib/generated/snips/typescript/legacy/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('legacy-worker', {\n workflows: [simple],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/legacy/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/legacy/workflow.ts b/frontend/docs/lib/generated/snips/typescript/legacy/workflow.ts deleted file mode 100644 index bb0702341..000000000 --- a/frontend/docs/lib/generated/snips/typescript/legacy/workflow.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { Workflow } from '@hatchet-dev/typescript-sdk/workflow';\n\nexport const simple: Workflow = {\n id: 'legacy-workflow',\n description: 'test',\n on: {\n event: 'user:create',\n },\n steps: [\n {\n name: 'step1',\n run: async (ctx) => {\n const input = ctx.workflowInput();\n\n return { step1: `original input: ${input.Message}` };\n },\n },\n {\n name: 'step2',\n parents: ['step1'],\n run: (ctx) => {\n const step1Output = ctx.stepOutput('step1');\n\n return { step2: `step1 output: ${step1Output.step1}` };\n },\n },\n ],\n};\n", - "source": "out/typescript/legacy/workflow.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/migration-guides/hatchet-client.ts b/frontend/docs/lib/generated/snips/typescript/migration-guides/hatchet-client.ts deleted file mode 100644 index 52193a5a8..000000000 --- a/frontend/docs/lib/generated/snips/typescript/migration-guides/hatchet-client.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import HatchetClient from '@hatchet-dev/typescript-sdk/sdk';\n\nexport const hatchet = HatchetClient.init();\n", - "source": "out/typescript/migration-guides/hatchet-client.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/migration-guides/index.ts b/frontend/docs/lib/generated/snips/typescript/migration-guides/index.ts deleted file mode 100644 index d47a3d13a..000000000 --- a/frontend/docs/lib/generated/snips/typescript/migration-guides/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import hatchet_client from './hatchet-client'; -import mergent from './mergent'; - -export { hatchet_client } -export { mergent } diff --git a/frontend/docs/lib/generated/snips/typescript/migration-guides/mergent.ts b/frontend/docs/lib/generated/snips/typescript/migration-guides/mergent.ts deleted file mode 100644 index 579c8134e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/migration-guides/mergent.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { Snippet } from 
'@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from './hatchet-client';\n\nfunction processImage(\n imageUrl: string,\n filters: string[]\n): Promise<{ url: string; size: number; format: string }> {\n // Do some image processing\n return Promise.resolve({ url: imageUrl, size: 100, format: 'png' });\n}\n// > Before (Mergent)\nexport async function processImageTask(req: { body: { imageUrl: string; filters: string[] } }) {\n const { imageUrl, filters } = req.body;\n try {\n const result = await processImage(imageUrl, filters);\n return { success: true, processedUrl: result.url };\n } catch (error) {\n console.error('Image processing failed:', error);\n throw error;\n }\n}\n\n// > After (Hatchet)\ntype ImageProcessInput = {\n imageUrl: string;\n filters: string[];\n};\n\ntype ImageProcessOutput = {\n processedUrl: string;\n metadata: {\n size: number;\n format: string;\n appliedFilters: string[];\n };\n};\n\nexport const imageProcessor = hatchet.task({\n name: 'image-processor',\n retries: 3,\n executionTimeout: '10m',\n fn: async ({ imageUrl, filters }: ImageProcessInput): Promise<ImageProcessOutput> => {\n // Do some image processing\n const result = await processImage(imageUrl, filters);\n\n if (!result.url) throw new Error('Processing failed to generate URL');\n\n return {\n processedUrl: result.url,\n metadata: {\n size: result.size,\n format: result.format,\n appliedFilters: filters,\n },\n };\n },\n});\n\nasync function run() {\n // > Running a task (Mergent)\n const options = {\n method: 'POST',\n headers: { Authorization: 'Bearer ', 'Content-Type': 'application/json' },\n body: JSON.stringify({\n name: '4cf95241-fa19-47ef-8a67-71e483747649',\n queue: 'default',\n request: {\n url: 'https://example.com',\n headers: { Authorization: 'fake-secret-token', 'Content-Type': 'application/json' },\n body: 'Hello, world!',\n },\n }),\n };\n\n fetch('https://api.mergent.co/v2/tasks', options)\n .then((response) => response.json())\n .then((response) => console.log(response))\n .catch((err) => console.error(err));\n\n // > Running a task (Hatchet)\n const result = await imageProcessor.run({\n imageUrl: 'https://example.com/image.png',\n filters: ['blur'],\n });\n\n // you can await fully typed results\n console.log(result);\n}\n\nasync function schedule() {\n // > Scheduling tasks (Mergent)\n const options = {\n // same options as before\n body: JSON.stringify({\n // same body as before\n delay: '5m',\n }),\n };\n\n // > Scheduling tasks (Hatchet)\n // Schedule the task to run at a specific time\n const runAt = new Date(Date.now() + 1000 * 60 * 60 * 24);\n imageProcessor.schedule(runAt, {\n imageUrl: 'https://example.com/image.png',\n filters: ['blur'],\n });\n\n // Schedule the task to run every hour\n imageProcessor.cron('run-hourly', '0 * * * *', {\n imageUrl: 'https://example.com/image.png',\n filters: ['blur'],\n });\n}\n", - "source": "out/typescript/migration-guides/mergent.ts", - "blocks": { - "before_mergent": { - "start": 11, - "stop": 20 - }, - "after_hatchet": { - "start": 23, - "stop": 56 - }, - "running_a_task_mergent": { - "start": 60, - "stop": 77 - }, - "running_a_task_hatchet": { - "start": 80, - "stop": 86 - }, - "scheduling_tasks_mergent": { - "start": 91, - "stop": 97 - }, - "scheduling_tasks_hatchet": { - "start": 100, - "stop": 111 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/index.ts 
b/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/run.ts b/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/run.ts deleted file mode 100644 index 72ce949e9..000000000 --- a/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { multiConcurrency } from './workflow';\n\nasync function main() {\n const res = await multiConcurrency.run([\n {\n Message: 'Hello World',\n GroupKey: 'A',\n },\n {\n Message: 'Goodbye Moon',\n GroupKey: 'A',\n },\n {\n Message: 'Hello World B',\n GroupKey: 'B',\n },\n ]);\n\n console.log(res[0]['to-lower'].TransformedMessage);\n console.log(res[1]['to-lower'].TransformedMessage);\n console.log(res[2]['to-lower'].TransformedMessage);\n}\n\nif (require.main === module) {\n main().then(() => process.exit(0));\n}\n", - "source": "out/typescript/multiple_wf_concurrency/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/worker.ts b/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/worker.ts deleted file mode 100644 index 29e5481e7..000000000 --- a/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { multiConcurrency } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('simple-concurrency-worker', {\n workflows: [multiConcurrency],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/multiple_wf_concurrency/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/workflow.ts b/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/workflow.ts deleted file mode 100644 index fa38c3dee..000000000 --- a/frontend/docs/lib/generated/snips/typescript/multiple_wf_concurrency/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { ConcurrencyLimitStrategy } from '@hatchet-dev/typescript-sdk/workflow';\nimport { hatchet } from '../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n GroupKey: string;\n};\n\ntype SimpleOutput = {\n 'to-lower': {\n TransformedMessage: string;\n };\n};\n\nconst sleep = (ms: number) =>\n new Promise((resolve) => {\n setTimeout(resolve, ms);\n });\n\n// > Concurrency Strategy With Key\nexport const multiConcurrency = hatchet.workflow({\n name: 'simple-concurrency',\n concurrency: [\n {\n maxRuns: 1,\n limitStrategy: ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.GroupKey',\n },\n {\n maxRuns: 1,\n limitStrategy: 
ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n expression: 'input.UserId',\n },\n ],\n});\n\nmultiConcurrency.task({\n name: 'to-lower',\n fn: async (input) => {\n await sleep(Math.floor(Math.random() * (1000 - 200 + 1)) + 200);\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - "source": "out/typescript/multiple_wf_concurrency/workflow.ts", - "blocks": { - "concurrency_strategy_with_key": { - "start": 21, - "stop": 35 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/non_retryable/index.ts b/frontend/docs/lib/generated/snips/typescript/non_retryable/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/non_retryable/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/non_retryable/run.ts b/frontend/docs/lib/generated/snips/typescript/non_retryable/run.ts deleted file mode 100644 index 222ec96b5..000000000 --- a/frontend/docs/lib/generated/snips/typescript/non_retryable/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { nonRetryableWorkflow } from './workflow';\n\nasync function main() {\n const res = await nonRetryableWorkflow.runNoWait({});\n\n console.log(res);\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/non_retryable/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/non_retryable/worker.ts b/frontend/docs/lib/generated/snips/typescript/non_retryable/worker.ts deleted file mode 100644 index 1946156c3..000000000 --- a/frontend/docs/lib/generated/snips/typescript/non_retryable/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { nonRetryableWorkflow } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('no-retry-worker', {\n workflows: [nonRetryableWorkflow],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/non_retryable/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/non_retryable/workflow.ts b/frontend/docs/lib/generated/snips/typescript/non_retryable/workflow.ts deleted file mode 100644 index 185cea05e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/non_retryable/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { NonRetryableError } from '@hatchet-dev/typescript-sdk/v1/task';\nimport { hatchet } from '../hatchet-client';\n\nexport const nonRetryableWorkflow = hatchet.workflow({\n name: 'no-retry-workflow',\n});\n\n// > Non-retrying task\nconst shouldNotRetry = nonRetryableWorkflow.task({\n name: 'should-not-retry',\n fn: () => {\n throw new NonRetryableError('This task should not retry');\n },\n retries: 1,\n});\n\n// Create a task that should retry\nconst shouldRetryWrongErrorType = 
nonRetryableWorkflow.task({\n name: 'should-retry-wrong-error-type',\n fn: () => {\n throw new Error('This task should not retry');\n },\n retries: 1,\n});\n\nconst shouldNotRetrySuccessfulTask = nonRetryableWorkflow.task({\n name: 'should-not-retry-successful-task',\n fn: () => {},\n});\n", - "source": "out/typescript/non_retryable/workflow.ts", - "blocks": { - "non_retrying_task": { - "start": 9, - "stop": 15 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_cron/index.ts b/frontend/docs/lib/generated/snips/typescript/on_cron/index.ts deleted file mode 100644 index ff57fe967..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_cron/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import worker from './worker'; -import workflow from './workflow'; - -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/on_cron/worker.ts b/frontend/docs/lib/generated/snips/typescript/on_cron/worker.ts deleted file mode 100644 index 1c52a4d10..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_cron/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { onCron } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('on-cron-worker', {\n workflows: [onCron],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/on_cron/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_cron/workflow.ts b/frontend/docs/lib/generated/snips/typescript/on_cron/workflow.ts deleted file mode 100644 index 936965786..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_cron/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\nexport type Input = {\n Message: string;\n};\n\ntype OnCronOutput = {\n job: {\n TransformedMessage: string;\n };\n};\n\n// > Run Workflow on Cron\nexport const onCron = hatchet.workflow({\n name: 'on-cron-workflow',\n on: {\n // 👀 add a cron expression to run the workflow every 15 minutes\n cron: '*/15 * * * *',\n },\n});\n\nonCron.task({\n name: 'job',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - "source": "out/typescript/on_cron/workflow.ts", - "blocks": { - "run_workflow_on_cron": { - "start": 14, - "stop": 20 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/event.e2e.ts b/frontend/docs/lib/generated/snips/typescript/on_event/event.e2e.ts deleted file mode 100644 index 83c023a1e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_event/event.e2e.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import sleep from '@hatchet-dev/typescript-sdk-dev/typescript-sdk/util/sleep';\nimport { randomUUID } from 'crypto';\nimport { Event } from '@hatchet-dev/typescript-sdk-dev/typescript-sdk/protoc/events';\nimport { SIMPLE_EVENT, lower, Input } from './workflow';\nimport { hatchet } from '../hatchet-client';\nimport { 
Worker } from '../../client/worker/worker';\n\nxdescribe('events-e2e', () => {\n let worker: Worker;\n let testRunId: string;\n\n  beforeEach(async () => {\n    testRunId = randomUUID();\n\n    worker = await hatchet.worker('event-worker');\n    await worker.registerWorkflow(lower);\n\n    void worker.start();\n  });\n\n  afterAll(async () => {\n    await worker.stop();\n    await sleep(2000);\n  });\n\n  async function setupEventFilter(expression?: string, payload: Record<string, any> = {}) {\n    const finalExpression =\n      expression || `input.ShouldSkip == false && payload.testRunId == '${testRunId}'`;\n\n    const workflowId = (await hatchet.workflows.get(lower.name)).metadata.id;\n\n    const filter = await hatchet.filters.create({\n      workflowId,\n      expression: finalExpression,\n      scope: testRunId,\n      payload: { testRunId, ...payload },\n    });\n\n    return async () => {\n      await hatchet.filters.delete(filter.metadata.id);\n    };\n  }\n\n  // Helper function to wait for events to process and fetch runs\n  async function waitForEventsToProcess(events: Event[]): Promise<Record<string, any[]>> {\n    await sleep(3000);\n\n    const persisted = (await hatchet.events.list({ limit: 100 })).rows || [];\n\n    // Ensure all our events are persisted\n    const eventIds = new Set(events.map((e) => e.eventId));\n    const persistedIds = new Set(persisted.map((e) => e.metadata.id));\n    expect(Array.from(eventIds).every((id) => persistedIds.has(id))).toBeTruthy();\n\n    let attempts = 0;\n    const maxAttempts = 15;\n    const eventToRuns: Record<string, any[]> = {};\n\n    while (true) {\n      console.log('Waiting for event runs to complete...');\n      if (attempts > maxAttempts) {\n        console.log('Timed out waiting for event runs to complete.');\n        return {};\n      }\n\n      attempts += 1;\n\n      // For each event, fetch its runs\n      const runsPromises = events.map(async (event) => {\n        const runs = await hatchet.runs.list({\n          triggeringEventExternalId: event.eventId,\n        });\n\n        // Extract metadata from event\n        const meta = event.additionalMetadata ? JSON.parse(event.additionalMetadata) : {};\n\n        const payload = event.payload ? 
JSON.parse(event.payload) : {};\n\n        return {\n          event: {\n            id: event.eventId,\n            payload,\n            meta,\n            shouldHaveRuns: Boolean(meta.should_have_runs),\n            testRunId: meta.test_run_id,\n          },\n          runs: runs.rows || [],\n        };\n      });\n\n      const eventRuns = await Promise.all(runsPromises);\n\n      // If all events have no runs yet, wait and retry\n      if (eventRuns.every(({ runs }) => runs.length === 0)) {\n        await sleep(1000);\n\n        continue;\n      }\n\n      // Store runs by event ID\n      for (const { event, runs } of eventRuns) {\n        eventToRuns[event.id] = runs;\n      }\n\n      // Check if any runs are still in progress\n      const anyInProgress = Object.values(eventToRuns).some((runs) =>\n        runs.some((run) => run.status === 'QUEUED' || run.status === 'RUNNING')\n      );\n\n      if (anyInProgress) {\n        await sleep(1000);\n\n        continue;\n      }\n\n      break;\n    }\n\n    return eventToRuns;\n  }\n\n  // Helper to verify runs match expectations\n  function verifyEventRuns(eventData: any, runs: any[]) {\n    if (eventData.shouldHaveRuns) {\n      expect(runs.length).toBeGreaterThan(0);\n    } else {\n      expect(runs.length).toBe(0);\n    }\n  }\n\n  // Helper to create bulk push event objects\n  function createBulkPushEvent({\n    index = 1,\n    ShouldSkip = false,\n    shouldHaveRuns = true,\n    key = SIMPLE_EVENT,\n    payload = {},\n    scope = null,\n  }: {\n    index?: number;\n    ShouldSkip?: boolean;\n    shouldHaveRuns?: boolean;\n    key?: string;\n    payload?: Record<string, any>;\n    scope?: string | null;\n  }) {\n    return {\n      key,\n      payload: {\n        ShouldSkip,\n        Message: `This is event ${index}`,\n        ...payload,\n      },\n      additionalMetadata: {\n        should_have_runs: shouldHaveRuns,\n        test_run_id: testRunId,\n        key,\n        index,\n      },\n      scope: scope || undefined,\n    };\n  }\n\n  // Helper to create payload object\n  function createEventPayload(ShouldSkip: boolean): Input {\n    return { ShouldSkip, Message: 'This is event 1' };\n  }\n\n  it('should push an event', async () => {\n    const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));\n    expect(event.eventId).toBeTruthy();\n  }, 10000);\n\n  it('should push an event asynchronously', async () => {\n    const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));\n    expect(event.eventId).toBeTruthy();\n  }, 10000);\n\n  it('should bulk push events', async () => {\n    const events = [\n      {\n        key: SIMPLE_EVENT,\n        payload: { Message: 'This is event 1', ShouldSkip: false },\n        additionalMetadata: { source: 'test', user_id: 'user123' },\n      },\n      {\n        key: SIMPLE_EVENT,\n        payload: { Message: 'This is event 2', ShouldSkip: false },\n        additionalMetadata: { source: 'test', user_id: 'user456' },\n      },\n      {\n        key: SIMPLE_EVENT,\n        payload: { Message: 'This is event 3', ShouldSkip: false },\n        additionalMetadata: { source: 'test', user_id: 'user789' },\n      },\n    ];\n\n    const result = await hatchet.events.bulkPush(SIMPLE_EVENT, events);\n\n    expect(result.events.length).toBe(3);\n\n    // Sort and verify namespacing\n    const sortedEvents = [...events].sort((a, b) => a.key.localeCompare(b.key));\n    const sortedResults = [...result.events].sort((a, b) => a.key.localeCompare(b.key));\n\n    sortedEvents.forEach((originalEvent, index) => {\n      const returnedEvent = sortedResults[index];\n      expect(returnedEvent.key).toBe(originalEvent.key);\n    });\n  }, 15000);\n\n  it('should process events according to event engine behavior', async () => {\n    const eventPromises = [\n      createBulkPushEvent({}),\n      createBulkPushEvent({\n        key: 'thisisafakeeventfoobarbaz',\n        shouldHaveRuns: false,\n      }),\n    ].map((event) => convertBulkToSingle(event));\n    const events = await Promise.all(eventPromises);\n\n    const eventToRuns = 
await waitForEventsToProcess(events);\n\n // Verify each event's runs\n Object.keys(eventToRuns).forEach((eventId) => {\n const runs = eventToRuns[eventId];\n const eventInfo = events.find((e) => e.eventId === eventId);\n\n if (eventInfo) {\n const meta = JSON.parse(eventInfo.additionalMetadata || '{}');\n verifyEventRuns(\n {\n shouldHaveRuns: Boolean(meta.should_have_runs),\n },\n runs\n );\n }\n });\n }, 30000);\n\n function generateBulkEvents() {\n return [\n createBulkPushEvent({\n index: 1,\n ShouldSkip: false,\n shouldHaveRuns: true,\n }),\n createBulkPushEvent({\n index: 2,\n ShouldSkip: true,\n shouldHaveRuns: true,\n }),\n createBulkPushEvent({\n index: 3,\n ShouldSkip: false,\n shouldHaveRuns: true,\n scope: testRunId,\n }),\n createBulkPushEvent({\n index: 4,\n ShouldSkip: true,\n shouldHaveRuns: false,\n scope: testRunId,\n }),\n createBulkPushEvent({\n index: 5,\n ShouldSkip: true,\n shouldHaveRuns: false,\n scope: testRunId,\n key: 'thisisafakeeventfoobarbaz',\n }),\n createBulkPushEvent({\n index: 6,\n ShouldSkip: false,\n shouldHaveRuns: false,\n scope: testRunId,\n key: 'thisisafakeeventfoobarbaz',\n }),\n ];\n }\n\n async function convertBulkToSingle(event: any) {\n return hatchet.events.push(event.key, event.payload, {\n scope: event.scope,\n additionalMetadata: event.additionalMetadata,\n priority: event.priority,\n });\n }\n\n it('should handle event skipping and filtering without bulk push', async () => {\n const cleanup = await setupEventFilter();\n\n try {\n const rawEvents = generateBulkEvents();\n const eventPromises = rawEvents.map((event) => convertBulkToSingle(event));\n const events = await Promise.all(eventPromises);\n\n const eventToRuns = await waitForEventsToProcess(events);\n\n // Verify each event's runs\n Object.keys(eventToRuns).forEach((eventId) => {\n const runs = eventToRuns[eventId];\n const eventInfo = events.find((e) => e.eventId === eventId);\n\n if (eventInfo) {\n const meta = JSON.parse(eventInfo.additionalMetadata || '{}');\n verifyEventRuns(\n {\n shouldHaveRuns: Boolean(meta.should_have_runs),\n },\n runs\n );\n }\n });\n } finally {\n await cleanup();\n }\n }, 30000);\n\n it('should filter events by payload expression not matching', async () => {\n const cleanup = await setupEventFilter(\"input.ShouldSkip == false && payload.foobar == 'baz'\", {\n foobar: 'qux',\n });\n\n try {\n const event = await hatchet.events.push(\n SIMPLE_EVENT,\n { Message: 'This is event 1', ShouldSkip: false },\n {\n scope: testRunId,\n additionalMetadata: {\n should_have_runs: 'false',\n test_run_id: testRunId,\n key: '1',\n },\n }\n );\n\n const eventToRuns = await waitForEventsToProcess([event]);\n expect(Object.keys(eventToRuns).length).toBe(0);\n } finally {\n await cleanup();\n }\n }, 20000);\n\n it('should filter events by payload expression matching', async () => {\n const cleanup = await setupEventFilter(\"input.ShouldSkip == false && payload.foobar == 'baz'\", {\n foobar: 'baz',\n });\n\n try {\n const event = await hatchet.events.push(\n SIMPLE_EVENT,\n { Message: 'This is event 1', ShouldSkip: false },\n {\n scope: testRunId,\n additionalMetadata: {\n should_have_runs: 'true',\n test_run_id: testRunId,\n key: '1',\n },\n }\n );\n\n const eventToRuns = await waitForEventsToProcess([event]);\n const runs = Object.values(eventToRuns)[0] || [];\n expect(runs.length).toBeGreaterThan(0);\n } finally {\n await cleanup();\n }\n }, 20000);\n});\n", - "source": "out/typescript/on_event/event.e2e.ts", - "blocks": {}, - "highlights": {} -}; - -export default 
snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/event.ts b/frontend/docs/lib/generated/snips/typescript/on_event/event.ts deleted file mode 100644 index 7200fd8e1..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_event/event.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { Input } from './workflow';\n\nasync function main() {\n // > Pushing an Event\n const res = await hatchet.events.push('simple-event:create', {\n Message: 'hello',\n ShouldSkip: false,\n });\n\n console.log(res.eventId);\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/on_event/event.ts", - "blocks": { - "pushing_an_event": { - "start": 6, - "stop": 9 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/filter.ts b/frontend/docs/lib/generated/snips/typescript/on_event/filter.ts deleted file mode 100644 index 3ec669d56..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_event/filter.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { lower, SIMPLE_EVENT } from './workflow';\n\n// > Create a filter\nhatchet.filters.create({\n workflowId: lower.id,\n expression: 'input.ShouldSkip == false',\n scope: 'foobarbaz',\n payload: {\n main_character: 'Anna',\n supporting_character: 'Stiva',\n location: 'Moscow',\n },\n});\n\n// > Skip a run\nhatchet.events.push(\n SIMPLE_EVENT,\n {\n Message: 'hello',\n ShouldSkip: true,\n },\n {\n scope: 'foobarbaz',\n }\n);\n\n// > Trigger a run\nhatchet.events.push(\n SIMPLE_EVENT,\n {\n Message: 'hello',\n ShouldSkip: false,\n },\n {\n scope: 'foobarbaz',\n }\n);\n", - "source": "out/typescript/on_event/filter.ts", - "blocks": { - "create_a_filter": { - "start": 5, - "stop": 14 - }, - "skip_a_run": { - "start": 17, - "stop": 26 - }, - "trigger_a_run": { - "start": 29, - "stop": 38 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/index.ts b/frontend/docs/lib/generated/snips/typescript/on_event/index.ts deleted file mode 100644 index e0d845dbb..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_event/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import evente2e from './event.e2e'; -import event from './event'; -import filter from './filter'; -import worker from './worker'; -import workflow from './workflow'; - -export { evente2e } -export { event } -export { filter } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/worker.ts b/frontend/docs/lib/generated/snips/typescript/on_event/worker.ts deleted file mode 100644 index f3616ab17..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_event/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { lower, upper } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('on-event-worker', {\n workflows: [lower, upper],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": 
"out/typescript/on_event/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/workflow.ts b/frontend/docs/lib/generated/snips/typescript/on_event/workflow.ts deleted file mode 100644 index 4b5faec4d..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_event/workflow.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\nexport type Input = {\n Message: string;\n ShouldSkip: boolean;\n};\n\nexport const SIMPLE_EVENT = 'simple-event:create';\n\ntype LowerOutput = {\n lower: {\n TransformedMessage: string;\n };\n};\n\n// > Run workflow on event\nexport const lower = hatchet.workflow({\n name: 'lower',\n // 👀 Declare the event that will trigger the workflow\n onEvents: ['simple-event:create'],\n});\n\n// > Workflow with filter\nexport const lowerWithFilter = hatchet.workflow({\n name: 'lower',\n // 👀 Declare the event that will trigger the workflow\n onEvents: ['simple-event:create'],\n defaultFilters: [\n {\n expression: 'true',\n scope: 'example-scope',\n payload: {\n mainCharacter: 'Anna',\n supportingCharacter: 'Stiva',\n location: 'Moscow',\n },\n },\n ],\n});\n\nlower.task({\n name: 'lower',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\ntype UpperOutput = {\n upper: {\n TransformedMessage: string;\n };\n};\n\nexport const upper = hatchet.workflow({\n name: 'upper',\n on: {\n event: SIMPLE_EVENT,\n },\n});\n\nupper.task({\n name: 'upper',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n\n// > Accessing the filter payload\nlowerWithFilter.task({\n name: 'lowerWithFilter',\n fn: (input, ctx) => {\n console.log(ctx.filterPayload());\n },\n});\n", - "source": "out/typescript/on_event/workflow.ts", - "blocks": { - "run_workflow_on_event": { - "start": 17, - "stop": 21 - }, - "workflow_with_filter": { - "start": 24, - "stop": 39 - }, - "accessing_the_filter_payload": { - "start": 73, - "stop": 78 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_failure/index.ts b/frontend/docs/lib/generated/snips/typescript/on_failure/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_failure/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/on_failure/run.ts b/frontend/docs/lib/generated/snips/typescript/on_failure/run.ts deleted file mode 100644 index ab789ba7e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_failure/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { failureWorkflow } from './workflow';\n\nasync function main() {\n try {\n const res = await failureWorkflow.run({});\n console.log(res);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": "out/typescript/on_failure/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git 
a/frontend/docs/lib/generated/snips/typescript/on_failure/worker.ts b/frontend/docs/lib/generated/snips/typescript/on_failure/worker.ts deleted file mode 100644 index eb98fa0ad..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_failure/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { failureWorkflow } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('always-fail-worker', {\n workflows: [failureWorkflow],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/on_failure/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_failure/workflow.ts b/frontend/docs/lib/generated/snips/typescript/on_failure/workflow.ts deleted file mode 100644 index c1f788124..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_failure/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\n// > On Failure Task\nexport const failureWorkflow = hatchet.workflow({\n name: 'always-fail',\n});\n\nfailureWorkflow.task({\n name: 'always-fail',\n fn: async () => {\n throw new Error('intentional failure');\n },\n});\n\nfailureWorkflow.onFailure({\n name: 'on-failure',\n fn: async (input, ctx) => {\n console.log('onFailure for run:', ctx.workflowRunId());\n return {\n 'on-failure': 'success',\n };\n },\n});\n", - "source": "out/typescript/on_failure/workflow.ts", - "blocks": { - "on_failure_task": { - "start": 4, - "stop": 23 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_success/index.ts b/frontend/docs/lib/generated/snips/typescript/on_success/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_success/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/on_success/run.ts b/frontend/docs/lib/generated/snips/typescript/on_success/run.ts deleted file mode 100644 index cbe5a0d09..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_success/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { onSuccessDag } from './workflow';\n\nasync function main() {\n try {\n const res2 = await onSuccessDag.run({});\n console.log(res2);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": "out/typescript/on_success/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_success/worker.ts b/frontend/docs/lib/generated/snips/typescript/on_success/worker.ts deleted file mode 100644 index 27a8aa9a9..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_success/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from 
'@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { onSuccessDag } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('always-succeed-worker', {\n workflows: [onSuccessDag],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/on_success/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_success/workflow.ts b/frontend/docs/lib/generated/snips/typescript/on_success/workflow.ts deleted file mode 100644 index 4bd76d828..000000000 --- a/frontend/docs/lib/generated/snips/typescript/on_success/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\n// > On Success DAG\nexport const onSuccessDag = hatchet.workflow({\n name: 'on-success-dag',\n});\n\nonSuccessDag.task({\n name: 'always-succeed',\n fn: async () => {\n return {\n 'always-succeed': 'success',\n };\n },\n});\nonSuccessDag.task({\n name: 'always-succeed2',\n fn: async () => {\n return {\n 'always-succeed': 'success',\n };\n },\n});\n\n// 👀 onSuccess handler will run if all tasks in the workflow succeed\nonSuccessDag.onSuccess({\n fn: (_, ctx) => {\n console.log('onSuccess for run:', ctx.workflowRunId());\n return {\n 'on-success': 'success',\n };\n },\n});\n", - "source": "out/typescript/on_success/workflow.ts", - "blocks": { - "on_success_dag": { - "start": 4, - "stop": 33 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/priority/index.ts b/frontend/docs/lib/generated/snips/typescript/priority/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/priority/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/priority/run.ts b/frontend/docs/lib/generated/snips/typescript/priority/run.ts deleted file mode 100644 index 90b0a7326..000000000 --- a/frontend/docs/lib/generated/snips/typescript/priority/run.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { Priority } from '@hatchet-dev/typescript-sdk/v1';\nimport { priority } from './workflow';\n\nasync function main() {\n try {\n console.log('running priority workflow');\n\n // > Run a Task with a Priority\n const run = priority.run(new Date(Date.now() + 60 * 60 * 1000), { priority: Priority.HIGH });\n\n // > Schedule and cron\n const scheduled = priority.schedule(\n new Date(Date.now() + 60 * 60 * 1000),\n {},\n { priority: Priority.HIGH }\n );\n const delayed = priority.delay(60 * 60 * 1000, {}, { priority: Priority.HIGH });\n const cron = priority.cron(\n `daily-cron-${Math.random()}`,\n '0 0 * * *',\n {},\n { priority: Priority.HIGH }\n );\n\n const [scheduledResult, delayedResult] = await Promise.all([scheduled, delayed]);\n console.log('scheduledResult', scheduledResult);\n console.log('delayedResult', delayedResult);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n 
main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": "out/typescript/priority/run.ts", - "blocks": { - "run_a_task_with_a_priority": { - "start": 9, - "stop": 9 - }, - "schedule_and_cron": { - "start": 12, - "stop": 23 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/priority/worker.ts b/frontend/docs/lib/generated/snips/typescript/priority/worker.ts deleted file mode 100644 index f1a2562fa..000000000 --- a/frontend/docs/lib/generated/snips/typescript/priority/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { priorityTasks } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('priority-worker', {\n workflows: [...priorityTasks],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/priority/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/priority/workflow.ts b/frontend/docs/lib/generated/snips/typescript/priority/workflow.ts deleted file mode 100644 index 4a0f7e67b..000000000 --- a/frontend/docs/lib/generated/snips/typescript/priority/workflow.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { Priority } from '@hatchet-dev/typescript-sdk/v1';\nimport { hatchet } from '../hatchet-client';\n\n// > Simple Task Priority\nexport const priority = hatchet.task({\n name: 'priority',\n defaultPriority: Priority.MEDIUM,\n fn: async (_, ctx) => {\n return {\n priority: ctx.priority(),\n };\n },\n});\n\n// > Task Priority in a Workflow\nexport const priorityWf = hatchet.workflow({\n name: 'priorityWf',\n defaultPriority: Priority.LOW,\n});\n\npriorityWf.task({\n name: 'child-medium',\n fn: async (_, ctx) => {\n return {\n priority: ctx.priority(),\n };\n },\n});\n\npriorityWf.task({\n name: 'child-high',\n // will inherit the default priority from the workflow\n fn: async (_, ctx) => {\n return {\n priority: ctx.priority(),\n };\n },\n});\n\nexport const priorityTasks = [priority, priorityWf];\n", - "source": "out/typescript/priority/workflow.ts", - "blocks": { - "simple_task_priority": { - "start": 5, - "stop": 13 - }, - "task_priority_in_a_workflow": { - "start": 16, - "stop": 19 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/quickstart/gitignore.ts b/frontend/docs/lib/generated/snips/typescript/quickstart/gitignore.ts deleted file mode 100644 index 10c7f0edb..000000000 --- a/frontend/docs/lib/generated/snips/typescript/quickstart/gitignore.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "unknown", - "content": "certs/\n\n# Environments\n.env\nenv/\n\n# TypeScript React\nnode_modules/\ndist/\nbuild/\n\n.DS_Store\n\nindex/index.json\n", - "source": "out/typescript/quickstart/.gitignore", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/quickstart/hatchet-client.ts b/frontend/docs/lib/generated/snips/typescript/quickstart/hatchet-client.ts deleted file mode 100644 index 08a70c51b..000000000 --- 
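The priority snippets being deleted here share a small API surface. A condensed sketch that uses only calls appearing verbatim in the removed `priority/run.ts` and `priority/workflow.ts` contents (import paths are theirs, assumed still valid):

```typescript
import { Priority } from '@hatchet-dev/typescript-sdk/v1';
import { hatchet } from '../hatchet-client';

// A task with a default priority; ctx.priority() reports the effective value at run time
export const priority = hatchet.task({
  name: 'priority',
  defaultPriority: Priority.MEDIUM,
  fn: async (_, ctx) => ({ priority: ctx.priority() }),
});

async function main() {
  // Override the default for a one-off scheduled run an hour from now
  const scheduled = await priority.schedule(
    new Date(Date.now() + 60 * 60 * 1000),
    {},
    { priority: Priority.HIGH }
  );
  console.log(scheduled);
}
```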
a/frontend/docs/lib/generated/snips/typescript/quickstart/hatchet-client.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import Hatchet from '@hatchet-dev/typescript-sdk/sdk';\n\nexport const hatchet = Hatchet.init();\n", - "source": "out/typescript/quickstart/hatchet-client.ts", - "blocks": {}, - "highlights": { - "client": { - "lines": [ - 3 - ], - "strings": [ - "Client" - ] - } - } -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/quickstart/index.ts b/frontend/docs/lib/generated/snips/typescript/quickstart/index.ts deleted file mode 100644 index 2dd5783bd..000000000 --- a/frontend/docs/lib/generated/snips/typescript/quickstart/index.ts +++ /dev/null @@ -1,11 +0,0 @@ -import gitignore from './gitignore'; -import hatchet_client from './hatchet-client'; -import run from './run'; -import worker from './worker'; -import * as workflows from './workflows'; - -export { gitignore } -export { hatchet_client } -export { run } -export { worker } -export { workflows }; diff --git a/frontend/docs/lib/generated/snips/typescript/quickstart/run.ts b/frontend/docs/lib/generated/snips/typescript/quickstart/run.ts deleted file mode 100644 index 1f5ce8e11..000000000 --- a/frontend/docs/lib/generated/snips/typescript/quickstart/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { firstTask } from './workflows/first-task';\n\nasync function main() {\n const res = await firstTask.run({\n Message: 'Hello World!',\n });\n\n console.log(\n 'Finished running task, and got the transformed message! The transformed message is:',\n res.TransformedMessage\n );\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": "out/typescript/quickstart/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/quickstart/worker.ts b/frontend/docs/lib/generated/snips/typescript/quickstart/worker.ts deleted file mode 100644 index a87f973c9..000000000 --- a/frontend/docs/lib/generated/snips/typescript/quickstart/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { firstTask } from './workflows/first-task';\n\nasync function main() {\n const worker = await hatchet.worker('first-worker', {\n workflows: [firstTask],\n slots: 10,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/quickstart/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/quickstart/workflows/first-task.ts b/frontend/docs/lib/generated/snips/typescript/quickstart/workflows/first-task.ts deleted file mode 100644 index 968d6de37..000000000 --- a/frontend/docs/lib/generated/snips/typescript/quickstart/workflows/first-task.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../../hatchet-client';\n\ntype SimpleInput = {\n Message: string;\n};\n\ntype SimpleOutput = {\n TransformedMessage: string;\n};\n\nexport 
const firstTask = hatchet.task({\n name: 'first-task',\n fn: (input: SimpleInput, ctx): SimpleOutput => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - "source": "out/typescript/quickstart/workflows/first-task.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/quickstart/workflows/index.ts b/frontend/docs/lib/generated/snips/typescript/quickstart/workflows/index.ts deleted file mode 100644 index bffbe53db..000000000 --- a/frontend/docs/lib/generated/snips/typescript/quickstart/workflows/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import first_task from './first-task'; - -export { first_task } diff --git a/frontend/docs/lib/generated/snips/typescript/rate_limit/index.ts b/frontend/docs/lib/generated/snips/typescript/rate_limit/index.ts deleted file mode 100644 index 51faeccd8..000000000 --- a/frontend/docs/lib/generated/snips/typescript/rate_limit/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import workflow from './workflow'; - -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/rate_limit/workflow.ts b/frontend/docs/lib/generated/snips/typescript/rate_limit/workflow.ts deleted file mode 100644 index 4417caaa1..000000000 --- a/frontend/docs/lib/generated/snips/typescript/rate_limit/workflow.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { RateLimitDuration } from '@hatchet-dev/typescript-sdk/protoc/v1/workflows';\nimport { hatchet } from '../hatchet-client';\n\n// > Upsert Rate Limit\nhatchet.ratelimits.upsert({\n key: 'api-service-rate-limit',\n limit: 10,\n duration: RateLimitDuration.SECOND,\n});\n\n// > Static\nconst RATE_LIMIT_KEY = 'api-service-rate-limit';\n\nconst task1 = hatchet.task({\n name: 'task1',\n rateLimits: [\n {\n staticKey: RATE_LIMIT_KEY,\n units: 1,\n },\n ],\n fn: (input) => {\n console.log('executed task1');\n },\n});\n\n\n// > Dynamic\nconst task2 = hatchet.task({\n name: 'task2',\n fn: (input: { userId: string }) => {\n console.log('executed task2 for user: ', input.userId);\n },\n rateLimits: [\n {\n dynamicKey: 'input.userId',\n units: 1,\n limit: 10,\n duration: RateLimitDuration.MINUTE,\n },\n ],\n});\n", - "source": "out/typescript/rate_limit/workflow.ts", - "blocks": { - "upsert_rate_limit": { - "start": 5, - "stop": 9 - }, - "static": { - "start": 12, - "stop": 26 - }, - "dynamic": { - "start": 29, - "stop": 42 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/retries/index.ts b/frontend/docs/lib/generated/snips/typescript/retries/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/retries/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/retries/run.ts b/frontend/docs/lib/generated/snips/typescript/retries/run.ts deleted file mode 100644 index d9b2a5bca..000000000 --- a/frontend/docs/lib/generated/snips/typescript/retries/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { retries } from './workflow';\n\nasync function main() {\n try {\n const res = await 
retries.run({});\n console.log(res);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": "out/typescript/retries/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/retries/worker.ts b/frontend/docs/lib/generated/snips/typescript/retries/worker.ts deleted file mode 100644 index cbf46d82e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/retries/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { retries } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('always-fail-worker', {\n workflows: [retries],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/retries/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/retries/workflow.ts b/frontend/docs/lib/generated/snips/typescript/retries/workflow.ts deleted file mode 100644 index 44ea02526..000000000 --- a/frontend/docs/lib/generated/snips/typescript/retries/workflow.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\n\n// > Simple Step Retries\nexport const retries = hatchet.task({\n name: 'retries',\n retries: 3,\n fn: async (_, ctx) => {\n throw new Error('intentional failure');\n },\n});\n\n// > Retries with Count\nexport const retriesWithCount = hatchet.task({\n name: 'retriesWithCount',\n retries: 3,\n fn: async (_, ctx) => {\n // > Get the current retry count\n const retryCount = ctx.retryCount();\n\n console.log(`Retry count: ${retryCount}`);\n\n if (retryCount < 2) {\n throw new Error('intentional failure');\n }\n\n return {\n message: 'success',\n };\n },\n});\n\n// > Retries with Backoff\nexport const withBackoff = hatchet.task({\n name: 'withBackoff',\n retries: 10,\n backoff: {\n // 👀 Maximum number of seconds to wait between retries\n maxSeconds: 10,\n // 👀 Factor to increase the wait time between retries.\n // This sequence will be 2s, 4s, 8s, 10s, 10s, 10s... 
due to the maxSeconds limit\n factor: 2,\n },\n fn: async () => {\n throw new Error('intentional failure');\n },\n});\n", - "source": "out/typescript/retries/workflow.ts", - "blocks": { - "simple_step_retries": { - "start": 4, - "stop": 10 - }, - "get_the_current_retry_count": { - "start": 18, - "stop": 30 - }, - "retries_with_backoff": { - "start": 33, - "stop": 46 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/bulk.ts b/frontend/docs/lib/generated/snips/typescript/simple/bulk.ts deleted file mode 100644 index 3c2c25948..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/bulk.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { simple, SimpleInput } from './workflow';\n\nasync function main() {\n // > Bulk Run a Task\n const res = await simple.run([\n {\n Message: 'HeLlO WoRlD',\n },\n {\n Message: 'Hello MoOn',\n },\n ]);\n\n // 👀 Access the results of the Task\n console.log(res[0].TransformedMessage);\n console.log(res[1].TransformedMessage);\n\n // > Bulk Run Tasks from within a Task\n const parent = hatchet.task({\n name: 'simple',\n fn: async (input: SimpleInput, ctx) => {\n // Bulk run two tasks in parallel\n const child = await ctx.bulkRunChildren([\n {\n workflow: simple,\n input: {\n Message: 'Hello, World!',\n },\n },\n {\n workflow: simple,\n input: {\n Message: 'Hello, Moon!',\n },\n },\n ]);\n\n return {\n TransformedMessage: `${child[0].TransformedMessage} ${child[1].TransformedMessage}`,\n };\n },\n });\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/simple/bulk.ts", - "blocks": { - "bulk_run_a_task": { - "start": 6, - "stop": 17 - }, - "bulk_run_tasks_from_within_a_task": { - "start": 20, - "stop": 43 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/client-run.ts b/frontend/docs/lib/generated/snips/typescript/simple/client-run.ts deleted file mode 100644 index be9840370..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/client-run.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Client Run Methods\nimport { hatchet } from '../hatchet-client';\n\nhatchet.run('simple', { Message: 'Hello, World!' });\n\nhatchet.runNoWait('simple', { Message: 'Hello, World!' }, {});\n\nhatchet.schedules.create('simple', {\n triggerAt: new Date(Date.now() + 1000 * 60 * 60 * 24),\n input: { Message: 'Hello, World!' },\n});\n\nhatchet.crons.create('simple', {\n name: 'my-cron',\n expression: '0 0 * * *',\n input: { Message: 'Hello, World!' 
},\n});\n", - "source": "out/typescript/simple/client-run.ts", - "blocks": { - "client_run_methods": { - "start": 2, - "stop": 17 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/cron.ts b/frontend/docs/lib/generated/snips/typescript/simple/cron.ts deleted file mode 100644 index 28bd4454e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/cron.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n // > Create\n const cron = await simple.cron('simple-daily', '0 0 * * *', {\n Message: 'hello',\n });\n\n // it may be useful to save the cron id for later\n const cronId = cron.metadata.id;\n\n console.log(cron.metadata.id);\n\n // > Delete\n await hatchet.crons.delete(cronId);\n\n // > List\n const crons = await hatchet.crons.list({\n workflow: simple,\n });\n\n console.log(crons);\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/simple/cron.ts", - "blocks": { - "create": { - "start": 6, - "stop": 11 - }, - "delete": { - "start": 16, - "stop": 16 - }, - "list": { - "start": 19, - "stop": 21 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/delay.ts b/frontend/docs/lib/generated/snips/typescript/simple/delay.ts deleted file mode 100644 index a98fe7f79..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/delay.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n const tomorrow = 24 * 60 * 60; // 1 day\n const scheduled = await simple.delay(tomorrow, {\n Message: 'hello',\n });\n\n console.log(scheduled.metadata.id);\n\n await hatchet.schedules.delete(scheduled);\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/simple/delay.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/enqueue.ts b/frontend/docs/lib/generated/snips/typescript/simple/enqueue.ts deleted file mode 100644 index b2c0f5812..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/enqueue.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { SimpleOutput } from './stub-workflow';\n// > Enqueuing a Workflow (Fire and Forget)\nimport { simple } from './workflow';\n// ...\n\nasync function main() {\n // 👀 Enqueue the workflow\n const run = await simple.runNoWait({\n Message: 'hello',\n });\n\n // 👀 Get the run ID of the workflow\n const runId = await run.getWorkflowRunId();\n // It may be helpful to store the run ID of the workflow\n // in a database or other persistent storage for later use\n console.log(runId);\n\n // > Subscribing to results\n // the return object of the enqueue method is a WorkflowRunRef which includes a listener for the result of the workflow\n const result = await run.result();\n console.log(result);\n\n // if you need to subscribe to the result of the workflow at a later time, 
you can use the runRef method and the stored runId\n const ref = hatchet.runRef(runId);\n const result2 = await ref.result();\n console.log(result2);\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/simple/enqueue.ts", - "blocks": { - "enqueuing_a_workflow_fire_and_forget": { - "start": 4, - "stop": 17 - }, - "subscribing_to_results": { - "start": 20, - "stop": 27 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/index.ts b/frontend/docs/lib/generated/snips/typescript/simple/index.ts deleted file mode 100644 index 34b5fcc22..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/index.ts +++ /dev/null @@ -1,23 +0,0 @@ -import bulk from './bulk'; -import client_run from './client-run'; -import cron from './cron'; -import delay from './delay'; -import enqueue from './enqueue'; -import run from './run'; -import schedule from './schedule'; -import stub_workflow from './stub-workflow'; -import worker from './worker'; -import workflow_with_child from './workflow-with-child'; -import workflow from './workflow'; - -export { bulk } -export { client_run } -export { cron } -export { delay } -export { enqueue } -export { run } -export { schedule } -export { stub_workflow } -export { worker } -export { workflow_with_child } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/simple/run.ts b/frontend/docs/lib/generated/snips/typescript/simple/run.ts deleted file mode 100644 index 06329f6fd..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/run.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\nimport { parent } from './workflow-with-child';\n\nasync function main() {\n // > Running a Task\n const res = await parent.run(\n {\n Message: 'HeLlO WoRlD',\n },\n {\n additionalMetadata: {\n test: 'test',\n },\n }\n );\n\n // 👀 Access the results of the Task\n console.log(res.TransformedMessage);\n}\n\nexport async function extra() {\n // > Running Multiple Tasks\n const res1 = simple.run({\n Message: 'HeLlO WoRlD',\n });\n\n const res2 = simple.run({\n Message: 'Hello MoOn',\n });\n\n const results = await Promise.all([res1, res2]);\n\n console.log(results[0].TransformedMessage);\n console.log(results[1].TransformedMessage);\n\n // > Spawning Tasks from within a Task\n const parentTask = hatchet.task({\n name: 'parent',\n fn: async (input, ctx) => {\n // Simply the task and it will be spawned from the parent task\n const child = await simple.run({\n Message: 'HeLlO WoRlD',\n });\n\n return {\n result: child.TransformedMessage,\n };\n },\n });\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => {\n process.exit(0);\n });\n}\n", - "source": "out/typescript/simple/run.ts", - "blocks": { - "running_a_task": { - "start": 7, - "stop": 19 - }, - "running_multiple_tasks": { - "start": 24, - "stop": 35 - }, - "spawning_tasks_from_within_a_task": { - "start": 38, - "stop": 50 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/schedule.ts b/frontend/docs/lib/generated/snips/typescript/simple/schedule.ts deleted file mode 100644 index a28c43bc4..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/schedule.ts +++ /dev/null @@ -1,24 +0,0 @@ -import 
{ Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\n\nasync function main() {\n // > Create a Scheduled Run\n\n const runAt = new Date(new Date().setHours(12, 0, 0, 0) + 24 * 60 * 60 * 1000);\n\n const scheduled = await simple.schedule(runAt, {\n Message: 'hello',\n });\n\n // 👀 Get the scheduled run ID of the workflow\n // it may be helpful to store the scheduled run ID of the workflow\n // in a database or other persistent storage for later use\n const scheduledRunId = scheduled.metadata.id;\n console.log(scheduledRunId);\n\n // > Delete a Scheduled Run\n await hatchet.scheduled.delete(scheduled);\n\n // > List Scheduled Runs\n const scheduledRuns = await hatchet.scheduled.list({\n workflow: simple,\n });\n console.log(scheduledRuns);\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/simple/schedule.ts", - "blocks": { - "create_a_scheduled_run": { - "start": 6, - "stop": 17 - }, - "delete_a_scheduled_run": { - "start": 20, - "stop": 20 - }, - "list_scheduled_runs": { - "start": 23, - "stop": 26 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/stub-workflow.ts b/frontend/docs/lib/generated/snips/typescript/simple/stub-workflow.ts deleted file mode 100644 index b45298350..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/stub-workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Declaring an External Workflow Reference\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// (optional) Define the output type for the workflow\nexport type SimpleOutput = {\n 'to-lower': {\n TransformedMessage: string;\n };\n};\n\n// declare the workflow with the same name as the\n// workflow name on the worker\nexport const simple = hatchet.workflow({\n name: 'simple',\n});\n\n// you can use all the same run methods on the stub\n// with full type-safety\nsimple.run({ Message: 'Hello, World!' });\nsimple.runNoWait({ Message: 'Hello, World!' });\nsimple.schedule(new Date(), { Message: 'Hello, World!' });\nsimple.cron('my-cron', '0 0 * * *', { Message: 'Hello, World!' 
});\n", - "source": "out/typescript/simple/stub-workflow.ts", - "blocks": { - "declaring_an_external_workflow_reference": { - "start": 2, - "stop": 27 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/worker.ts b/frontend/docs/lib/generated/snips/typescript/simple/worker.ts deleted file mode 100644 index b0b4d9907..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { simple } from './workflow';\nimport { parent, child } from './workflow-with-child';\n\nasync function main() {\n const worker = await hatchet.worker('simple-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [simple, parent, child],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 100,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/simple/worker.ts", - "blocks": { - "declaring_a_worker": { - "start": 2, - "stop": 19 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/workflow-with-child.ts b/frontend/docs/lib/generated/snips/typescript/simple/workflow-with-child.ts deleted file mode 100644 index 7d29b4646..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/workflow-with-child.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Declaring a Task\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type ChildInput = {\n Message: string;\n};\n\nexport type ParentInput = {\n Message: string;\n};\n\nexport const child = hatchet.workflow({\n name: 'child',\n});\n\nexport const child1 = child.task({\n name: 'child1',\n fn: (input: ChildInput, ctx) => {\n ctx.logger.info('hello from the child1', { hello: 'moon' });\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nexport const child2 = child.task({\n name: 'child2',\n fn: (input: ChildInput, ctx) => {\n ctx.logger.info('hello from the child2');\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nexport const child3 = child.task({\n name: 'child3',\n parents: [child1, child2],\n fn: (input: ChildInput, ctx) => {\n ctx.logger.info('hello from the child3');\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\nexport const parent = hatchet.task({\n name: 'parent',\n fn: async (input: ParentInput, ctx) => {\n const c = await ctx.runChild(child, {\n Message: input.Message,\n });\n\n return {\n TransformedMessage: 'not implemented',\n };\n },\n});\n\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - "source": "out/typescript/simple/workflow-with-child.ts", - "blocks": { - "declaring_a_task": { - "start": 2, - "stop": 60 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/simple/workflow.ts b/frontend/docs/lib/generated/snips/typescript/simple/workflow.ts deleted file mode 100644 index 923ba7a04..000000000 --- a/frontend/docs/lib/generated/snips/typescript/simple/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import 
{ Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Declaring a Task\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\nexport const simple = hatchet.task({\n name: 'simple',\n retries: 3,\n fn: async (input: SimpleInput) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - "source": "out/typescript/simple/workflow.ts", - "blocks": { - "declaring_a_task": { - "start": 2, - "stop": 18 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/sticky/index.ts b/frontend/docs/lib/generated/snips/typescript/sticky/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/sticky/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/sticky/run.ts b/frontend/docs/lib/generated/snips/typescript/sticky/run.ts deleted file mode 100644 index dc07de293..000000000 --- a/frontend/docs/lib/generated/snips/typescript/sticky/run.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { retries } from '../retries/workflow';\n\nasync function main() {\n try {\n const res = await retries.run({});\n console.log(res);\n } catch (e) {\n console.log('error', e);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": "out/typescript/sticky/run.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/sticky/worker.ts b/frontend/docs/lib/generated/snips/typescript/sticky/worker.ts deleted file mode 100644 index 279aeb59f..000000000 --- a/frontend/docs/lib/generated/snips/typescript/sticky/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { retries } from '../retries/workflow';\n\nasync function main() {\n const worker = await hatchet.worker('always-fail-worker', {\n workflows: [retries],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/sticky/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/sticky/workflow.ts b/frontend/docs/lib/generated/snips/typescript/sticky/workflow.ts deleted file mode 100644 index be18feb29..000000000 --- a/frontend/docs/lib/generated/snips/typescript/sticky/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { StickyStrategy } from '@hatchet-dev/typescript-sdk/protoc/workflows';\nimport { hatchet } from '../hatchet-client';\nimport { child } from '../child_workflows/workflow';\n\n// > Sticky Task\nexport const sticky = hatchet.task({\n name: 'sticky',\n retries: 3,\n sticky: StickyStrategy.SOFT,\n fn: async (_, ctx) => 
{\n // specify a child workflow to run on the same worker\n const result = await child.run(\n {\n N: 1,\n },\n { sticky: true }\n );\n\n return {\n result,\n };\n },\n});\n", - "source": "out/typescript/sticky/workflow.ts", - "blocks": { - "sticky_task": { - "start": 6, - "stop": 23 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/streaming/index.ts b/frontend/docs/lib/generated/snips/typescript/streaming/index.ts deleted file mode 100644 index ae93c09d1..000000000 --- a/frontend/docs/lib/generated/snips/typescript/streaming/index.ts +++ /dev/null @@ -1,9 +0,0 @@ -import nextjs_proxy from './nextjs-proxy'; -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { nextjs_proxy } -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/streaming/nextjs-proxy.ts b/frontend/docs/lib/generated/snips/typescript/streaming/nextjs-proxy.ts deleted file mode 100644 index 76bc7df89..000000000 --- a/frontend/docs/lib/generated/snips/typescript/streaming/nextjs-proxy.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { Readable } from 'stream';\nimport { hatchet } from '../hatchet-client';\nimport { streamingTask } from './workflow';\n\n// > NextJS Proxy\nexport async function GET() {\n try {\n const ref = await streamingTask.runNoWait({});\n const workflowRunId = await ref.getWorkflowRunId();\n\n const stream = Readable.from(hatchet.runs.subscribeToStream(workflowRunId));\n\n // @ts-ignore\n return new Response(Readable.toWeb(stream), {\n headers: {\n 'Content-Type': 'text/plain',\n 'Cache-Control': 'no-cache',\n Connection: 'keep-alive',\n },\n });\n } catch (error) {\n return new Response('Internal Server Error', { status: 500 });\n }\n}\n", - "source": "out/typescript/streaming/nextjs-proxy.ts", - "blocks": { - "nextjs_proxy": { - "start": 6, - "stop": 24 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/streaming/run.ts b/frontend/docs/lib/generated/snips/typescript/streaming/run.ts deleted file mode 100644 index d32c6714e..000000000 --- a/frontend/docs/lib/generated/snips/typescript/streaming/run.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import { streamingTask } from './workflow';\nimport { hatchet } from '../hatchet-client';\n\nasync function main() {\n // > Consume\n const ref = await streamingTask.runNoWait({});\n const id = await ref.getWorkflowRunId();\n\n for await (const content of hatchet.runs.subscribeToStream(id)) {\n process.stdout.write(content);\n }\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => {\n process.exit(0);\n });\n}\n", - "source": "out/typescript/streaming/run.ts", - "blocks": { - "consume": { - "start": 6, - "stop": 11 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/streaming/worker.ts b/frontend/docs/lib/generated/snips/typescript/streaming/worker.ts deleted file mode 100644 index 3e5ccf3ca..000000000 --- a/frontend/docs/lib/generated/snips/typescript/streaming/worker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - 
"language": "typescript ", - "content": "import { hatchet } from '../hatchet-client';\nimport { streamingTask } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('streaming-worker', {\n workflows: [streamingTask],\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/streaming/worker.ts", - "blocks": {}, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/streaming/workflow.ts b/frontend/docs/lib/generated/snips/typescript/streaming/workflow.ts deleted file mode 100644 index d091bf23a..000000000 --- a/frontend/docs/lib/generated/snips/typescript/streaming/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "import sleep from '../../../util/sleep';\nimport { hatchet } from '../hatchet-client';\n\n// > Streaming\nconst annaKarenina = `\nHappy families are all alike; every unhappy family is unhappy in its own way.\n\nEverything was in confusion in the Oblonskys' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him.\n`;\n\nfunction* createChunks(content: string, n: number): Generator {\n for (let i = 0; i < content.length; i += n) {\n yield content.slice(i, i + n);\n }\n}\n\nexport const streamingTask = hatchet.task({\n name: 'stream-example',\n fn: async (_, ctx) => {\n await sleep(2000);\n\n for (const chunk of createChunks(annaKarenina, 10)) {\n ctx.putStream(chunk);\n await sleep(200);\n }\n },\n});\n\n", - "source": "out/typescript/streaming/workflow.ts", - "blocks": { - "streaming": { - "start": 5, - "stop": 28 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/timeouts/index.ts b/frontend/docs/lib/generated/snips/typescript/timeouts/index.ts deleted file mode 100644 index 3ba5a55ec..000000000 --- a/frontend/docs/lib/generated/snips/typescript/timeouts/index.ts +++ /dev/null @@ -1,7 +0,0 @@ -import run from './run'; -import worker from './worker'; -import workflow from './workflow'; - -export { run } -export { worker } -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/timeouts/run.ts b/frontend/docs/lib/generated/snips/typescript/timeouts/run.ts deleted file mode 100644 index ac2e14e3c..000000000 --- a/frontend/docs/lib/generated/snips/typescript/timeouts/run.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Running a Task with Results\nimport { cancellation } from './workflow';\n// ...\nasync function main() {\n // 👀 Run the workflow with results\n const res = await cancellation.run({});\n\n // 👀 Access the results of the workflow\n console.log(res.Completed);\n}\n\nif (require.main === module) {\n main()\n .catch(console.error)\n .finally(() => process.exit(0));\n}\n", - "source": "out/typescript/timeouts/run.ts", - "blocks": { - "running_a_task_with_results": { - "start": 2, - "stop": 9 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/timeouts/worker.ts b/frontend/docs/lib/generated/snips/typescript/timeouts/worker.ts deleted file mode 100644 index 
56448604a..000000000 --- a/frontend/docs/lib/generated/snips/typescript/timeouts/worker.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Declaring a Worker\nimport { hatchet } from '../hatchet-client';\nimport { cancellation } from './workflow';\n\nasync function main() {\n const worker = await hatchet.worker('cancellation-worker', {\n // 👀 Declare the workflows that the worker can execute\n workflows: [cancellation],\n // 👀 Declare the number of concurrent task runs the worker can accept\n slots: 100,\n });\n\n await worker.start();\n}\n\nif (require.main === module) {\n main();\n}\n", - "source": "out/typescript/timeouts/worker.ts", - "blocks": { - "declaring_a_worker": { - "start": 2, - "stop": 18 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/timeouts/workflow.ts b/frontend/docs/lib/generated/snips/typescript/timeouts/workflow.ts deleted file mode 100644 index e70a8cac5..000000000 --- a/frontend/docs/lib/generated/snips/typescript/timeouts/workflow.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Declaring a Task\nimport sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport const cancellation = hatchet.task({\n name: 'cancellation',\n executionTimeout: '3s',\n fn: async (_, { cancelled }) => {\n await sleep(10 * 1000);\n\n if (cancelled) {\n throw new Error('Task was cancelled');\n }\n\n return {\n Completed: true,\n };\n },\n});\n\n// see ./worker.ts and ./run.ts for how to run the workflow\n", - "source": "out/typescript/timeouts/workflow.ts", - "blocks": { - "declaring_a_task": { - "start": 2, - "stop": 20 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/with_timeouts/index.ts b/frontend/docs/lib/generated/snips/typescript/with_timeouts/index.ts deleted file mode 100644 index 51faeccd8..000000000 --- a/frontend/docs/lib/generated/snips/typescript/with_timeouts/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import workflow from './workflow'; - -export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/with_timeouts/workflow.ts b/frontend/docs/lib/generated/snips/typescript/with_timeouts/workflow.ts deleted file mode 100644 index f696704f8..000000000 --- a/frontend/docs/lib/generated/snips/typescript/with_timeouts/workflow.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { Snippet } from '@/lib/generated/snips/types'; - -const snippet: Snippet = { - "language": "typescript ", - "content": "// > Declaring a Task\nimport sleep from '@hatchet-dev/typescript-sdk/util/sleep';\nimport { hatchet } from '../hatchet-client';\n\n// (optional) Define the input type for the workflow\nexport type SimpleInput = {\n Message: string;\n};\n\n// > Execution Timeout\nexport const withTimeouts = hatchet.task({\n name: 'with-timeouts',\n // time the task can wait in the queue before it is cancelled\n scheduleTimeout: '10s',\n // time the task can run before it is cancelled\n executionTimeout: '10s',\n fn: async (input: SimpleInput, ctx) => {\n // wait 15 seconds\n await sleep(15000);\n\n // get the abort controller\n const { abortController } = ctx;\n\n // if the abort controller is aborted, throw an error\n if 
(abortController.signal.aborted) {\n throw new Error('cancelled');\n }\n\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\n// > Refresh Timeout\nexport const refreshTimeout = hatchet.task({\n name: 'refresh-timeout',\n executionTimeout: '10s',\n scheduleTimeout: '10s',\n fn: async (input: SimpleInput, ctx) => {\n // adds 15 seconds to the execution timeout\n ctx.refreshTimeout('15s');\n await sleep(15000);\n\n // get the abort controller\n const { abortController } = ctx;\n\n // now this condition will not be met\n // if the abort controller is aborted, throw an error\n if (abortController.signal.aborted) {\n throw new Error('cancelled');\n }\n\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n", - "source": "out/typescript/with_timeouts/workflow.ts", - "blocks": { - "execution_timeout": { - "start": 11, - "stop": 33 - }, - "refresh_timeout": { - "start": 36, - "stop": 58 - } - }, - "highlights": {} -}; - -export default snippet; diff --git a/frontend/docs/package.json b/frontend/docs/package.json index 6217b8e06..fd512cba2 100644 --- a/frontend/docs/package.json +++ b/frontend/docs/package.json @@ -7,7 +7,7 @@ "taskfile-dev": "pnpm run generate-examples && next dev", "dev": "task docs", "only:dev": "next dev", - "build": "next build", + "build": "pnpm run generate-examples && next build", "start": "next start", "lint:check": "npm run prettier:check", "lint:fix": "npm run prettier:fix", @@ -54,7 +54,7 @@ "@types/react": "^18.3.23", "@types/react-dom": "^18.3.7", "prettier": "^3.5.3", - "typescript": "^4.9.5" + "typescript": "^5.8.2" }, "resolutions": { "nanoid": "^3.3.8", diff --git a/frontend/docs/pages/home/compute/auto-scaling.mdx b/frontend/docs/pages/home/compute/auto-scaling.mdx index 99a9aaf8f..e98cb692d 100644 --- a/frontend/docs/pages/home/compute/auto-scaling.mdx +++ b/frontend/docs/pages/home/compute/auto-scaling.mdx @@ -79,12 +79,10 @@ The number of replicas to add or remove during each scaling event. 1. **Start Conservative**: Begin with moderate thresholds (e.g., 0.75 for scale-up and 0.25 for scale-down) and adjust based on your workload patterns. 2. **Tune Wait Duration**: - - Shorter durations (e.g., 1m) work well for bursty workloads - Longer durations (e.g., 5m) are better for stable, predictable loads 3. **Rolling Window**: - - Shorter windows (2-5m) provide faster response to changes - Longer windows (10m+) provide more stable scaling behavior diff --git a/frontend/docs/pages/home/compute/cpu.mdx b/frontend/docs/pages/home/compute/cpu.mdx index 88b3fb251..8aa425fa8 100644 --- a/frontend/docs/pages/home/compute/cpu.mdx +++ b/frontend/docs/pages/home/compute/cpu.mdx @@ -77,19 +77,16 @@ The `num_replicas` parameter determines the total number of machines that will r ## Best Practices 1. **Resource Allocation** - - Start with minimum required resources - Scale up based on monitoring and performance needs - Consider using performance CPUs for production workloads 2. **Region Selection** - - Select regions close to your data sources and users - Include multiple regions for global availability - Consider selecting regions in different geographical areas for better redundancy 3. 
**Memory Configuration** - - Stay within the allowed memory ranges for your CPU type - Monitor memory usage to optimize allocation - Consider workload memory requirements when selecting CPU type diff --git a/frontend/docs/pages/home/compute/environment-variables.mdx b/frontend/docs/pages/home/compute/environment-variables.mdx index d1ebf949d..52664a8ff 100644 --- a/frontend/docs/pages/home/compute/environment-variables.mdx +++ b/frontend/docs/pages/home/compute/environment-variables.mdx @@ -34,19 +34,16 @@ You can configure environment variables through: ## Best Practices 1. **Security** - - Never commit sensitive values to version control - Use Hatchet's secrets management for sensitive data - Rotate sensitive values regularly 2. **Naming Conventions** - - Use descriptive, meaningful names - Follow a consistent naming pattern - Document non-obvious variables 3. **Value Management** - - Use appropriate data types - Validate values before deployment - Keep values consistent across related services @@ -99,13 +96,11 @@ Environment variables are mounted into your worker containers at runtime. You ca ## Troubleshooting 1. **Variables Not Available** - - Verify the variable is correctly set in your configuration - Check for typos in variable names - Ensure the service has been redeployed after changes 2. **Incorrect Values** - - Check for proper escaping of special characters - Verify the value format is correct - Look for conflicting definitions diff --git a/frontend/docs/pages/home/compute/git-ops.mdx b/frontend/docs/pages/home/compute/git-ops.mdx index 838264a99..4bb972d02 100644 --- a/frontend/docs/pages/home/compute/git-ops.mdx +++ b/frontend/docs/pages/home/compute/git-ops.mdx @@ -24,13 +24,11 @@ GitOps in Hatchet Compute means that: ### Repository Setup 1. **Github Account** - - Select your GitHub account from the dropdown - If your account isn't listed, click "Link a new repository" to connect your GitHub account - Hatchet requires GitHub repository access to enable GitOps workflows 2. **Github Repository** - - Choose the repository containing your service code - The repository should contain your application code and Dockerfile(s) - Ensure Hatchet has the necessary permissions to access your repository @@ -44,7 +42,6 @@ GitOps in Hatchet Compute means that: ### Build Settings 1. **Build Directory** - - Specify the directory containing your service code - This is the root directory where your build will run - Example: `.` for repository root, or `services/myservice` for a monorepo @@ -96,7 +93,6 @@ Your Dockerfile needs to be properly configured to run Hatchet workers. See our ``` 2. **Dockerfile Guidelines** - - Use multi-stage builds to optimize image size - Specify exact versions for base images - Include only necessary files using `.dockerignore` @@ -104,7 +100,6 @@ Your Dockerfile needs to be properly configured to run Hatchet workers. See our - Cache dependencies effectively 3. **Branch Management** - - Use feature branches for development - Set up branch protection rules - Consider using environment-specific branches (e.g., `main` for production, `staging` for staging) @@ -139,14 +134,12 @@ You can monitor your deployments in the Hatchet UI: Common issues and solutions: 1. **Build Failures** - - Check your Dockerfile syntax - Verify build context is correct - Ensure all required files are included - Review build logs in the Hatchet UI 2. 
**Permission Issues** - - Verify GitHub repository access for Hatchet users who need to manage configuration - Ensure correct branch configuration for your service diff --git a/frontend/docs/pages/home/compute/gpu.mdx b/frontend/docs/pages/home/compute/gpu.mdx index c29eb63fa..a52776542 100644 --- a/frontend/docs/pages/home/compute/gpu.mdx +++ b/frontend/docs/pages/home/compute/gpu.mdx @@ -137,19 +137,16 @@ compute = Compute( ## Best Practices 1. **GPU Selection** - - Choose GPU type based on workload requirements - Consider memory requirements for your models - Factor in regional availability 2. **Docker Optimization** - - Use specific library versions instead of meta-packages - Implement multi-stage builds to reduce image size - Only install required CUDA libraries 3. **Region Strategy** - - Select regions based on data locality - Consider backup regions for redundancy - Remember that replicas are randomly distributed across specified regions diff --git a/frontend/docs/pages/self-hosting/prometheus-metrics.mdx b/frontend/docs/pages/self-hosting/prometheus-metrics.mdx index 8e752f0f6..2150ee9f7 100644 --- a/frontend/docs/pages/self-hosting/prometheus-metrics.mdx +++ b/frontend/docs/pages/self-hosting/prometheus-metrics.mdx @@ -11,13 +11,11 @@ This document provides an overview of the Prometheus metrics exposed by Hatchet, To enable and configure the Prometheus metrics endpoint in your Hatchet server, set the following environment variables (bound to Viper keys as shown): - **`SERVER_PROMETHEUS_ENABLED`** (`prometheus.enabled`) - - Type: boolean - Default: `false` - Description: Enables or disables the Prometheus metrics HTTP server. - **`SERVER_PROMETHEUS_ADDRESS`** (`prometheus.address`) - - Type: string - Default: `":9090"` - Description: The network address and port to bind the Prometheus metrics server to. @@ -30,17 +28,14 @@ To enable and configure the Prometheus metrics endpoint in your Hatchet server, If you have set up a Prometheus instance to scrape Hatchet metrics, you can enable the [tenant API endpoint](/home/prometheus-metrics) by setting the following variables: - **`SERVER_PROMETHEUS_SERVER_URL`** (`prometheus.prometheusServerURL`) - - Type: string - Description: The Prometheus server URL. - **`SERVER_PROMETHEUS_SERVER_USERNAME`** (`prometheus.prometheusServerUsername`) - - Type: string - Description: The username to access the Prometheus instance via HTTP basic auth. - **`SERVER_PROMETHEUS_SERVER_PASSWORD`** (`prometheus.prometheusServerPassword`) - - Type: string - Description: The password to access the Prometheus instance via HTTP basic auth. 
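For readers wiring up the settings documented in the prometheus-metrics.mdx hunk above, here is a minimal sketch of what they look like in practice. The Compose service name is an assumption for illustration; the variable names, the Viper keys, and the `:9090` default come from the docs text in this patch:

```yaml
# docker-compose.yml (excerpt), a hypothetical sketch: the service name
# "hatchet-server" is an assumption; adapt it to your deployment.
services:
  hatchet-server:
    environment:
      SERVER_PROMETHEUS_ENABLED: "true" # prometheus.enabled (defaults to false)
      SERVER_PROMETHEUS_ADDRESS: ":9090" # prometheus.address (default ":9090")
    ports:
      - "9090:9090" # expose the metrics listener to the scraper
```

A Prometheus instance would then scrape that address, e.g. a `scrape_configs` job with `targets: ["hatchet-server:9090"]`, after which the tenant API variables (`SERVER_PROMETHEUS_SERVER_URL` and the basic-auth credentials) can point the Hatchet server back at that Prometheus instance.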
diff --git a/frontend/docs/pnpm-lock.yaml b/frontend/docs/pnpm-lock.yaml index b58cb42b1..63727497b 100644 --- a/frontend/docs/pnpm-lock.yaml +++ b/frontend/docs/pnpm-lock.yaml @@ -20,7 +20,7 @@ importers: version: 1.2.3(@types/react@18.3.23)(react@18.3.1) autoprefixer: specifier: ^10.4.21 - version: 10.4.21(postcss@8.5.4) + version: 10.4.21(postcss@8.5.6) class-variance-authority: specifier: ^0.7.1 version: 0.7.1 @@ -38,19 +38,19 @@ importers: version: 0.459.0(react@18.3.1) next: specifier: ^14.2.29 - version: 14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 14.2.31(react-dom@18.3.1)(react@18.3.1) nextra: specifier: ^3.3.1 - version: 3.3.1(@types/react@18.3.23)(acorn@8.14.1)(next@14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@4.9.5) + version: 3.3.1(@types/react@18.3.23)(acorn@8.15.0)(next@14.2.31)(react-dom@18.3.1)(react@18.3.1)(typescript@5.9.2) nextra-theme-docs: specifier: ^3.3.1 - version: 3.3.1(next@14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(nextra@3.3.1(@types/react@18.3.23)(acorn@8.14.1)(next@14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@4.9.5))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 3.3.1(next@14.2.31)(nextra@3.3.1)(react-dom@18.3.1)(react@18.3.1) postcss: specifier: ^8.5.4 - version: 8.5.4 + version: 8.5.6 posthog-js: specifier: ^1.249.3 - version: 1.249.3 + version: 1.260.1 react: specifier: ^18.3.1 version: 18.3.1 @@ -62,10 +62,10 @@ importers: version: 2.1.0(react@18.3.1) react-tweet: specifier: ^3.2.2 - version: 3.2.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 3.2.2(react-dom@18.3.1)(react@18.3.1) recharts: specifier: ^2.15.3 - version: 2.15.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + version: 2.15.4(react-dom@18.3.1)(react@18.3.1) shiki: specifier: ^1.29.2 version: 1.29.2 @@ -90,10 +90,10 @@ importers: version: 18.3.7(@types/react@18.3.23) prettier: specifier: ^3.5.3 - version: 3.5.3 + version: 3.6.2 typescript: - specifier: ^4.9.5 - version: 4.9.5 + specifier: ^5.8.2 + version: 5.9.2 packages: @@ -107,8 +107,8 @@ packages: '@antfu/utils@8.1.1': resolution: {integrity: sha512-Mex9nXf9vR6AhcXmMrlz/HVgYYZpVGJ6YlPgwl7UnaFpnshXs6EK/oa5Gpf3CzENMjkvEx2tQtntGnb7UtSTOQ==} - '@babel/runtime@7.27.6': - resolution: {integrity: sha512-vbavdySgbTTrmFE+EsiqUTzlOr5bzlnJtUv9PynGCAKvfQqjIXbvFdumPM/GxMDfyuGMJaJAU6TO4zc1Jf1i8Q==} + '@babel/runtime@7.28.3': + resolution: {integrity: sha512-9uIQ10o0WGdpP6GDhXcdOJPJuDgFtIDtN/9+ArJQ2NAfAmiuhTQdzkaTGR33v43GYS2UrSA0eX2pPPHoFVvpxA==} engines: {node: '>=6.9.0'} '@braintree/sanitize-url@7.1.1': @@ -129,14 +129,14 @@ packages: '@chevrotain/utils@11.0.3': resolution: {integrity: sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==} - '@floating-ui/core@1.7.1': - resolution: {integrity: sha512-azI0DrjMMfIug/ExbBaeDVJXcY0a7EPvPjb2xAJPa4HeimBX+Z18HK8QQR3jb6356SnDDdxx+hinMLcJEDdOjw==} + '@floating-ui/core@1.7.3': + resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==} - '@floating-ui/dom@1.7.1': - resolution: {integrity: sha512-cwsmW/zyw5ltYTUeeYJ60CnQuPqmGwuGVhG9w0PRaRKkAyi38BT5CKrpIbb+jtahSwUl04cWzSx9ZOIxeS6RsQ==} + '@floating-ui/dom@1.7.3': + resolution: {integrity: sha512-uZA413QEpNuhtb3/iIKoYMSK07keHPYeXF02Zhd6e213j+d1NamLix/mCLxBUDW/Gx52sPH2m+chlUsyaBs/Ag==} - '@floating-ui/react-dom@2.1.3': - resolution: {integrity: 
sha512-huMBfiU9UnQ2oBwIhgzyIiSpVgvlDstU8CX0AF+wS+KzmYMs0J2a3GwuFHV1Lz+jlrQGeC1fF+Nv0QoumyV0bA==} + '@floating-ui/react-dom@2.1.5': + resolution: {integrity: sha512-HDO/1/1oH9fjj4eLgegrlH3dklZpHtUYYFiVwMUwfGvk9jWDRWqkklA2/NFScknrcNSspbV868WjXORvreDX+Q==} peerDependencies: react: '>=16.8.0' react-dom: '>=16.8.0' @@ -147,14 +147,14 @@ packages: react: '>=16.8.0' react-dom: '>=16.8.0' - '@floating-ui/utils@0.2.9': - resolution: {integrity: sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg==} + '@floating-ui/utils@0.2.10': + resolution: {integrity: sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==} '@formatjs/intl-localematcher@0.5.10': resolution: {integrity: sha512-af3qATX+m4Rnd9+wHcjJ4w2ijq+rAVP3CCinJQvFv1kgSu1W6jypUmvleJxcewdxmutM8dmIRZFxO/IQBZmP2Q==} - '@headlessui/react@2.2.4': - resolution: {integrity: sha512-lz+OGcAH1dK93rgSMzXmm1qKOJkBUqZf1L4M8TWLNplftQD3IkoEDdUFNfAn4ylsN6WOTVtWaLmvmaHOUk1dTA==} + '@headlessui/react@2.2.7': + resolution: {integrity: sha512-WKdTymY8Y49H8/gUc/lIyYK1M+/6dq0Iywh4zTZVAaiTDprRfioxSgD0wnXTQTBpjpGJuTL1NO/mqEvc//5SSg==} engines: {node: '>=10'} peerDependencies: react: ^18 || ^19 || ^19.0.0-rc @@ -170,23 +170,18 @@ packages: resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} - '@jridgewell/gen-mapping@0.3.8': - resolution: {integrity: sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==} - engines: {node: '>=6.0.0'} + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} '@jridgewell/resolve-uri@3.1.2': resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} engines: {node: '>=6.0.0'} - '@jridgewell/set-array@1.2.1': - resolution: {integrity: sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==} - engines: {node: '>=6.0.0'} + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} - '@jridgewell/sourcemap-codec@1.5.0': - resolution: {integrity: sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==} - - '@jridgewell/trace-mapping@0.3.25': - resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==} + '@jridgewell/trace-mapping@0.3.30': + resolution: {integrity: sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==} '@mdx-js/mdx@3.1.0': resolution: {integrity: sha512-/QxEhPAvGwbQmy1Px8F899L5Uc2KZ6JtXwlCgJmjSTBedwOZkByYcBG4GceIGPXRDsmfxhHazuS+hlOShRLeDw==} @@ -197,150 +192,156 @@ packages: '@types/react': '>=16' react: '>=16' - '@mermaid-js/parser@0.4.0': - resolution: {integrity: sha512-wla8XOWvQAwuqy+gxiZqY+c7FokraOTHRWMsbB4AgRx9Sy7zKslNyejy7E+a77qHfey5GXw/ik3IXv/NHMJgaA==} + '@mermaid-js/parser@0.6.2': + resolution: {integrity: sha512-+PO02uGF6L6Cs0Bw8RpGhikVvMWEysfAyl27qTlroUB8jSWr1lL0Sf6zi78ZxlSnmgSY2AMMKVgghnN9jTtwkQ==} - '@napi-rs/simple-git-android-arm-eabi@0.1.19': - resolution: {integrity: sha512-XryEH/hadZ4Duk/HS/HC/cA1j0RHmqUGey3MsCf65ZS0VrWMqChXM/xlTPWuY5jfCc/rPubHaqI7DZlbexnX/g==} + '@napi-rs/simple-git-android-arm-eabi@0.1.21': + resolution: {integrity: 
sha512-NSZTD3c+RzSR3cg0acPFqUfV64+Vqye4Veda5L9fbbnsYRzziYHbL3alMI/6p5Ur44ezw5RqKRZ4Tbp1T08veA==} engines: {node: '>= 10'} cpu: [arm] os: [android] - '@napi-rs/simple-git-android-arm64@0.1.19': - resolution: {integrity: sha512-ZQ0cPvY6nV9p7zrR9ZPo7hQBkDAcY/CHj3BjYNhykeUCiSNCrhvwX+WEeg5on8M1j4d5jcI/cwVG2FslfiByUg==} + '@napi-rs/simple-git-android-arm64@0.1.21': + resolution: {integrity: sha512-FR8J/pmy4nFyzWon0RuhReucociF8kiCTBRBQV+TN+7tmI2lsHp+8sDNh1HXf+UP0iOC8azMTKwcHiyhv7oPPw==} engines: {node: '>= 10'} cpu: [arm64] os: [android] - '@napi-rs/simple-git-darwin-arm64@0.1.19': - resolution: {integrity: sha512-viZB5TYgjA1vH+QluhxZo0WKro3xBA+1xSzYx8mcxUMO5gnAoUMwXn0ZO/6Zy6pai+aGae+cj6XihGnrBRu3Pg==} + '@napi-rs/simple-git-darwin-arm64@0.1.21': + resolution: {integrity: sha512-FoUck6Sv1VJZyEZ7CcAKPDystkwnv4osJs3XCXCtx6AdT/8oDyfjMHxe3CSBMcNVjawsd+tG7yFvhgKJCdjBvA==} engines: {node: '>= 10'} cpu: [arm64] os: [darwin] - '@napi-rs/simple-git-darwin-x64@0.1.19': - resolution: {integrity: sha512-6dNkzSNUV5X9rsVYQbpZLyJu4Gtkl2vNJ3abBXHX/Etk0ILG5ZasO3ncznIANZQpqcbn/QPHr49J2QYAXGoKJA==} + '@napi-rs/simple-git-darwin-x64@0.1.21': + resolution: {integrity: sha512-PlqZgFhcwJmdj4j/LSLxddrAZU5KbDtq54UMp35++IFs+XkK0SnJlfB9oQTE+m6ieQPYQVz6S9cLeyw5X12L+w==} engines: {node: '>= 10'} cpu: [x64] os: [darwin] - '@napi-rs/simple-git-freebsd-x64@0.1.19': - resolution: {integrity: sha512-sB9krVIchzd20FjI2ZZ8FDsTSsXLBdnwJ6CpeVyrhXHnoszfcqxt49ocZHujAS9lMpXq7i2Nv1EXJmCy4KdhwA==} + '@napi-rs/simple-git-freebsd-x64@0.1.21': + resolution: {integrity: sha512-n3S74zw0WIuCdsXV6hdU3vpakYNZyeTU3VlQdV/m5f3TxxqeEGcxJi18s2QfQOelE/N0Ze+u23USd7b06NQlCg==} engines: {node: '>= 10'} cpu: [x64] os: [freebsd] - '@napi-rs/simple-git-linux-arm-gnueabihf@0.1.19': - resolution: {integrity: sha512-6HPn09lr9N1n5/XKfP8Np53g4fEXVxOFqNkS6rTH3Rm1lZHdazTRH62RggXLTguZwjcE+MvOLvoTIoR5kAS8+g==} + '@napi-rs/simple-git-linux-arm-gnueabihf@0.1.21': + resolution: {integrity: sha512-gsLnZD8OMttCjB2OYofDdsI9SpidMfJP6H8fjPXcon2q90JT/XUS7xIYXDEABiwRvz1BZ149HqmnjO8yPgNMIQ==} engines: {node: '>= 10'} cpu: [arm] os: [linux] - '@napi-rs/simple-git-linux-arm64-gnu@0.1.19': - resolution: {integrity: sha512-G0gISckt4cVDp3oh5Z6PV3GHJrJO6Z8bIS+9xA7vTtKdqB1i5y0n3cSFLlzQciLzhr+CajFD27doW4lEyErQ/Q==} + '@napi-rs/simple-git-linux-arm64-gnu@0.1.21': + resolution: {integrity: sha512-05hSW4K5RexXo6YICmKzBThkY4WXJ25MAkSON720kIVv8ZPLi0ZouijJuM7GWmEZPcgCm6/mvrGrEDrS6i0/Mg==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@napi-rs/simple-git-linux-arm64-musl@0.1.19': - resolution: {integrity: sha512-OwTRF+H4IZYxmDFRi1IrLMfqbdIpvHeYbJl2X94NVsLVOY+3NUHvEzL3fYaVx5urBaMnIK0DD3wZLbcueWvxbA==} + '@napi-rs/simple-git-linux-arm64-musl@0.1.21': + resolution: {integrity: sha512-z2dyQmwtbpgAuUmWeJBhz00/6C3//SV0YSYE9Smfaf2DiSEEAvWyoni67pQU5/Q9FFaiyvzrCoz966EVNmz6Bg==} engines: {node: '>= 10'} cpu: [arm64] os: [linux] - '@napi-rs/simple-git-linux-powerpc64le-gnu@0.1.19': - resolution: {integrity: sha512-p7zuNNVyzpRvkCt2RIGv9FX/WPcPbZ6/FRUgUTZkA2WU33mrbvNqSi4AOqCCl6mBvEd+EOw5NU4lS9ORRJvAEg==} + '@napi-rs/simple-git-linux-ppc64-gnu@0.1.21': + resolution: {integrity: sha512-mEkVx9oQxKTdzTdjDTCc9XAaH9E9eI2F+KsY0R6DTYafgb/rwq0FJO+eCa8Llzz6ndgbLrzq4q+wHqR8z7dF3w==} engines: {node: '>= 10'} - cpu: [powerpc64le] + cpu: [ppc64] os: [linux] - '@napi-rs/simple-git-linux-s390x-gnu@0.1.19': - resolution: {integrity: sha512-6N2vwJUPLiak8GLrS0a3is0gSb0UwI2CHOOqtvQxPmv+JVI8kn3vKiUscsktdDb0wGEPeZ8PvZs0y8UWix7K4g==} + '@napi-rs/simple-git-linux-s390x-gnu@0.1.21': + resolution: {integrity: 
sha512-FulRem5vdsvH0VER2Q9cynv01SugMk/jQwbytwyPziF6JZ81D6I8otP9NkS3dqv//6HCokyojH+oOnrsF82/VQ==} engines: {node: '>= 10'} cpu: [s390x] os: [linux] - '@napi-rs/simple-git-linux-x64-gnu@0.1.19': - resolution: {integrity: sha512-61YfeO1J13WK7MalLgP3QlV6of2rWnVw1aqxWkAgy/lGxoOFSJ4Wid6ANVCEZk4tJpPX/XNeneqkUz5xpeb2Cw==} + '@napi-rs/simple-git-linux-x64-gnu@0.1.21': + resolution: {integrity: sha512-SY6HuLVH+IFlkz8aTf4hwtaXalqBIPyE7FvEMCQIVPf85slOHMs9RThmrL7fvuSl0EDuUKOXANUP2OtdgT+zNg==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@napi-rs/simple-git-linux-x64-musl@0.1.19': - resolution: {integrity: sha512-cCTWNpMJnN3PrUBItWcs3dQKCydsIasbrS3laMzq8k7OzF93Zrp2LWDTPlLCO9brbBVpBzy2Qk5Xg9uAfe/Ukw==} + '@napi-rs/simple-git-linux-x64-musl@0.1.21': + resolution: {integrity: sha512-bG6zRqlXmVysjUUXNPsApfXP6c+rSjhinmGlLh8XW6Tfj0PqYmbSTL/3XcowbP6yJGTJbbkvxmhQDdGYO99AnQ==} engines: {node: '>= 10'} cpu: [x64] os: [linux] - '@napi-rs/simple-git-win32-arm64-msvc@0.1.19': - resolution: {integrity: sha512-sWavb1BjeLKKBA+PbTsRSSzVNfb7V/dOpaJvkgR5d2kWFn/AHmCZHSSj/3nyZdYf0BdDC+DIvqk3daAEZ6QMVw==} + '@napi-rs/simple-git-win32-arm64-msvc@0.1.21': + resolution: {integrity: sha512-bTX+Xb5Fl3AYK2c8E/Pm04i29n9gP+FGNzaT7AQp0q/5Bgq1z/4jEadSmg5hXvoJOlIFN0+HZyau9gWGq7DpCQ==} engines: {node: '>= 10'} cpu: [arm64] os: [win32] - '@napi-rs/simple-git-win32-x64-msvc@0.1.19': - resolution: {integrity: sha512-FmNuPoK4+qwaSCkp8lm3sJlrxk374enW+zCE5ZksXlZzj/9BDJAULJb5QUJ7o9Y8A/G+d8LkdQLPBE2Jaxe5XA==} - engines: {node: '>= 10'} - cpu: [x64] - os: [win32] - - '@napi-rs/simple-git@0.1.19': - resolution: {integrity: sha512-jMxvwzkKzd3cXo2EB9GM2ic0eYo2rP/BS6gJt6HnWbsDO1O8GSD4k7o2Cpr2YERtMpGF/MGcDfsfj2EbQPtrXw==} - engines: {node: '>= 10'} - - '@next/env@14.2.29': - resolution: {integrity: sha512-UzgLR2eBfhKIQt0aJ7PWH7XRPYw7SXz0Fpzdl5THjUnvxy4kfBk9OU4RNPNiETewEEtaBcExNFNn1QWH8wQTjg==} - - '@next/swc-darwin-arm64@14.2.29': - resolution: {integrity: sha512-wWtrAaxCVMejxPHFb1SK/PVV1WDIrXGs9ki0C/kUM8ubKHQm+3hU9MouUywCw8Wbhj3pewfHT2wjunLEr/TaLA==} - engines: {node: '>= 10'} - cpu: [arm64] - os: [darwin] - - '@next/swc-darwin-x64@14.2.29': - resolution: {integrity: sha512-7Z/jk+6EVBj4pNLw/JQrvZVrAh9Bv8q81zCFSfvTMZ51WySyEHWVpwCEaJY910LyBftv2F37kuDPQm0w9CEXyg==} - engines: {node: '>= 10'} - cpu: [x64] - os: [darwin] - - '@next/swc-linux-arm64-gnu@14.2.29': - resolution: {integrity: sha512-o6hrz5xRBwi+G7JFTHc+RUsXo2lVXEfwh4/qsuWBMQq6aut+0w98WEnoNwAwt7hkEqegzvazf81dNiwo7KjITw==} - engines: {node: '>= 10'} - cpu: [arm64] - os: [linux] - - '@next/swc-linux-arm64-musl@14.2.29': - resolution: {integrity: sha512-9i+JEHBOVgqxQ92HHRFlSW1EQXqa/89IVjtHgOqsShCcB/ZBjTtkWGi+SGCJaYyWkr/lzu51NTMCfKuBf7ULNw==} - engines: {node: '>= 10'} - cpu: [arm64] - os: [linux] - - '@next/swc-linux-x64-gnu@14.2.29': - resolution: {integrity: sha512-B7JtMbkUwHijrGBOhgSQu2ncbCYq9E7PZ7MX58kxheiEOwdkM+jGx0cBb+rN5AeqF96JypEppK6i/bEL9T13lA==} - engines: {node: '>= 10'} - cpu: [x64] - os: [linux] - - '@next/swc-linux-x64-musl@14.2.29': - resolution: {integrity: sha512-yCcZo1OrO3aQ38B5zctqKU1Z3klOohIxug6qdiKO3Q3qNye/1n6XIs01YJ+Uf+TdpZQ0fNrOQI2HrTLF3Zprnw==} - engines: {node: '>= 10'} - cpu: [x64] - os: [linux] - - '@next/swc-win32-arm64-msvc@14.2.29': - resolution: {integrity: sha512-WnrfeOEtTVidI9Z6jDLy+gxrpDcEJtZva54LYC0bSKQqmyuHzl0ego+v0F/v2aXq0am67BRqo/ybmmt45Tzo4A==} - engines: {node: '>= 10'} - cpu: [arm64] - os: [win32] - - '@next/swc-win32-ia32-msvc@14.2.29': - resolution: {integrity: 
sha512-vkcriFROT4wsTdSeIzbxaZjTNTFKjSYmLd8q/GVH3Dn8JmYjUKOuKXHK8n+lovW/kdcpIvydO5GtN+It2CvKWA==} + '@napi-rs/simple-git-win32-ia32-msvc@0.1.21': + resolution: {integrity: sha512-jGdFPAJYgUSrPTGaM9D7devuSXby6FL9NzKffB5AXcL0AeB5HpqxaxOiOikunP5NQil1vEow6YxD4SyDIX57Cg==} engines: {node: '>= 10'} cpu: [ia32] os: [win32] - '@next/swc-win32-x64-msvc@14.2.29': - resolution: {integrity: sha512-iPPwUEKnVs7pwR0EBLJlwxLD7TTHWS/AoVZx1l9ZQzfQciqaFEr5AlYzA2uB6Fyby1IF18t4PL0nTpB+k4Tzlw==} + '@napi-rs/simple-git-win32-x64-msvc@0.1.21': + resolution: {integrity: sha512-OEVKwZ4pEGSk3AxlEaEKba6bCepbV7w+v7BjeNgCscl0Evw8A81nV2ytNqajZAPk49bZZDSDzeNWe44kkaD96Q==} + engines: {node: '>= 10'} + cpu: [x64] + os: [win32] + + '@napi-rs/simple-git@0.1.21': + resolution: {integrity: sha512-49ij3JTYr/eSqvUaAXORgohU7pARH7GpCn/8JrwQo/mIulSY+gc/Xj4FtZ2+RdJrK6dLD6jSGU5vI17Of0qtCw==} + engines: {node: '>= 10'} + + '@next/env@14.2.31': + resolution: {integrity: sha512-X8VxxYL6VuezrG82h0pUA1V+DuTSJp7Nv15bxq3ivrFqZLjx81rfeHMWOE9T0jm1n3DtHGv8gdn6B0T0kr0D3Q==} + + '@next/swc-darwin-arm64@14.2.31': + resolution: {integrity: sha512-dTHKfaFO/xMJ3kzhXYgf64VtV6MMwDs2viedDOdP+ezd0zWMOQZkxcwOfdcQeQCpouTr9b+xOqMCUXxgLizl8Q==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [darwin] + + '@next/swc-darwin-x64@14.2.31': + resolution: {integrity: sha512-iSavebQgeMukUAfjfW8Fi2Iz01t95yxRl2w2wCzjD91h5In9la99QIDKcKSYPfqLjCgwz3JpIWxLG6LM/sxL4g==} + engines: {node: '>= 10'} + cpu: [x64] + os: [darwin] + + '@next/swc-linux-arm64-gnu@14.2.31': + resolution: {integrity: sha512-XJb3/LURg1u1SdQoopG6jDL2otxGKChH2UYnUTcby4izjM0il7ylBY5TIA7myhvHj9lG5pn9F2nR2s3i8X9awQ==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + + '@next/swc-linux-arm64-musl@14.2.31': + resolution: {integrity: sha512-IInDAcchNCu3BzocdqdCv1bKCmUVO/bKJHnBFTeq3svfaWpOPewaLJ2Lu3GL4yV76c/86ZvpBbG/JJ1lVIs5MA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + + '@next/swc-linux-x64-gnu@14.2.31': + resolution: {integrity: sha512-YTChJL5/9e4NXPKW+OJzsQa42RiWUNbE+k+ReHvA+lwXk+bvzTsVQboNcezWOuCD+p/J+ntxKOB/81o0MenBhw==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + + '@next/swc-linux-x64-musl@14.2.31': + resolution: {integrity: sha512-A0JmD1y4q/9ufOGEAhoa60Sof++X10PEoiWOH0gZ2isufWZeV03NnyRlRmJpRQWGIbRkJUmBo9I3Qz5C10vx4w==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + + '@next/swc-win32-arm64-msvc@14.2.31': + resolution: {integrity: sha512-nowJ5GbMeDOMzbTm29YqrdrD6lTM8qn2wnZfGpYMY7SZODYYpaJHH1FJXE1l1zWICHR+WfIMytlTDBHu10jb8A==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [win32] + + '@next/swc-win32-ia32-msvc@14.2.31': + resolution: {integrity: sha512-pk9Bu4K0015anTS1OS9d/SpS0UtRObC+xe93fwnm7Gvqbv/W1ZbzhK4nvc96RURIQOux3P/bBH316xz8wjGSsA==} + engines: {node: '>= 10'} + cpu: [ia32] + os: [win32] + + '@next/swc-win32-x64-msvc@14.2.31': + resolution: {integrity: sha512-LwFZd4JFnMHGceItR9+jtlMm8lGLU/IPkgjBBgYmdYSfalbHCiDpjMYtgDQ2wtwiAOSJOCyFI4m8PikrsDyA6Q==} engines: {node: '>= 10'} cpu: [x64] os: [win32] @@ -384,40 +385,40 @@ packages: '@types/react': optional: true - '@react-aria/focus@3.20.3': - resolution: {integrity: sha512-rR5uZUMSY4xLHmpK/I8bP1V6vUNHFo33gTvrvNUsAKKqvMfa7R2nu5A6v97dr5g6tVH6xzpdkPsOJCWh90H2cw==} + '@react-aria/focus@3.21.0': + resolution: {integrity: sha512-7NEGtTPsBy52EZ/ToVKCu0HSelE3kq9qeis+2eEq90XSuJOMaDHUQrA7RC2Y89tlEwQB31bud/kKRi9Qme1dkA==} peerDependencies: react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1 react-dom: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1 - '@react-aria/interactions@3.25.1': - resolution: 
{integrity: sha512-ntLrlgqkmZupbbjekz3fE/n3eQH2vhncx8gUp0+N+GttKWevx7jos11JUBjnJwb1RSOPgRUFcrluOqBp0VgcfQ==} + '@react-aria/interactions@3.25.4': + resolution: {integrity: sha512-HBQMxgUPHrW8V63u9uGgBymkMfj6vdWbB0GgUJY49K9mBKMsypcHeWkWM6+bF7kxRO728/IK8bWDV6whDbqjHg==} peerDependencies: react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1 react-dom: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1 - '@react-aria/ssr@3.9.8': - resolution: {integrity: sha512-lQDE/c9uTfBSDOjaZUJS8xP2jCKVk4zjQeIlCH90xaLhHDgbpCdns3xvFpJJujfj3nI4Ll9K7A+ONUBDCASOuw==} + '@react-aria/ssr@3.9.10': + resolution: {integrity: sha512-hvTm77Pf+pMBhuBm760Li0BVIO38jv1IBws1xFm1NoL26PU+fe+FMW5+VZWyANR6nYL65joaJKZqOdTQMkO9IQ==} engines: {node: '>= 12'} peerDependencies: react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1 - '@react-aria/utils@3.29.0': - resolution: {integrity: sha512-jSOrZimCuT1iKNVlhjIxDkAhgF7HSp3pqyT6qjg/ZoA0wfqCi/okmrMPiWSAKBnkgX93N8GYTLT3CIEO6WZe9Q==} + '@react-aria/utils@3.30.0': + resolution: {integrity: sha512-ydA6y5G1+gbem3Va2nczj/0G0W7/jUVo/cbN10WA5IizzWIwMP5qhFr7macgbKfHMkZ+YZC3oXnt2NNre5odKw==} peerDependencies: react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1 react-dom: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1 - '@react-stately/flags@3.1.1': - resolution: {integrity: sha512-XPR5gi5LfrPdhxZzdIlJDz/B5cBf63l4q6/AzNqVWFKgd0QqY5LvWJftXkklaIUpKSJkIKQb8dphuZXDtkWNqg==} + '@react-stately/flags@3.1.2': + resolution: {integrity: sha512-2HjFcZx1MyQXoPqcBGALwWWmgFVUk2TuKVIQxCbRq7fPyWXIl6VHcakCLurdtYC2Iks7zizvz0Idv48MQ38DWg==} - '@react-stately/utils@3.10.6': - resolution: {integrity: sha512-O76ip4InfTTzAJrg8OaZxKU4vvjMDOpfA/PGNOytiXwBbkct2ZeZwaimJ8Bt9W1bj5VsZ81/o/tW4BacbdDOMA==} + '@react-stately/utils@3.10.8': + resolution: {integrity: sha512-SN3/h7SzRsusVQjQ4v10LaVsDc81jyyR0DD5HnsQitm/I5WDpaSr2nRHtyloPFU48jlql1XX/S04T2DLQM7Y3g==} peerDependencies: react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1 - '@react-types/shared@3.29.1': - resolution: {integrity: sha512-KtM+cDf2CXoUX439rfEhbnEdAgFZX20UP2A35ypNIawR7/PFFPjQDWyA2EnClCcW/dLWJDEPX2U8+EJff8xqmQ==} + '@react-types/shared@3.31.0': + resolution: {integrity: sha512-ua5U6V66gDcbLZe4P2QeyNgPp4YWD1ymGA6j3n+s8CGExtrCPe64v+g4mvpT8Bnb985R96e4zFT61+m0YCwqMg==} peerDependencies: react: ^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1 @@ -454,14 +455,14 @@ packages: '@swc/helpers@0.5.5': resolution: {integrity: sha512-KGYxvIOXcceOAbEk4bi/dVLEK9z8sZ0uBB3Il5b1rhfClSpcX0yfRO0KmTkqR2cnQDymwLB+25ZyMzICg/cm/A==} - '@tanstack/react-virtual@3.13.9': - resolution: {integrity: sha512-SPWC8kwG/dWBf7Py7cfheAPOxuvIv4fFQ54PdmYbg7CpXfsKxkucak43Q0qKsxVthhUJQ1A7CIMAIplq4BjVwA==} + '@tanstack/react-virtual@3.13.12': + resolution: {integrity: sha512-Gd13QdxPSukP8ZrkbgS2RwoZseTTbQPLnQEn7HY/rqtM+8Zt95f7xKC7N0EsKs7aoz0WzZ+fditZux+F8EzYxA==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - '@tanstack/virtual-core@3.13.9': - resolution: {integrity: sha512-3jztt0jpaoJO5TARe2WIHC1UQC3VMLAFUW5mmMo0yrkwtDB2AQP0+sh10BVUpWrnvHjSLvzFizydtEGLCJKFoQ==} + '@tanstack/virtual-core@3.13.12': + resolution: {integrity: sha512-1YBOJfRHV4sXUmWsFSf5rQor4Ss82G8dQWLRbnk3GA4jeP8hQt1hxXh0tmflpC0dz3VgEv/1+qwPyLeWkQuPFA==} '@theguild/remark-mermaid@0.1.3': resolution: {integrity: sha512-2FjVlaaKXK7Zj7UJAgOVTyaahn/3/EAfqYhyXg0BfDBVUl+lXcoIWRaxzqfnDr2rv8ax6GsC5mNh6hAaT86PDw==} @@ -492,8 +493,8 @@ packages: '@types/d3-delaunay@6.0.4': resolution: {integrity: 
sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==} - '@types/d3-dispatch@3.0.6': - resolution: {integrity: sha512-4fvZhzMeeuBJYZXRXrRIQnvUYfyXwYmLsdiN7XXmVNQKKw1cM8a5WdID0g1hVFZDqT9ZqZEY5pD44p24VS7iZQ==} + '@types/d3-dispatch@3.0.7': + resolution: {integrity: sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==} '@types/d3-drag@3.0.7': resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==} @@ -570,8 +571,8 @@ packages: '@types/estree-jsx@1.0.5': resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==} - '@types/estree@1.0.7': - resolution: {integrity: sha512-w28IoSUCJpidD/TGviZwwMJckNESJZXFu7NBZ5YJ4mEUnNraUn9Pm8HSZm/jDF1pDWYKspWE7oVphigUPRakIQ==} + '@types/estree@1.0.8': + resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} '@types/geojson@7946.0.16': resolution: {integrity: sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==} @@ -597,8 +598,8 @@ packages: '@types/node@18.11.10': resolution: {integrity: sha512-juG3RWMBOqcOuXC643OAdSA525V44cVgGV6dUDuiFtss+8Fk5x1hI93Rsld43VeJVIeqlP9I7Fn9/qaVqoEAuQ==} - '@types/prop-types@15.7.14': - resolution: {integrity: sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ==} + '@types/prop-types@15.7.15': + resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==} '@types/react-dom@18.3.7': resolution: {integrity: sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==} @@ -634,8 +635,8 @@ packages: peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 - acorn@8.14.1: - resolution: {integrity: sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==} + acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} engines: {node: '>=0.4.0'} hasBin: true @@ -697,15 +698,15 @@ packages: resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} engines: {node: '>=8'} - brace-expansion@2.0.1: - resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} - browserslist@4.25.0: - resolution: {integrity: sha512-PJ8gYKeS5e/whHBh8xrwYK+dAvEj7JXtz6uTucnMRB8OiGTsKccFekoRrjajPBHV8oOY+2tI4uxeceSimKwMFA==} + browserslist@4.25.2: + resolution: {integrity: sha512-0si2SJK3ooGzIawRu61ZdPCO1IncZwS8IzuX73sPZsXW6EQ/w/DAfPyKI8l1ETTCr2MnvqWitmlCUxgdul45jA==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true @@ -717,14 +718,14 @@ packages: resolution: {integrity: sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==} engines: {node: '>= 6'} - caniuse-lite@1.0.30001721: - resolution: {integrity: sha512-cOuvmUVtKrtEaoKiO0rSc29jcjwMwX5tOHDy4MgVFEWiUXj4uBMJkwI8MDySkgXidpMiHUcviogAvFi4pA2hDQ==} + 
caniuse-lite@1.0.30001735: + resolution: {integrity: sha512-EV/laoX7Wq2J9TQlyIXRxTJqIw4sxfXS4OYgudGxBYRuTv0q7AM6yMEpU/Vo1I94thg9U6EZ2NfZx9GJq83u7w==} ccount@2.0.1: resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} - chalk@5.4.1: - resolution: {integrity: sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==} + chalk@5.5.0: + resolution: {integrity: sha512-1tm8DTaJhPBG3bIkVeZt1iZM9GfSX2lzOeDVZH9R9ffRHpmHvxZ/QhgQH/aDTkswQVt+YHdXAdS/In/30OjCbg==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} character-entities-html4@2.1.0: @@ -803,8 +804,8 @@ packages: confbox@0.2.2: resolution: {integrity: sha512-1NB+BKqhtNipMsov4xI/NnhCKp9XG9NamYp5PVm9klAT0fsrNPjaFICsCFhNhwZJKNh7zB/3q8qXz0E9oaMNtQ==} - core-js@3.42.0: - resolution: {integrity: sha512-Sz4PP4ZA+Rq4II21qkNqOEDTDrCvcANId3xpIgB34NDkWc3UduWj2dqEtN9yZIq8Dk3HyPI33x9sqqU5C8sr0g==} + core-js@3.45.0: + resolution: {integrity: sha512-c2KZL9lP4DjkN3hk/an4pWn5b5ZefhRJnAc42n6LJ19kSnbeRbdQZE5dSeE2LBol1OwJD3X1BQvFTAsa8ReeDA==} cose-base@1.0.3: resolution: {integrity: sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==} @@ -834,8 +835,8 @@ packages: peerDependencies: cytoscape: ^3.2.0 - cytoscape@3.32.0: - resolution: {integrity: sha512-5JHBC9n75kz5851jeklCPmZWcg3hUe6sjqJvyk3+hVqFaKcHwHgxsjeN1yLmggoUc6STbtm9/NQyabQehfjvWQ==} + cytoscape@3.33.1: + resolution: {integrity: sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==} engines: {node: '>=0.10'} d3-array@2.12.1: @@ -995,8 +996,8 @@ packages: decimal.js-light@2.5.1: resolution: {integrity: sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==} - decode-named-character-reference@1.1.0: - resolution: {integrity: sha512-Wy+JTSbFThEOXQIR2L6mxJvEs+veIzpmqD7ynWxMXGpnk3smkHQOp6forLdHsKpAMW9iJpaBBIxz285t1n1C3w==} + decode-named-character-reference@1.2.0: + resolution: {integrity: sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==} delaunator@5.0.1: resolution: {integrity: sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==} @@ -1023,8 +1024,8 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - electron-to-chromium@1.5.165: - resolution: {integrity: sha512-naiMx1Z6Nb2TxPU6fiFrUrDTjyPMLdTtaOd2oLmG8zVSg2hCWGkhPyxwk+qRmZ1ytwVqUv0u7ZcDA5+ALhaUtw==} + electron-to-chromium@1.5.201: + resolution: {integrity: sha512-ZG65vsrLClodGqywuigc+7m0gr4ISoTQttfVh7nfpLv0M7SIwF4WbFNEOywcqTiujs12AUeeXbFyQieDICAIxg==} emoji-regex-xs@1.0.0: resolution: {integrity: sha512-LRlerrMYoIDrT6jgpeZ2YYl/L8EulRTt5hQcYjy5AInh7HWXKimpqx68aknBFpGL2+/IcogTcaydJEgaTmOpDg==} @@ -1035,8 +1036,8 @@ packages: emoji-regex@9.2.2: resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} - entities@6.0.0: - resolution: {integrity: sha512-aKstq2TDOndCn4diEyp9Uq/Flu2i1GlLkc6XIDQSDMuaFE3OPW5OphLCyQ5SpSJZTb4reN+kTcYru5yIfXoRPw==} + entities@6.0.1: + resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==} engines: {node: '>=0.12'} esast-util-from-estree@2.0.0: @@ -1096,8 +1097,8 @@ packages: resolution: {integrity: 
sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} engines: {node: '>=16.17'} - exsolve@1.0.5: - resolution: {integrity: sha512-pz5dvkYYKQ1AHVrgOzBKWeP4u4FRb3a6DNK2ucr0OoNwYIU4QWsJ+NM36LLzORT+z845MzKHHhpXiUF5nvQoJg==} + exsolve@1.0.7: + resolution: {integrity: sha512-VO5fQUzZtI6C+vx4w/4BWJpg3s/5l+6pRQEHzFRM8WFi4XffSP1Z+4qi7GbjWbvRQEbdIco5mIMq+zX4rPuLrw==} extend-shallow@2.0.1: resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==} @@ -1408,9 +1409,9 @@ packages: markdown-table@3.0.4: resolution: {integrity: sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==} - marked@15.0.12: - resolution: {integrity: sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA==} - engines: {node: '>= 18'} + marked@16.1.2: + resolution: {integrity: sha512-rNQt5EvRinalby7zJZu/mB+BvaAY2oz3wCuCjt1RDrWNpS1Pdf9xqMOeC9Hm5adBdcV/3XZPJpG58eT+WBc0XQ==} + engines: {node: '>= 20'} hasBin: true mathjax-full@3.2.2: @@ -1477,8 +1478,8 @@ packages: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} - mermaid@11.6.0: - resolution: {integrity: sha512-PE8hGUy1LDlWIHWBP05SFdqUHGmRcCcK4IzpOKPE35eOw+G9zZgcnMpyunJVUEOgb//KBORPjysKndw8bFLuRg==} + mermaid@11.9.0: + resolution: {integrity: sha512-YdPXn9slEwO0omQfQIsW6vS84weVQftIyyTGAZCwM//MGhPzL1+l6vO6bkf0wnP4tHigH1alZ5Ooy3HXI2gOag==} mhchemparser@4.2.1: resolution: {integrity: sha512-kYmyrCirqJf3zZ9t/0wGgRZ4/ZJw//VwaRVGA75C4nhE60vtnIzhl9J9ndkX/h6hxSN7pjg/cE0VxbnNM+bnDQ==} @@ -1637,8 +1638,8 @@ packages: react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc - next@14.2.29: - resolution: {integrity: sha512-s98mCOMOWLGGpGOfgKSnleXLuegvvH415qtRZXpSp00HeEgdmrxmwL9cgKU+h4XrhB16zEI5d/7BnkS3ATInsA==} + next@14.2.31: + resolution: {integrity: sha512-Wyw1m4t8PhqG+or5a1U/Deb888YApC4rAez9bGhHkTsfwAy4SWKVro0GhEx4sox1856IbLhvhce2hAA6o8vkog==} engines: {node: '>=18.17.0'} hasBin: true peerDependencies: @@ -1769,8 +1770,8 @@ packages: pkg-types@1.3.1: resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} - pkg-types@2.1.0: - resolution: {integrity: sha512-wmJwA+8ihJixSoHKxZJRBQG1oY8Yr9pGLzRmSsNms0iNWyHHAlZCa7mmKiFR10YPZuz/2k169JiS/inOjBCZ2A==} + pkg-types@2.2.0: + resolution: {integrity: sha512-2SM/GZGAEkPp3KWORxQZns4M+WSeXbC2HEvmOIJe3Cmiv6ieAJvdVhDldtHqM5J1Y7MrR1XhkBT/rMlhh9FdqQ==} points-on-curve@0.2.0: resolution: {integrity: sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==} @@ -1819,12 +1820,12 @@ packages: resolution: {integrity: sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==} engines: {node: ^10 || ^12 || >=14} - postcss@8.5.4: - resolution: {integrity: sha512-QSa9EBe+uwlGTFmHsPKokv3B/oEMQZxfqW0QqNCyhpa6mB1afzulwn8hihglqAb2pOw+BJgNlmXQ8la2VeHB7w==} + postcss@8.5.6: + resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} - posthog-js@1.249.3: - resolution: {integrity: sha512-+DMUJYADNakPU05tGdJFoWbpyb5gDwRczT2+gFI72Th3315Wl+k7hKwBpzq2zvH5+jKOHjE0SKD/fzf1AFInkw==} + posthog-js@1.260.1: + resolution: {integrity: sha512-DD8ZSRpdScacMqtqUIvMFme8lmOWkOvExG8VvjONE7Cm3xpRH5xXpfrwMJE4bayTGWKMx4ij6SfphK6dm/o2ug==} 
peerDependencies: '@rrweb/types': 2.0.0-alpha.17 rrweb-snapshot: 2.0.0-alpha.17 @@ -1834,11 +1835,11 @@ packages: rrweb-snapshot: optional: true - preact@10.26.8: - resolution: {integrity: sha512-1nMfdFjucm5hKvq0IClqZwK4FJkGXhRrQstOQ3P4vp8HxKrJEMFcY6RdBRVTdfQS/UlnX6gfbPuTvaqx/bDoeQ==} + preact@10.27.0: + resolution: {integrity: sha512-/DTYoB6mwwgPytiqQTh/7SFRL98ZdiD8Sk8zIUVOxtwq4oWcwrcd1uno9fE/zZmUaUrFNYzbH14CPebOz9tZQw==} - prettier@3.5.3: - resolution: {integrity: sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==} + prettier@3.6.2: + resolution: {integrity: sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==} engines: {node: '>=14'} hasBin: true @@ -1874,8 +1875,8 @@ packages: peerDependencies: react: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 - react-medium-image-zoom@5.2.14: - resolution: {integrity: sha512-nfTVYcAUnBzXQpPDcZL+cG/e6UceYUIG+zDcnemL7jtAqbJjVVkA85RgneGtJeni12dTyiRPZVM6Szkmwd/o8w==} + react-medium-image-zoom@5.3.0: + resolution: {integrity: sha512-RCIzVlsKqy3BYgGgYbolUfuvx0aSKC7YhX/IJGEp+WJxsqdIVYJHkBdj++FAj6VD7RiWj6VVmdCfa/9vJE9hZg==} peerDependencies: react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -1915,8 +1916,8 @@ packages: recharts-scale@0.4.5: resolution: {integrity: sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==} - recharts@2.15.3: - resolution: {integrity: sha512-EdOPzTwcFSuqtvkDoaM5ws/Km1+WTAO2eizL7rqiG0V2UVhTnz0m7J2i0CjVPUCdEkZImaWvXLbZDS2H5t6GFQ==} + recharts@2.15.4: + resolution: {integrity: sha512-UT/q6fwS3c1dHbXv2uFgYJ9BMFHu3fwnd7AYZaEQhXuYQ4hgsxLvsUXzGdKeZrW5xopzDCvuA2N41WJ88I7zIw==} engines: {node: '>=14'} peerDependencies: react: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -1925,8 +1926,10 @@ packages: recma-build-jsx@1.0.0: resolution: {integrity: sha512-8GtdyqaBcDfva+GUKDr3nev3VpKAhup1+RvkMvUxURHpW7QyIvk9F5wz7Vzo06CEMSilw6uArgRqhpiUcWp8ew==} - recma-jsx@1.0.0: - resolution: {integrity: sha512-5vwkv65qWwYxg+Atz95acp8DMu1JDSqdGkA2Of1j6rCreyFUE/gp15fC8MnGEuG1W68UKjM6x6+YTWIh7hZM/Q==} + recma-jsx@1.0.1: + resolution: {integrity: sha512-huSIy7VU2Z5OLv6oFLosQGGDqPqdO1iq6bWNAdhzMxSJP7RAso4fCZ1cKu8j9YHCZf3TPrq4dw3okhrylgcd7w==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 recma-parse@1.0.0: resolution: {integrity: sha512-OYLsIGBB5Y5wjnSnQW6t3Xg7q3fQ7FWbw/vcXtORTnyaSFscOtABg+7Pnz6YZ6c27fG1/aN8CjfwoUEUIdwqWQ==} @@ -2061,9 +2064,9 @@ packages: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} engines: {node: '>=0.10.0'} - source-map@0.7.4: - resolution: {integrity: sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==} - engines: {node: '>= 8'} + source-map@0.7.6: + resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==} + engines: {node: '>= 12'} space-separated-tokens@2.0.2: resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==} @@ -2106,11 +2109,11 @@ packages: resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} engines: {node: '>=12'} - style-to-js@1.1.16: - resolution: {integrity: sha512-/Q6ld50hKYPH3d/r6nr117TZkHR0w0kGGIVfpG9N6D8NymRPM9RqCUv4pRpJ62E5DqOYx2AFpbZMyCPnjQCnOw==} + style-to-js@1.1.17: + resolution: {integrity: 
sha512-xQcBGDxJb6jjFCTzvQtfiPn6YvvP2O8U1MDIPNfJQlWMYfktPy+iGsHE7cssjs7y84d9fQaK4UF3RIJaAHSoYA==} - style-to-object@1.0.8: - resolution: {integrity: sha512-xT47I/Eo0rwJmaXC4oilDGDWLohVhR6o/xAQcPQN8q6QBuZVL8qMYL85kLmST5cPjAorwvqIA4qXTRQoYHaL6g==} + style-to-object@1.0.9: + resolution: {integrity: sha512-G4qppLgKu/k6FwRpHiGiKPaPTFcG3g4wNVX/Qsfu+RqQM30E7Tyu/TEgxcL9PNLF5pdRLwQdE3YKKf+KF2Dzlw==} styled-jsx@5.1.1: resolution: {integrity: sha512-pW7uC1l4mBZ8ugbiZrcIsiIvVx1UmTfw7UkC3Um2tmfUq9Bhk8IiyEIPl6F8agHgjzku6j0xQEZbfA5uSgSaCw==} @@ -2137,8 +2140,8 @@ packages: resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} engines: {node: '>= 0.4'} - swr@2.3.3: - resolution: {integrity: sha512-dshNvs3ExOqtZ6kJBaAsabhPdHyeY4P2cKwRCniDVifBMoG/SVI7tfLWqPXriVspf2Rg4tPzXJTnwaihIeFw2A==} + swr@2.3.6: + resolution: {integrity: sha512-wfHRmHWk/isGNMwlLGlZX5Gzz/uTgo0o2IRuTMcf4CPuPFJZlq0rDaKUx+ozB5nBOReNV1kiOyzMfj+MBMikLw==} peerDependencies: react: ^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 @@ -2207,9 +2210,9 @@ packages: peerDependencies: typescript: '*' - typescript@4.9.5: - resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==} - engines: {node: '>=4.2.0'} + typescript@5.9.2: + resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} + engines: {node: '>=14.17'} hasBin: true ufo@1.6.1: @@ -2281,8 +2284,8 @@ packages: vfile-location@5.0.3: resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==} - vfile-message@4.0.2: - resolution: {integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==} + vfile-message@4.0.3: + resolution: {integrity: sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==} vfile@6.0.3: resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} @@ -2332,8 +2335,8 @@ packages: resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} engines: {node: '>=12'} - yaml@2.8.0: - resolution: {integrity: sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==} + yaml@2.8.1: + resolution: {integrity: sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==} engines: {node: '>= 14.6'} hasBin: true @@ -2341,14 +2344,14 @@ packages: resolution: {integrity: sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==} engines: {node: '>=12.20'} - zod-validation-error@3.4.1: - resolution: {integrity: sha512-1KP64yqDPQ3rupxNv7oXhf7KdhHHgaqbKuspVoiN93TT0xrBjql+Svjkdjq/Qh/7GSMmgQs3AfvBT0heE35thw==} + zod-validation-error@3.5.3: + resolution: {integrity: sha512-OT5Y8lbUadqVZCsnyFaTQ4/O2mys4tj7PqhdbBCp7McPwvIEKfPtdA6QfPeFQK2/Rz5LgwmAXRJTugBNBi0btw==} engines: {node: '>=18.0.0'} peerDependencies: - zod: ^3.24.4 + zod: ^3.25.0 || ^4.0.0 - zod@3.25.51: - resolution: {integrity: sha512-TQSnBldh+XSGL+opiSIq0575wvDPqu09AqWe1F7JhUMKY+M91/aGlK4MhpVNO7MgYfHcVCB1ffwAUTJzllKJqg==} + zod@3.25.76: + resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} zwitch@2.0.4: resolution: {integrity: 
sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} @@ -2364,7 +2367,7 @@ snapshots: '@antfu/utils@8.1.1': {} - '@babel/runtime@7.27.6': {} + '@babel/runtime@7.28.3': {} '@braintree/sanitize-url@7.1.1': {} @@ -2385,41 +2388,41 @@ snapshots: '@chevrotain/utils@11.0.3': {} - '@floating-ui/core@1.7.1': + '@floating-ui/core@1.7.3': dependencies: - '@floating-ui/utils': 0.2.9 + '@floating-ui/utils': 0.2.10 - '@floating-ui/dom@1.7.1': + '@floating-ui/dom@1.7.3': dependencies: - '@floating-ui/core': 1.7.1 - '@floating-ui/utils': 0.2.9 + '@floating-ui/core': 1.7.3 + '@floating-ui/utils': 0.2.10 - '@floating-ui/react-dom@2.1.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@floating-ui/react-dom@2.1.5(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@floating-ui/dom': 1.7.1 + '@floating-ui/dom': 1.7.3 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@floating-ui/react@0.26.28(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@floating-ui/react@0.26.28(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@floating-ui/react-dom': 2.1.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@floating-ui/utils': 0.2.9 + '@floating-ui/react-dom': 2.1.5(react-dom@18.3.1)(react@18.3.1) + '@floating-ui/utils': 0.2.10 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) tabbable: 6.2.0 - '@floating-ui/utils@0.2.9': {} + '@floating-ui/utils@0.2.10': {} '@formatjs/intl-localematcher@0.5.10': dependencies: tslib: 2.8.1 - '@headlessui/react@2.2.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@headlessui/react@2.2.7(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@floating-ui/react': 0.26.28(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@react-aria/focus': 3.20.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@react-aria/interactions': 3.25.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@tanstack/react-virtual': 3.13.9(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@floating-ui/react': 0.26.28(react-dom@18.3.1)(react@18.3.1) + '@react-aria/focus': 3.21.0(react-dom@18.3.1)(react@18.3.1) + '@react-aria/interactions': 3.25.4(react-dom@18.3.1)(react@18.3.1) + '@tanstack/react-virtual': 3.13.12(react-dom@18.3.1)(react@18.3.1) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) use-sync-external-store: 1.5.0(react@18.3.1) @@ -2448,26 +2451,23 @@ snapshots: wrap-ansi: 8.1.0 wrap-ansi-cjs: wrap-ansi@7.0.0 - '@jridgewell/gen-mapping@0.3.8': + '@jridgewell/gen-mapping@0.3.13': dependencies: - '@jridgewell/set-array': 1.2.1 - '@jridgewell/sourcemap-codec': 1.5.0 - '@jridgewell/trace-mapping': 0.3.25 + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.30 '@jridgewell/resolve-uri@3.1.2': {} - '@jridgewell/set-array@1.2.1': {} + '@jridgewell/sourcemap-codec@1.5.5': {} - '@jridgewell/sourcemap-codec@1.5.0': {} - - '@jridgewell/trace-mapping@0.3.25': + '@jridgewell/trace-mapping@0.3.30': dependencies: '@jridgewell/resolve-uri': 3.1.2 - '@jridgewell/sourcemap-codec': 1.5.0 + '@jridgewell/sourcemap-codec': 1.5.5 - '@mdx-js/mdx@3.1.0(acorn@8.14.1)': + '@mdx-js/mdx@3.1.0(acorn@8.15.0)': dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 '@types/estree-jsx': 1.0.5 '@types/hast': 3.0.4 '@types/mdx': 2.0.13 @@ -2479,13 +2479,13 @@ snapshots: hast-util-to-jsx-runtime: 2.3.6 markdown-extensions: 2.0.0 recma-build-jsx: 1.0.0 - recma-jsx: 1.0.0(acorn@8.14.1) + recma-jsx: 1.0.1(acorn@8.15.0) recma-stringify: 1.0.0 rehype-recma: 1.0.0 remark-mdx: 3.1.0 remark-parse: 11.0.0 remark-rehype: 11.1.2 - source-map: 0.7.4 + source-map: 0.7.6 unified: 11.0.5 
unist-util-position-from-estree: 2.0.0 unist-util-stringify-position: 4.0.0 @@ -2501,96 +2501,100 @@ snapshots: '@types/react': 18.3.23 react: 18.3.1 - '@mermaid-js/parser@0.4.0': + '@mermaid-js/parser@0.6.2': dependencies: langium: 3.3.1 - '@napi-rs/simple-git-android-arm-eabi@0.1.19': + '@napi-rs/simple-git-android-arm-eabi@0.1.21': optional: true - '@napi-rs/simple-git-android-arm64@0.1.19': + '@napi-rs/simple-git-android-arm64@0.1.21': optional: true - '@napi-rs/simple-git-darwin-arm64@0.1.19': + '@napi-rs/simple-git-darwin-arm64@0.1.21': optional: true - '@napi-rs/simple-git-darwin-x64@0.1.19': + '@napi-rs/simple-git-darwin-x64@0.1.21': optional: true - '@napi-rs/simple-git-freebsd-x64@0.1.19': + '@napi-rs/simple-git-freebsd-x64@0.1.21': optional: true - '@napi-rs/simple-git-linux-arm-gnueabihf@0.1.19': + '@napi-rs/simple-git-linux-arm-gnueabihf@0.1.21': optional: true - '@napi-rs/simple-git-linux-arm64-gnu@0.1.19': + '@napi-rs/simple-git-linux-arm64-gnu@0.1.21': optional: true - '@napi-rs/simple-git-linux-arm64-musl@0.1.19': + '@napi-rs/simple-git-linux-arm64-musl@0.1.21': optional: true - '@napi-rs/simple-git-linux-powerpc64le-gnu@0.1.19': + '@napi-rs/simple-git-linux-ppc64-gnu@0.1.21': optional: true - '@napi-rs/simple-git-linux-s390x-gnu@0.1.19': + '@napi-rs/simple-git-linux-s390x-gnu@0.1.21': optional: true - '@napi-rs/simple-git-linux-x64-gnu@0.1.19': + '@napi-rs/simple-git-linux-x64-gnu@0.1.21': optional: true - '@napi-rs/simple-git-linux-x64-musl@0.1.19': + '@napi-rs/simple-git-linux-x64-musl@0.1.21': optional: true - '@napi-rs/simple-git-win32-arm64-msvc@0.1.19': + '@napi-rs/simple-git-win32-arm64-msvc@0.1.21': optional: true - '@napi-rs/simple-git-win32-x64-msvc@0.1.19': + '@napi-rs/simple-git-win32-ia32-msvc@0.1.21': optional: true - '@napi-rs/simple-git@0.1.19': + '@napi-rs/simple-git-win32-x64-msvc@0.1.21': + optional: true + + '@napi-rs/simple-git@0.1.21': optionalDependencies: - '@napi-rs/simple-git-android-arm-eabi': 0.1.19 - '@napi-rs/simple-git-android-arm64': 0.1.19 - '@napi-rs/simple-git-darwin-arm64': 0.1.19 - '@napi-rs/simple-git-darwin-x64': 0.1.19 - '@napi-rs/simple-git-freebsd-x64': 0.1.19 - '@napi-rs/simple-git-linux-arm-gnueabihf': 0.1.19 - '@napi-rs/simple-git-linux-arm64-gnu': 0.1.19 - '@napi-rs/simple-git-linux-arm64-musl': 0.1.19 - '@napi-rs/simple-git-linux-powerpc64le-gnu': 0.1.19 - '@napi-rs/simple-git-linux-s390x-gnu': 0.1.19 - '@napi-rs/simple-git-linux-x64-gnu': 0.1.19 - '@napi-rs/simple-git-linux-x64-musl': 0.1.19 - '@napi-rs/simple-git-win32-arm64-msvc': 0.1.19 - '@napi-rs/simple-git-win32-x64-msvc': 0.1.19 + '@napi-rs/simple-git-android-arm-eabi': 0.1.21 + '@napi-rs/simple-git-android-arm64': 0.1.21 + '@napi-rs/simple-git-darwin-arm64': 0.1.21 + '@napi-rs/simple-git-darwin-x64': 0.1.21 + '@napi-rs/simple-git-freebsd-x64': 0.1.21 + '@napi-rs/simple-git-linux-arm-gnueabihf': 0.1.21 + '@napi-rs/simple-git-linux-arm64-gnu': 0.1.21 + '@napi-rs/simple-git-linux-arm64-musl': 0.1.21 + '@napi-rs/simple-git-linux-ppc64-gnu': 0.1.21 + '@napi-rs/simple-git-linux-s390x-gnu': 0.1.21 + '@napi-rs/simple-git-linux-x64-gnu': 0.1.21 + '@napi-rs/simple-git-linux-x64-musl': 0.1.21 + '@napi-rs/simple-git-win32-arm64-msvc': 0.1.21 + '@napi-rs/simple-git-win32-ia32-msvc': 0.1.21 + '@napi-rs/simple-git-win32-x64-msvc': 0.1.21 - '@next/env@14.2.29': {} + '@next/env@14.2.31': {} - '@next/swc-darwin-arm64@14.2.29': + '@next/swc-darwin-arm64@14.2.31': optional: true - '@next/swc-darwin-x64@14.2.29': + '@next/swc-darwin-x64@14.2.31': optional: true - 
'@next/swc-linux-arm64-gnu@14.2.29': + '@next/swc-linux-arm64-gnu@14.2.31': optional: true - '@next/swc-linux-arm64-musl@14.2.29': + '@next/swc-linux-arm64-musl@14.2.31': optional: true - '@next/swc-linux-x64-gnu@14.2.29': + '@next/swc-linux-x64-gnu@14.2.31': optional: true - '@next/swc-linux-x64-musl@14.2.29': + '@next/swc-linux-x64-musl@14.2.31': optional: true - '@next/swc-win32-arm64-msvc@14.2.29': + '@next/swc-win32-arm64-msvc@14.2.31': optional: true - '@next/swc-win32-ia32-msvc@14.2.29': + '@next/swc-win32-ia32-msvc@14.2.31': optional: true - '@next/swc-win32-x64-msvc@14.2.29': + '@next/swc-win32-x64-msvc@14.2.31': optional: true '@nodelib/fs.scandir@2.1.5': @@ -2610,9 +2614,8 @@ snapshots: '@radix-ui/react-compose-refs@1.1.2(@types/react@18.3.23)(react@18.3.1)': dependencies: - react: 18.3.1 - optionalDependencies: '@types/react': 18.3.23 + react: 18.3.1 '@radix-ui/react-icons@1.3.2(react@18.3.1)': dependencies: @@ -2621,56 +2624,55 @@ snapshots: '@radix-ui/react-slot@1.2.3(@types/react@18.3.23)(react@18.3.1)': dependencies: '@radix-ui/react-compose-refs': 1.1.2(@types/react@18.3.23)(react@18.3.1) - react: 18.3.1 - optionalDependencies: '@types/react': 18.3.23 + react: 18.3.1 - '@react-aria/focus@3.20.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@react-aria/focus@3.21.0(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@react-aria/interactions': 3.25.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@react-aria/utils': 3.29.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@react-types/shared': 3.29.1(react@18.3.1) + '@react-aria/interactions': 3.25.4(react-dom@18.3.1)(react@18.3.1) + '@react-aria/utils': 3.30.0(react-dom@18.3.1)(react@18.3.1) + '@react-types/shared': 3.31.0(react@18.3.1) '@swc/helpers': 0.5.17 clsx: 2.1.1 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@react-aria/interactions@3.25.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@react-aria/interactions@3.25.4(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@react-aria/ssr': 3.9.8(react@18.3.1) - '@react-aria/utils': 3.29.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@react-stately/flags': 3.1.1 - '@react-types/shared': 3.29.1(react@18.3.1) + '@react-aria/ssr': 3.9.10(react@18.3.1) + '@react-aria/utils': 3.30.0(react-dom@18.3.1)(react@18.3.1) + '@react-stately/flags': 3.1.2 + '@react-types/shared': 3.31.0(react@18.3.1) '@swc/helpers': 0.5.17 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@react-aria/ssr@3.9.8(react@18.3.1)': + '@react-aria/ssr@3.9.10(react@18.3.1)': dependencies: '@swc/helpers': 0.5.17 react: 18.3.1 - '@react-aria/utils@3.29.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@react-aria/utils@3.30.0(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@react-aria/ssr': 3.9.8(react@18.3.1) - '@react-stately/flags': 3.1.1 - '@react-stately/utils': 3.10.6(react@18.3.1) - '@react-types/shared': 3.29.1(react@18.3.1) + '@react-aria/ssr': 3.9.10(react@18.3.1) + '@react-stately/flags': 3.1.2 + '@react-stately/utils': 3.10.8(react@18.3.1) + '@react-types/shared': 3.31.0(react@18.3.1) '@swc/helpers': 0.5.17 clsx: 2.1.1 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@react-stately/flags@3.1.1': + '@react-stately/flags@3.1.2': dependencies: '@swc/helpers': 0.5.17 - '@react-stately/utils@3.10.6(react@18.3.1)': + '@react-stately/utils@3.10.8(react@18.3.1)': dependencies: '@swc/helpers': 0.5.17 react: 18.3.1 - '@react-types/shared@3.29.1(react@18.3.1)': + '@react-types/shared@3.31.0(react@18.3.1)': dependencies: react: 18.3.1 @@ -2702,11 +2704,11 @@ snapshots: dependencies: 
'@shikijs/types': 1.29.2 - '@shikijs/twoslash@1.29.2(typescript@4.9.5)': + '@shikijs/twoslash@1.29.2(typescript@5.9.2)': dependencies: '@shikijs/core': 1.29.2 '@shikijs/types': 1.29.2 - twoslash: 0.2.12(typescript@4.9.5) + twoslash: 0.2.12(typescript@5.9.2) transitivePeerDependencies: - supports-color - typescript @@ -2729,17 +2731,17 @@ snapshots: '@swc/counter': 0.1.3 tslib: 2.8.1 - '@tanstack/react-virtual@3.13.9(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + '@tanstack/react-virtual@3.13.12(react-dom@18.3.1)(react@18.3.1)': dependencies: - '@tanstack/virtual-core': 3.13.9 + '@tanstack/virtual-core': 3.13.12 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@tanstack/virtual-core@3.13.9': {} + '@tanstack/virtual-core@3.13.12': {} '@theguild/remark-mermaid@0.1.3(react@18.3.1)': dependencies: - mermaid: 11.6.0 + mermaid: 11.9.0 react: 18.3.1 unist-util-visit: 5.0.0 transitivePeerDependencies: @@ -2771,7 +2773,7 @@ snapshots: '@types/d3-delaunay@6.0.4': {} - '@types/d3-dispatch@3.0.6': {} + '@types/d3-dispatch@3.0.7': {} '@types/d3-drag@3.0.7': dependencies: @@ -2843,7 +2845,7 @@ snapshots: '@types/d3-color': 3.1.3 '@types/d3-contour': 3.0.6 '@types/d3-delaunay': 6.0.4 - '@types/d3-dispatch': 3.0.6 + '@types/d3-dispatch': 3.0.7 '@types/d3-drag': 3.0.7 '@types/d3-dsv': 3.0.7 '@types/d3-ease': 3.0.2 @@ -2873,9 +2875,9 @@ snapshots: '@types/estree-jsx@1.0.5': dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 - '@types/estree@1.0.7': {} + '@types/estree@1.0.8': {} '@types/geojson@7946.0.16': {} @@ -2899,7 +2901,7 @@ snapshots: '@types/node@18.11.10': {} - '@types/prop-types@15.7.14': {} + '@types/prop-types@15.7.15': {} '@types/react-dom@18.3.7(@types/react@18.3.23)': dependencies: @@ -2907,7 +2909,7 @@ snapshots: '@types/react@18.3.23': dependencies: - '@types/prop-types': 15.7.14 + '@types/prop-types': 15.7.15 csstype: 3.1.3 '@types/trusted-types@2.0.7': @@ -2917,10 +2919,10 @@ snapshots: '@types/unist@3.0.3': {} - '@typescript/vfs@1.6.1(typescript@4.9.5)': + '@typescript/vfs@1.6.1(typescript@5.9.2)': dependencies: debug: 4.4.1 - typescript: 4.9.5 + typescript: 5.9.2 transitivePeerDependencies: - supports-color @@ -2928,11 +2930,11 @@ snapshots: '@xmldom/xmldom@0.9.8': {} - acorn-jsx@5.3.2(acorn@8.14.1): + acorn-jsx@5.3.2(acorn@8.15.0): dependencies: - acorn: 8.14.1 + acorn: 8.15.0 - acorn@8.14.1: {} + acorn@8.15.0: {} ansi-regex@5.0.1: {} @@ -2961,14 +2963,14 @@ snapshots: astring@1.9.0: {} - autoprefixer@10.4.21(postcss@8.5.4): + autoprefixer@10.4.21(postcss@8.5.6): dependencies: - browserslist: 4.25.0 - caniuse-lite: 1.0.30001721 + browserslist: 4.25.2 + caniuse-lite: 1.0.30001735 fraction.js: 4.3.7 normalize-range: 0.1.2 picocolors: 1.1.1 - postcss: 8.5.4 + postcss: 8.5.6 postcss-value-parser: 4.2.0 bail@2.0.2: {} @@ -2982,7 +2984,7 @@ snapshots: binary-extensions@2.3.0: {} - brace-expansion@2.0.1: + brace-expansion@2.0.2: dependencies: balanced-match: 1.0.2 @@ -2990,12 +2992,12 @@ snapshots: dependencies: fill-range: 7.1.1 - browserslist@4.25.0: + browserslist@4.25.2: dependencies: - caniuse-lite: 1.0.30001721 - electron-to-chromium: 1.5.165 + caniuse-lite: 1.0.30001735 + electron-to-chromium: 1.5.201 node-releases: 2.0.19 - update-browserslist-db: 1.1.3(browserslist@4.25.0) + update-browserslist-db: 1.1.3(browserslist@4.25.2) busboy@1.6.0: dependencies: @@ -3003,11 +3005,11 @@ snapshots: camelcase-css@2.0.1: {} - caniuse-lite@1.0.30001721: {} + caniuse-lite@1.0.30001735: {} ccount@2.0.1: {} - chalk@5.4.1: {} + chalk@5.5.0: {} character-entities-html4@2.1.0: {} @@ 
-3081,7 +3083,7 @@ snapshots: confbox@0.2.2: {} - core-js@3.42.0: {} + core-js@3.45.0: {} cose-base@1.0.3: dependencies: @@ -3101,17 +3103,17 @@ snapshots: csstype@3.1.3: {} - cytoscape-cose-bilkent@4.1.0(cytoscape@3.32.0): + cytoscape-cose-bilkent@4.1.0(cytoscape@3.33.1): dependencies: cose-base: 1.0.3 - cytoscape: 3.32.0 + cytoscape: 3.33.1 - cytoscape-fcose@2.2.0(cytoscape@3.32.0): + cytoscape-fcose@2.2.0(cytoscape@3.33.1): dependencies: cose-base: 2.2.0 - cytoscape: 3.32.0 + cytoscape: 3.33.1 - cytoscape@3.32.0: {} + cytoscape@3.33.1: {} d3-array@2.12.1: dependencies: @@ -3293,7 +3295,7 @@ snapshots: decimal.js-light@2.5.1: {} - decode-named-character-reference@1.1.0: + decode-named-character-reference@1.2.0: dependencies: character-entities: 2.0.2 @@ -3313,7 +3315,7 @@ snapshots: dom-helpers@5.2.1: dependencies: - '@babel/runtime': 7.27.6 + '@babel/runtime': 7.28.3 csstype: 3.1.3 dompurify@3.2.6: @@ -3322,7 +3324,7 @@ snapshots: eastasianwidth@0.2.0: {} - electron-to-chromium@1.5.165: {} + electron-to-chromium@1.5.201: {} emoji-regex-xs@1.0.0: {} @@ -3330,7 +3332,7 @@ snapshots: emoji-regex@9.2.2: {} - entities@6.0.0: {} + entities@6.0.1: {} esast-util-from-estree@2.0.0: dependencies: @@ -3342,9 +3344,9 @@ snapshots: esast-util-from-js@2.0.1: dependencies: '@types/estree-jsx': 1.0.5 - acorn: 8.14.1 + acorn: 8.15.0 esast-util-from-estree: 2.0.0 - vfile-message: 4.0.2 + vfile-message: 4.0.3 escalade@3.2.0: {} @@ -3356,7 +3358,7 @@ snapshots: estree-util-attach-comments@3.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 estree-util-build-jsx@3.0.1: dependencies: @@ -3371,18 +3373,18 @@ snapshots: estree-util-scope@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 estree-util-to-js@2.0.0: dependencies: '@types/estree-jsx': 1.0.5 astring: 1.9.0 - source-map: 0.7.4 + source-map: 0.7.6 estree-util-value-to-estree@3.4.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 estree-util-visit@2.0.0: dependencies: @@ -3391,7 +3393,7 @@ snapshots: estree-walker@3.0.3: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 eventemitter3@4.0.7: {} @@ -3407,7 +3409,7 @@ snapshots: signal-exit: 4.1.0 strip-final-newline: 3.0.0 - exsolve@1.0.5: {} + exsolve@1.0.7: {} extend-shallow@2.0.1: dependencies: @@ -3515,7 +3517,7 @@ snapshots: hast-util-from-parse5: 8.0.3 parse5: 7.3.0 vfile: 6.0.3 - vfile-message: 4.0.2 + vfile-message: 4.0.3 hast-util-from-parse5@8.0.3: dependencies: @@ -3554,7 +3556,7 @@ snapshots: hast-util-to-estree@3.1.3: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 '@types/estree-jsx': 1.0.5 '@types/hast': 3.0.4 comma-separated-tokens: 2.0.3 @@ -3567,7 +3569,7 @@ snapshots: mdast-util-mdxjs-esm: 2.0.1 property-information: 7.1.0 space-separated-tokens: 2.0.2 - style-to-js: 1.1.16 + style-to-js: 1.1.17 unist-util-position: 5.0.0 zwitch: 2.0.4 transitivePeerDependencies: @@ -3589,7 +3591,7 @@ snapshots: hast-util-to-jsx-runtime@2.3.6: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 '@types/hast': 3.0.4 '@types/unist': 3.0.3 comma-separated-tokens: 2.0.3 @@ -3601,9 +3603,9 @@ snapshots: mdast-util-mdxjs-esm: 2.0.1 property-information: 7.1.0 space-separated-tokens: 2.0.2 - style-to-js: 1.1.16 + style-to-js: 1.1.17 unist-util-position: 5.0.0 - vfile-message: 4.0.2 + vfile-message: 4.0.3 transitivePeerDependencies: - supports-color @@ -3749,7 +3751,7 @@ snapshots: local-pkg@1.1.1: dependencies: mlly: 1.7.4 - pkg-types: 2.1.0 + pkg-types: 2.2.0 quansync: 0.2.10 lodash-es@4.17.21: {} 
@@ -3776,7 +3778,7 @@ snapshots: markdown-table@3.0.4: {} - marked@15.0.12: {} + marked@16.1.2: {} mathjax-full@3.2.2: dependencies: @@ -3796,7 +3798,7 @@ snapshots: dependencies: '@types/mdast': 4.0.4 '@types/unist': 3.0.3 - decode-named-character-reference: 1.1.0 + decode-named-character-reference: 1.2.0 devlop: 1.1.0 mdast-util-to-string: 4.0.0 micromark: 4.0.2 @@ -3913,7 +3915,7 @@ snapshots: parse-entities: 4.0.2 stringify-entities: 4.0.4 unist-util-stringify-position: 4.0.0 - vfile-message: 4.0.2 + vfile-message: 4.0.3 transitivePeerDependencies: - supports-color @@ -3975,15 +3977,15 @@ snapshots: merge2@1.4.1: {} - mermaid@11.6.0: + mermaid@11.9.0: dependencies: '@braintree/sanitize-url': 7.1.1 '@iconify/utils': 2.3.0 - '@mermaid-js/parser': 0.4.0 + '@mermaid-js/parser': 0.6.2 '@types/d3': 7.4.3 - cytoscape: 3.32.0 - cytoscape-cose-bilkent: 4.1.0(cytoscape@3.32.0) - cytoscape-fcose: 2.2.0(cytoscape@3.32.0) + cytoscape: 3.33.1 + cytoscape-cose-bilkent: 4.1.0(cytoscape@3.33.1) + cytoscape-fcose: 2.2.0(cytoscape@3.33.1) d3: 7.9.0 d3-sankey: 0.12.3 dagre-d3-es: 7.0.11 @@ -3992,7 +3994,7 @@ snapshots: katex: 0.16.22 khroma: 2.1.0 lodash-es: 4.17.21 - marked: 15.0.12 + marked: 16.1.2 roughjs: 4.6.6 stylis: 4.3.6 ts-dedent: 2.2.0 @@ -4004,7 +4006,7 @@ snapshots: micromark-core-commonmark@2.0.3: dependencies: - decode-named-character-reference: 1.1.0 + decode-named-character-reference: 1.2.0 devlop: 1.1.0 micromark-factory-destination: 2.0.1 micromark-factory-label: 2.0.1 @@ -4098,7 +4100,7 @@ snapshots: micromark-extension-mdx-expression@3.0.1: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 micromark-factory-mdx-expression: 2.0.3 micromark-factory-space: 2.0.1 @@ -4109,7 +4111,7 @@ snapshots: micromark-extension-mdx-jsx@3.0.2: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 estree-util-is-identifier-name: 3.0.0 micromark-factory-mdx-expression: 2.0.3 @@ -4118,7 +4120,7 @@ snapshots: micromark-util-events-to-acorn: 2.0.3 micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - vfile-message: 4.0.2 + vfile-message: 4.0.3 micromark-extension-mdx-md@2.0.0: dependencies: @@ -4126,7 +4128,7 @@ snapshots: micromark-extension-mdxjs-esm@3.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 micromark-core-commonmark: 2.0.3 micromark-util-character: 2.1.1 @@ -4134,12 +4136,12 @@ snapshots: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 unist-util-position-from-estree: 2.0.0 - vfile-message: 4.0.2 + vfile-message: 4.0.3 micromark-extension-mdxjs@3.0.0: dependencies: - acorn: 8.14.1 - acorn-jsx: 5.3.2(acorn@8.14.1) + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) micromark-extension-mdx-expression: 3.0.1 micromark-extension-mdx-jsx: 3.0.2 micromark-extension-mdx-md: 2.0.0 @@ -4162,7 +4164,7 @@ snapshots: micromark-factory-mdx-expression@2.0.3: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 devlop: 1.1.0 micromark-factory-space: 2.0.1 micromark-util-character: 2.1.1 @@ -4170,7 +4172,7 @@ snapshots: micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 unist-util-position-from-estree: 2.0.0 - vfile-message: 4.0.2 + vfile-message: 4.0.3 micromark-factory-space@2.0.1: dependencies: @@ -4217,7 +4219,7 @@ snapshots: micromark-util-decode-string@2.0.1: dependencies: - decode-named-character-reference: 1.1.0 + decode-named-character-reference: 1.2.0 micromark-util-character: 2.1.1 micromark-util-decode-numeric-character-reference: 2.0.2 micromark-util-symbol: 2.0.1 @@ -4226,13 +4228,13 @@ 
snapshots: micromark-util-events-to-acorn@2.0.3: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 '@types/unist': 3.0.3 devlop: 1.1.0 estree-util-visit: 2.0.0 micromark-util-symbol: 2.0.1 micromark-util-types: 2.0.2 - vfile-message: 4.0.2 + vfile-message: 4.0.3 micromark-util-html-tag-name@2.0.1: {} @@ -4265,7 +4267,7 @@ snapshots: dependencies: '@types/debug': 4.1.12 debug: 4.4.1 - decode-named-character-reference: 1.1.0 + decode-named-character-reference: 1.2.0 devlop: 1.1.0 micromark-core-commonmark: 2.0.3 micromark-factory-space: 2.0.1 @@ -4292,7 +4294,7 @@ snapshots: minimatch@9.0.5: dependencies: - brace-expansion: 2.0.1 + brace-expansion: 2.0.2 minipass@7.1.2: {} @@ -4300,7 +4302,7 @@ snapshots: mlly@1.7.4: dependencies: - acorn: 8.14.1 + acorn: 8.15.0 pathe: 2.0.3 pkg-types: 1.3.1 ufo: 1.6.1 @@ -4317,58 +4319,58 @@ snapshots: negotiator@1.0.0: {} - next-themes@0.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next-themes@0.4.6(react-dom@18.3.1)(react@18.3.1): dependencies: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - next@14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + next@14.2.31(react-dom@18.3.1)(react@18.3.1): dependencies: - '@next/env': 14.2.29 + '@next/env': 14.2.31 '@swc/helpers': 0.5.5 busboy: 1.6.0 - caniuse-lite: 1.0.30001721 + caniuse-lite: 1.0.30001735 graceful-fs: 4.2.11 postcss: 8.4.31 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) styled-jsx: 5.1.1(react@18.3.1) optionalDependencies: - '@next/swc-darwin-arm64': 14.2.29 - '@next/swc-darwin-x64': 14.2.29 - '@next/swc-linux-arm64-gnu': 14.2.29 - '@next/swc-linux-arm64-musl': 14.2.29 - '@next/swc-linux-x64-gnu': 14.2.29 - '@next/swc-linux-x64-musl': 14.2.29 - '@next/swc-win32-arm64-msvc': 14.2.29 - '@next/swc-win32-ia32-msvc': 14.2.29 - '@next/swc-win32-x64-msvc': 14.2.29 + '@next/swc-darwin-arm64': 14.2.31 + '@next/swc-darwin-x64': 14.2.31 + '@next/swc-linux-arm64-gnu': 14.2.31 + '@next/swc-linux-arm64-musl': 14.2.31 + '@next/swc-linux-x64-gnu': 14.2.31 + '@next/swc-linux-x64-musl': 14.2.31 + '@next/swc-win32-arm64-msvc': 14.2.31 + '@next/swc-win32-ia32-msvc': 14.2.31 + '@next/swc-win32-x64-msvc': 14.2.31 transitivePeerDependencies: - '@babel/core' - babel-plugin-macros - nextra-theme-docs@3.3.1(next@14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(nextra@3.3.1(@types/react@18.3.23)(acorn@8.14.1)(next@14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@4.9.5))(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + nextra-theme-docs@3.3.1(next@14.2.31)(nextra@3.3.1)(react-dom@18.3.1)(react@18.3.1): dependencies: - '@headlessui/react': 2.2.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@headlessui/react': 2.2.7(react-dom@18.3.1)(react@18.3.1) clsx: 2.1.1 escape-string-regexp: 5.0.0 flexsearch: 0.7.43 - next: 14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - next-themes: 0.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - nextra: 3.3.1(@types/react@18.3.23)(acorn@8.14.1)(next@14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@4.9.5) + next: 14.2.31(react-dom@18.3.1)(react@18.3.1) + next-themes: 0.4.6(react-dom@18.3.1)(react@18.3.1) + nextra: 3.3.1(@types/react@18.3.23)(acorn@8.15.0)(next@14.2.31)(react-dom@18.3.1)(react@18.3.1)(typescript@5.9.2) react: 18.3.1 react-dom: 18.3.1(react@18.3.1) scroll-into-view-if-needed: 3.1.0 - zod: 3.25.51 + zod: 3.25.76 - 
nextra@3.3.1(@types/react@18.3.23)(acorn@8.14.1)(next@14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@4.9.5): + nextra@3.3.1(@types/react@18.3.23)(acorn@8.15.0)(next@14.2.31)(react-dom@18.3.1)(react@18.3.1)(typescript@5.9.2): dependencies: '@formatjs/intl-localematcher': 0.5.10 - '@headlessui/react': 2.2.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@mdx-js/mdx': 3.1.0(acorn@8.14.1) + '@headlessui/react': 2.2.7(react-dom@18.3.1)(react@18.3.1) + '@mdx-js/mdx': 3.1.0(acorn@8.15.0) '@mdx-js/react': 3.1.0(@types/react@18.3.23)(react@18.3.1) - '@napi-rs/simple-git': 0.1.19 - '@shikijs/twoslash': 1.29.2(typescript@4.9.5) + '@napi-rs/simple-git': 0.1.21 + '@shikijs/twoslash': 1.29.2(typescript@5.9.2) '@theguild/remark-mermaid': 0.1.3(react@18.3.1) '@theguild/remark-npm2yarn': 0.3.3 better-react-mathjax: 2.3.0(react@18.3.1) @@ -4384,11 +4386,11 @@ snapshots: mdast-util-gfm: 3.1.0 mdast-util-to-hast: 13.2.0 negotiator: 1.0.0 - next: 14.2.29(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + next: 14.2.31(react-dom@18.3.1)(react@18.3.1) p-limit: 6.2.0 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-medium-image-zoom: 5.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react-medium-image-zoom: 5.3.0(react-dom@18.3.1)(react@18.3.1) rehype-katex: 7.0.1 rehype-pretty-code: 0.14.0(shiki@1.29.2) rehype-raw: 7.0.0 @@ -4402,9 +4404,9 @@ snapshots: title: 4.0.1 unist-util-remove: 4.0.0 unist-util-visit: 5.0.0 - yaml: 2.8.0 - zod: 3.25.51 - zod-validation-error: 3.4.1(zod@3.25.51) + yaml: 2.8.1 + zod: 3.25.76 + zod-validation-error: 3.5.3(zod@3.25.76) transitivePeerDependencies: - '@types/react' - acorn @@ -4454,7 +4456,7 @@ snapshots: '@types/unist': 2.0.11 character-entities-legacy: 3.0.0 character-reference-invalid: 2.0.1 - decode-named-character-reference: 1.1.0 + decode-named-character-reference: 1.2.0 is-alphanumerical: 2.0.1 is-decimal: 2.0.1 is-hexadecimal: 2.0.1 @@ -4472,7 +4474,7 @@ snapshots: parse5@7.3.0: dependencies: - entities: 6.0.0 + entities: 6.0.1 path-data-parser@0.1.0: {} @@ -4503,10 +4505,10 @@ snapshots: mlly: 1.7.4 pathe: 2.0.3 - pkg-types@2.1.0: + pkg-types@2.2.0: dependencies: confbox: 0.2.2 - exsolve: 1.0.5 + exsolve: 1.0.7 pathe: 2.0.3 points-on-curve@0.2.0: {} @@ -4516,28 +4518,27 @@ snapshots: path-data-parser: 0.1.0 points-on-curve: 0.2.0 - postcss-import@15.1.0(postcss@8.5.4): + postcss-import@15.1.0(postcss@8.5.6): dependencies: - postcss: 8.5.4 + postcss: 8.5.6 postcss-value-parser: 4.2.0 read-cache: 1.0.0 resolve: 1.22.10 - postcss-js@4.0.1(postcss@8.5.4): + postcss-js@4.0.1(postcss@8.5.6): dependencies: camelcase-css: 2.0.1 - postcss: 8.5.4 + postcss: 8.5.6 - postcss-load-config@4.0.2(postcss@8.5.4): + postcss-load-config@4.0.2(postcss@8.5.6): dependencies: lilconfig: 3.1.3 - yaml: 2.8.0 - optionalDependencies: - postcss: 8.5.4 + postcss: 8.5.6 + yaml: 2.8.1 - postcss-nested@6.2.0(postcss@8.5.4): + postcss-nested@6.2.0(postcss@8.5.6): dependencies: - postcss: 8.5.4 + postcss: 8.5.6 postcss-selector-parser: 6.1.2 postcss-selector-parser@6.1.2: @@ -4553,22 +4554,22 @@ snapshots: picocolors: 1.1.1 source-map-js: 1.2.1 - postcss@8.5.4: + postcss@8.5.6: dependencies: nanoid: 3.3.11 picocolors: 1.1.1 source-map-js: 1.2.1 - posthog-js@1.249.3: + posthog-js@1.260.1: dependencies: - core-js: 3.42.0 + core-js: 3.45.0 fflate: 0.4.8 - preact: 10.26.8 + preact: 10.27.0 web-vitals: 4.2.4 - preact@10.26.8: {} + preact@10.27.0: {} - prettier@3.5.3: {} + prettier@3.6.2: {} prop-types@15.8.1: dependencies: @@ -4601,35 
+4602,35 @@ snapshots: react: 18.3.1 rfdc: 1.4.1 - react-medium-image-zoom@5.2.14(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + react-medium-image-zoom@5.3.0(react-dom@18.3.1)(react@18.3.1): dependencies: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-smooth@4.0.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + react-smooth@4.0.4(react-dom@18.3.1)(react@18.3.1): dependencies: fast-equals: 5.2.2 prop-types: 15.8.1 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-transition-group: 4.4.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react-transition-group: 4.4.5(react-dom@18.3.1)(react@18.3.1) - react-transition-group@4.4.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + react-transition-group@4.4.5(react-dom@18.3.1)(react@18.3.1): dependencies: - '@babel/runtime': 7.27.6 + '@babel/runtime': 7.28.3 dom-helpers: 5.2.1 loose-envify: 1.4.0 prop-types: 15.8.1 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - react-tweet@3.2.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + react-tweet@3.2.2(react-dom@18.3.1)(react@18.3.1): dependencies: '@swc/helpers': 0.5.17 clsx: 2.1.1 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - swr: 2.3.3(react@18.3.1) + swr: 2.3.6(react@18.3.1) react@18.3.1: dependencies: @@ -4649,7 +4650,7 @@ snapshots: dependencies: decimal.js-light: 2.5.1 - recharts@2.15.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + recharts@2.15.4(react-dom@18.3.1)(react@18.3.1): dependencies: clsx: 2.1.1 eventemitter3: 4.0.7 @@ -4657,37 +4658,36 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) react-is: 18.3.1 - react-smooth: 4.0.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react-smooth: 4.0.4(react-dom@18.3.1)(react@18.3.1) recharts-scale: 0.4.5 tiny-invariant: 1.3.3 victory-vendor: 36.9.2 recma-build-jsx@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 estree-util-build-jsx: 3.0.1 vfile: 6.0.3 - recma-jsx@1.0.0(acorn@8.14.1): + recma-jsx@1.0.1(acorn@8.15.0): dependencies: - acorn-jsx: 5.3.2(acorn@8.14.1) + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) estree-util-to-js: 2.0.0 recma-parse: 1.0.0 recma-stringify: 1.0.0 unified: 11.0.5 - transitivePeerDependencies: - - acorn recma-parse@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 esast-util-from-js: 2.0.1 unified: 11.0.5 vfile: 6.0.3 recma-stringify@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 estree-util-to-js: 2.0.0 unified: 11.0.5 vfile: 6.0.3 @@ -4737,7 +4737,7 @@ snapshots: rehype-recma@1.0.0: dependencies: - '@types/estree': 1.0.7 + '@types/estree': 1.0.8 '@types/hast': 3.0.4 hast-util-to-estree: 3.1.3 transitivePeerDependencies: @@ -4904,7 +4904,7 @@ snapshots: source-map-js@1.2.1: {} - source-map@0.7.4: {} + source-map@0.7.6: {} space-separated-tokens@2.0.2: {} @@ -4947,11 +4947,11 @@ snapshots: strip-final-newline@3.0.0: {} - style-to-js@1.1.16: + style-to-js@1.1.17: dependencies: - style-to-object: 1.0.8 + style-to-object: 1.0.9 - style-to-object@1.0.8: + style-to-object@1.0.9: dependencies: inline-style-parser: 0.2.4 @@ -4964,7 +4964,7 @@ snapshots: sucrase@3.35.0: dependencies: - '@jridgewell/gen-mapping': 0.3.8 + '@jridgewell/gen-mapping': 0.3.13 commander: 4.1.1 glob: 10.4.5 lines-and-columns: 1.2.4 @@ -4974,7 +4974,7 @@ snapshots: supports-preserve-symlinks-flag@1.0.0: {} - swr@2.3.3(react@18.3.1): + swr@2.3.6(react@18.3.1): dependencies: dequal: 2.0.3 react: 18.3.1 @@ -5006,11 +5006,11 @@ snapshots: normalize-path: 3.0.0 object-hash: 3.0.0 picocolors: 1.1.1 - postcss: 8.5.4 - postcss-import: 15.1.0(postcss@8.5.4) - 
postcss-js: 4.0.1(postcss@8.5.4) - postcss-load-config: 4.0.2(postcss@8.5.4) - postcss-nested: 6.2.0(postcss@8.5.4) + postcss: 8.5.6 + postcss-import: 15.1.0(postcss@8.5.6) + postcss-js: 4.0.1(postcss@8.5.6) + postcss-load-config: 4.0.2(postcss@8.5.6) + postcss-nested: 6.2.0(postcss@8.5.6) postcss-selector-parser: 6.1.2 resolve: 1.22.10 sucrase: 3.35.0 @@ -5032,7 +5032,7 @@ snapshots: title@4.0.1: dependencies: arg: 5.0.2 - chalk: 5.4.1 + chalk: 5.5.0 clipboardy: 4.0.0 to-regex-range@5.0.1: @@ -5051,15 +5051,15 @@ snapshots: twoslash-protocol@0.2.12: {} - twoslash@0.2.12(typescript@4.9.5): + twoslash@0.2.12(typescript@5.9.2): dependencies: - '@typescript/vfs': 1.6.1(typescript@4.9.5) + '@typescript/vfs': 1.6.1(typescript@5.9.2) twoslash-protocol: 0.2.12 - typescript: 4.9.5 + typescript: 5.9.2 transitivePeerDependencies: - supports-color - typescript@4.9.5: {} + typescript@5.9.2: {} ufo@1.6.1: {} @@ -5140,9 +5140,9 @@ snapshots: unist-util-is: 6.0.0 unist-util-visit-parents: 6.0.1 - update-browserslist-db@1.1.3(browserslist@4.25.0): + update-browserslist-db@1.1.3(browserslist@4.25.2): dependencies: - browserslist: 4.25.0 + browserslist: 4.25.2 escalade: 3.2.0 picocolors: 1.1.1 @@ -5159,7 +5159,7 @@ snapshots: '@types/unist': 3.0.3 vfile: 6.0.3 - vfile-message@4.0.2: + vfile-message@4.0.3: dependencies: '@types/unist': 3.0.3 unist-util-stringify-position: 4.0.0 @@ -5167,7 +5167,7 @@ snapshots: vfile@6.0.3: dependencies: '@types/unist': 3.0.3 - vfile-message: 4.0.2 + vfile-message: 4.0.3 victory-vendor@36.9.2: dependencies: @@ -5225,14 +5225,14 @@ snapshots: string-width: 5.1.2 strip-ansi: 7.1.0 - yaml@2.8.0: {} + yaml@2.8.1: {} yocto-queue@1.2.1: {} - zod-validation-error@3.4.1(zod@3.25.51): + zod-validation-error@3.5.3(zod@3.25.76): dependencies: - zod: 3.25.51 + zod: 3.25.76 - zod@3.25.51: {} + zod@3.25.76: {} zwitch@2.0.4: {} diff --git a/frontend/snips/package.json b/frontend/snips/package.json index 3257021bc..e3433d16e 100644 --- a/frontend/snips/package.json +++ b/frontend/snips/package.json @@ -27,15 +27,18 @@ "generate": "rm -rf out && mkdir -p out && cp -r ../../examples ./out/examples && tsc && ts-node src/scripts/build-tree.ts", "test": "jest", "test:watch": "jest --watch", - "copy:docs": "rm -rf ../docs/lib/generated/snips && cp -r ./out/snips ../docs/lib/generated/snips", - "copy:app": "rm -rf ../app/src/next/lib/docs/generated/snips && cp -r ./out/snips ../app/src/next/lib/docs/generated/snips && find ../app/src/next/lib/docs/generated/snips -type f -exec sed -i '' 's|@/lib/generated/snips/|@/next/lib/docs/generated/snips/|g' {} +", + "copy:docs": "rm -rf ../docs/lib/generated/snips && mkdir -p ../docs/lib/generated && cp -r ./out/snips ../docs/lib/generated/snips", "copy:examples": "rm -rf ../../examples && cp -r ./out/examples ../../", - "copy:all": "npm run copy:docs && npm run copy:app && npm run copy:examples", + "copy:all": "npm run copy:docs && npm run copy:examples", "generate:copy": "npm run generate && npm run copy:all" }, + "dependencies": { + "@types/node": "^20.17.28", + "ts-node": "^10.9.2", + "typescript": "^5.8.2" + }, "devDependencies": { "@types/jest": "^29.5.14", - "@types/node": "^20.17.28", "@typescript-eslint/eslint-plugin": "^6.21.0", "@typescript-eslint/parser": "^6.21.0", "eslint": "^8.57.1", @@ -45,9 +48,7 @@ "eslint-plugin-unused-imports": "^3.2.0", "jest": "^29.7.0", "prettier": "^3.5.3", - "ts-jest": "^29.3.2", - "ts-node": "^10.9.2", - "typescript": "^5.8.2" + "ts-jest": "^29.3.2" }, "peerDependencies": { "typescript": ">=4.7.4" diff --git 
a/frontend/snips/pnpm-lock.yaml b/frontend/snips/pnpm-lock.yaml index 20f8c8f4a..ccfdf9cf8 100644 --- a/frontend/snips/pnpm-lock.yaml +++ b/frontend/snips/pnpm-lock.yaml @@ -4,13 +4,21 @@ settings: autoInstallPeers: true excludeLinksFromLockfile: false +dependencies: + '@types/node': + specifier: ^20.17.28 + version: 20.19.10 + ts-node: + specifier: ^10.9.2 + version: 10.9.2(@types/node@20.19.10)(typescript@5.9.2) + typescript: + specifier: ^5.8.2 + version: 5.9.2 + devDependencies: '@types/jest': specifier: ^29.5.14 version: 29.5.14 - '@types/node': - specifier: ^20.17.28 - version: 20.19.10 '@typescript-eslint/eslint-plugin': specifier: ^6.21.0 version: 6.21.0(@typescript-eslint/parser@6.21.0)(eslint@8.57.1)(typescript@5.9.2) @@ -41,12 +49,6 @@ devDependencies: ts-jest: specifier: ^29.3.2 version: 29.4.1(@babel/core@7.28.0)(jest@29.7.0)(typescript@5.9.2) - ts-node: - specifier: ^10.9.2 - version: 10.9.2(@types/node@20.19.10)(typescript@5.9.2) - typescript: - specifier: ^5.8.2 - version: 5.9.2 packages: @@ -382,7 +384,6 @@ packages: engines: {node: '>=12'} dependencies: '@jridgewell/trace-mapping': 0.3.9 - dev: true /@eslint-community/eslint-utils@4.7.0(eslint@8.57.1): resolution: {integrity: sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==} @@ -683,11 +684,9 @@ packages: /@jridgewell/resolve-uri@3.1.2: resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} engines: {node: '>=6.0.0'} - dev: true /@jridgewell/sourcemap-codec@1.5.4: resolution: {integrity: sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==} - dev: true /@jridgewell/trace-mapping@0.3.29: resolution: {integrity: sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==} @@ -701,7 +700,6 @@ packages: dependencies: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.4 - dev: true /@nodelib/fs.scandir@2.1.5: resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} @@ -751,19 +749,15 @@ packages: /@tsconfig/node10@1.0.11: resolution: {integrity: sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==} - dev: true /@tsconfig/node12@1.0.11: resolution: {integrity: sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==} - dev: true /@tsconfig/node14@1.0.3: resolution: {integrity: sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==} - dev: true /@tsconfig/node16@1.0.4: resolution: {integrity: sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==} - dev: true /@types/babel__core@7.20.5: resolution: {integrity: sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==} @@ -835,7 +829,6 @@ packages: resolution: {integrity: sha512-iAFpG6DokED3roLSP0K+ybeDdIX6Bc0Vd3mLW5uDqThPWtNos3E+EqOM11mPQHKzfWHqEBuLjIlsBQQ8CsISmQ==} dependencies: undici-types: 6.21.0 - dev: true /@types/semver@7.7.0: resolution: {integrity: sha512-k107IF4+Xr7UHjwDc7Cfd6PRQfbdkiRabXGRjo07b4WyPahFBZCZ1sE+BNxYIJPPg73UkfOsVOLwqVc/6ETrIA==} @@ -1004,13 +997,11 @@ packages: engines: {node: '>=0.4.0'} dependencies: acorn: 8.15.0 - dev: true /acorn@8.15.0: resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} engines: {node: '>=0.4.0'} 
hasBin: true - dev: true /ajv@6.12.6: resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} @@ -1055,7 +1046,6 @@ packages: /arg@4.1.3: resolution: {integrity: sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==} - dev: true /argparse@1.0.10: resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} @@ -1404,7 +1394,6 @@ packages: /create-require@1.1.1: resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} - dev: true /cross-spawn@7.0.6: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} @@ -1514,7 +1503,6 @@ packages: /diff@4.0.2: resolution: {integrity: sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==} engines: {node: '>=0.3.1'} - dev: true /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} @@ -3095,7 +3083,6 @@ packages: /make-error@1.3.6: resolution: {integrity: sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==} - dev: true /makeerror@1.0.12: resolution: {integrity: sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==} @@ -3869,7 +3856,6 @@ packages: typescript: 5.9.2 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 - dev: true /tsconfig-paths@3.15.0: resolution: {integrity: sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==} @@ -3956,7 +3942,6 @@ packages: resolution: {integrity: sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==} engines: {node: '>=14.17'} hasBin: true - dev: true /uglify-js@3.19.3: resolution: {integrity: sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==} @@ -3978,7 +3963,6 @@ packages: /undici-types@6.21.0: resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} - dev: true /update-browserslist-db@1.1.3(browserslist@4.25.2): resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==} @@ -3999,7 +3983,6 @@ packages: /v8-compile-cache-lib@3.0.1: resolution: {integrity: sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==} - dev: true /v8-to-istanbul@9.3.0: resolution: {integrity: sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==} @@ -4137,7 +4120,6 @@ packages: /yn@3.1.1: resolution: {integrity: sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==} engines: {node: '>=6'} - dev: true /yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} diff --git a/frontend/snips/src/processors/snippets/snippet.processor.ts b/frontend/snips/src/processors/snippets/snippet.processor.ts index 2ed627f7f..bac488451 100644 --- a/frontend/snips/src/processors/snippets/snippet.processor.ts +++ b/frontend/snips/src/processors/snippets/snippet.processor.ts @@ -1,8 +1,9 @@ -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; import { getConfig } from '../../utils/config'; import { Snippet, 
LANGUAGE_MAP, Block, Highlight } from '../../types'; import { ContentProcessor, DirectoryProcessor, Processor } from '../processor.interface'; -import path from 'path'; +import * as path from 'path'; +import { Dirent } from 'fs'; const TOKENS = { BLOCK: { @@ -201,33 +202,33 @@ const processDirectory: DirectoryProcessor = async ({ dir }) => { const entries = await fs.readdir(dir, { withFileTypes: true }); const snippets = entries.filter( - (entry) => entry.isFile() && entry.name.endsWith('.ts') && entry.name !== 'index.ts', + (entry: Dirent) => entry.isFile() && entry.name.endsWith('.ts') && entry.name !== 'index.ts', ); - const directories = entries.filter((entry) => entry.isDirectory()); + const directories = entries.filter((entry: Dirent) => entry.isDirectory()); if (snippets.length === 0 && directories.length === 0) { return; } // Generate import and export statements for files - const fileImports = snippets.map((file) => { + const fileImports = snippets.map((file: Dirent) => { console.log(file.name); const baseName = sanitizeFileName(file.name); return `import ${baseName} from './${file.name.replaceAll('.ts', '')}';`; }); - const fileExports = snippets.map((file) => { + const fileExports = snippets.map((file: Dirent) => { const baseName = sanitizeFileName(file.name); return `export { ${baseName} }`; }); // Generate import and export statements for directories - const dirImports = directories.map((dir) => { + const dirImports = directories.map((dir: Dirent) => { const importName = sanitizeFileName(dir.name); return `import * as ${importName} from './${dir.name}';`; }); - const dirExports = directories.map((dir) => { + const dirExports = directories.map((dir: Dirent) => { const importName = sanitizeFileName(dir.name); return `export { ${importName} };`; }); diff --git a/frontend/snips/src/scripts/build-tree.test.ts b/frontend/snips/src/scripts/build-tree.test.ts index 0c0b842a5..ba6838f53 100644 --- a/frontend/snips/src/scripts/build-tree.test.ts +++ b/frontend/snips/src/scripts/build-tree.test.ts @@ -1,5 +1,5 @@ import { processFiles } from './build-tree'; -import * as fs from 'fs/promises'; +import { promises as fs } from 'fs'; import * as path from 'path'; async function compareDirectories(actualDir: string, expectedDir: string) { diff --git a/frontend/snips/src/scripts/build-tree.ts b/frontend/snips/src/scripts/build-tree.ts index 387542061..5d1f0ed87 100644 --- a/frontend/snips/src/scripts/build-tree.ts +++ b/frontend/snips/src/scripts/build-tree.ts @@ -1,7 +1,7 @@ import { getConfig } from '../utils/config'; -import fs from 'fs/promises'; +import { promises as fs } from 'fs'; import { Dirent } from 'fs'; -import path from 'path'; +import * as path from 'path'; import { clean, restore } from './clean-build'; import { colors } from '../utils/colors'; import { Processor } from '@/processors/processor.interface'; @@ -180,7 +180,7 @@ export const processDirectory = async ( console.log(`${colors.cyan}Found ${entries.length} entries in ${sourceDir}${colors.reset}`); await Promise.all( - entries.map(async (entry) => { + entries.map(async (entry: Dirent) => { const sourcePath = path.join(sourceDir, entry.name); await processEntry(entry, sourcePath, outputDir, ignoreList, PROCESSORS); }), diff --git a/frontend/snips/src/scripts/clean-build.ts b/frontend/snips/src/scripts/clean-build.ts index 716d0c4f1..3917220ef 100644 --- a/frontend/snips/src/scripts/clean-build.ts +++ b/frontend/snips/src/scripts/clean-build.ts @@ -1,5 +1,5 @@ -import fs from 'fs/promises'; -import path from 'path'; 
+import { promises as fs } from 'fs'; +import * as path from 'path'; import { colors } from '../utils/colors'; import { tmpdir } from 'os'; /** diff --git a/frontend/snips/tsconfig.json b/frontend/snips/tsconfig.json index 9cf139819..276bf7521 100644 --- a/frontend/snips/tsconfig.json +++ b/frontend/snips/tsconfig.json @@ -4,6 +4,7 @@ "module": "CommonJS", "moduleResolution": "node", "lib": ["ES2021", "DOM"], + "types": ["node"], "outDir": "dist", "declaration": true, "declarationMap": true, @@ -19,5 +20,5 @@ } }, "include": ["src/**/*.ts", "scripts/generate-snips.ts", "scripts/copy-examples.ts"], - "exclude": ["node_modules", "dist"] + "exclude": ["node_modules", "dist", "**/*.test.ts"] } diff --git a/hack/oas/generate-clients.sh b/hack/oas/generate-clients.sh index def55f5c8..653b5051b 100644 --- a/hack/oas/generate-clients.sh +++ b/hack/oas/generate-clients.sh @@ -4,4 +4,4 @@ set -eux go run github.com/deepmap/oapi-codegen/v2/cmd/oapi-codegen@v2.0.0 -config ./pkg/client/rest/codegen.yaml ./bin/oas/openapi.yaml -cd frontend/app/ && (pnpm swagger-typescript-api generate -p ../../bin/oas/openapi.yaml -o ./src/lib/api/generated -n hatchet.ts --modular --axios) +cd frontend/app/ && (npx swagger-typescript-api@13.2.7 generate -p ../../bin/oas/openapi.yaml -o ./src/lib/api/generated -n hatchet.ts --modular --axios)