# mirror of https://github.com/hatchet-dev/hatchet.git
# synced 2026-03-20 19:50:47 -05:00
# NOTE(review): the original header also carried a scraped commit changelog and
# duplicated file stats ("110 lines / 4.0 KiB / Python"); that is repository
# metadata, not source, and is preserved here only as this comment.
from examples.affinity_workers.worker import affinity_worker_workflow
|
|
from examples.bug_tests.payload_bug_on_replay.worker import (
|
|
payload_initial_cancel_bug_workflow,
|
|
)
|
|
from examples.bulk_fanout.worker import bulk_child_wf, bulk_parent_wf
|
|
from examples.bulk_operations.worker import (
|
|
bulk_replay_test_1,
|
|
bulk_replay_test_2,
|
|
bulk_replay_test_3,
|
|
)
|
|
from examples.cancellation.worker import cancellation_workflow
|
|
from examples.concurrency_cancel_in_progress.worker import (
|
|
concurrency_cancel_in_progress_workflow,
|
|
)
|
|
from examples.concurrency_cancel_newest.worker import concurrency_cancel_newest_workflow
|
|
from examples.concurrency_limit.worker import concurrency_limit_workflow
|
|
from examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow
|
|
from examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow
|
|
from examples.concurrency_workflow_level.worker import (
|
|
concurrency_workflow_level_workflow,
|
|
)
|
|
from examples.conditions.worker import task_condition_workflow
|
|
from examples.dag.worker import dag_workflow
|
|
from examples.dataclasses.worker import say_hello
|
|
from examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf
|
|
from examples.dependency_injection.worker import (
|
|
async_task_with_dependencies,
|
|
di_workflow,
|
|
durable_async_task_with_dependencies,
|
|
durable_sync_task_with_dependencies,
|
|
sync_task_with_dependencies,
|
|
)
|
|
from examples.dict_input.worker import say_hello_unsafely
|
|
from examples.durable.worker import durable_workflow, wait_for_sleep_twice
|
|
from examples.events.worker import event_workflow
|
|
from examples.fanout.worker import child_wf, parent_wf
|
|
from examples.fanout_sync.worker import sync_fanout_child, sync_fanout_parent
|
|
from examples.lifespans.simple import lifespan, lifespan_task
|
|
from examples.logger.workflow import logging_workflow
|
|
from examples.non_retryable.worker import non_retryable_workflow
|
|
from examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details
|
|
from examples.return_exceptions.worker import return_exceptions_task
|
|
from examples.run_details.worker import run_detail_test_workflow
|
|
from examples.simple.worker import simple, simple_durable
|
|
from examples.timeout.worker import refresh_timeout_wf, timeout_wf
|
|
from examples.webhooks.worker import webhook
|
|
from hatchet_sdk import Hatchet
|
|
|
|
hatchet = Hatchet(debug=True)
|
|
|
|
|
|
def main() -> None:
|
|
worker = hatchet.worker(
|
|
"e2e-test-worker",
|
|
slots=100,
|
|
workflows=[
|
|
affinity_worker_workflow,
|
|
bulk_child_wf,
|
|
bulk_parent_wf,
|
|
concurrency_limit_workflow,
|
|
concurrency_limit_rr_workflow,
|
|
concurrency_multiple_keys_workflow,
|
|
dag_workflow,
|
|
dedupe_child_wf,
|
|
dedupe_parent_wf,
|
|
durable_workflow,
|
|
child_wf,
|
|
event_workflow,
|
|
parent_wf,
|
|
on_failure_wf,
|
|
on_failure_wf_with_details,
|
|
logging_workflow,
|
|
timeout_wf,
|
|
refresh_timeout_wf,
|
|
task_condition_workflow,
|
|
cancellation_workflow,
|
|
sync_fanout_parent,
|
|
sync_fanout_child,
|
|
non_retryable_workflow,
|
|
concurrency_workflow_level_workflow,
|
|
concurrency_cancel_newest_workflow,
|
|
concurrency_cancel_in_progress_workflow,
|
|
di_workflow,
|
|
payload_initial_cancel_bug_workflow,
|
|
run_detail_test_workflow,
|
|
lifespan_task,
|
|
simple,
|
|
simple_durable,
|
|
bulk_replay_test_1,
|
|
bulk_replay_test_2,
|
|
bulk_replay_test_3,
|
|
webhook,
|
|
return_exceptions_task,
|
|
wait_for_sleep_twice,
|
|
async_task_with_dependencies,
|
|
sync_task_with_dependencies,
|
|
durable_async_task_with_dependencies,
|
|
durable_sync_task_with_dependencies,
|
|
say_hello,
|
|
say_hello_unsafely,
|
|
],
|
|
lifespan=lifespan,
|
|
)
|
|
|
|
worker.start()
|
|
|
|
|
|
if __name__ == "__main__":
|
|
main()
|