Mirror of https://github.com/hatchet-dev/hatchet.git, synced 2026-01-04 23:59:49 -06:00
* fix: register durable steps and workflows separately
* chore: initial copy of pooled listener
* feat: initial generic impl
* feat: use pooled listener for wf run listener
* refactor: move listeners to subdir
* feat: refactor durable event listener
* fix: bug
* feat: share single pooled workflow listener and event listener everywhere
* cruft: rm hatchet fixture
* fix: rebase issue
* feat: remove asyncio api client in favor of sync one
* chore: minor version
* proposal: crazy hack idea to make the workflow run listener work
* fix: sleeps and error handling
* Revert "cruft: rm hatchet fixture"
  This reverts commit b75f625e6ccec095e8c4e294d6727db166796411.
* fix: set timeout
* fix: rm pytest-timeout
* fix: rm retry
* fix: use v1 by default
* fix: try removing retry state
* fix: try using async client?
* fix: try running sequentially
* debug: loop
* debug: maybe it's this?
* fix: lint
* fix: re-remove unused fixtures
* fix: lazily create clients in admin client
* fix: default
* fix: lazily initialize dispatcher client
* fix: hint
* fix: no. way.
* feat: add back retries in ci
* fix: clients + imports
* fix: loop scope
* debug: try running skipped tests in ci again
* Revert "debug: try running skipped tests in ci again"
  This reverts commit 8d9e18150e5207ee6051d8df8a6fe2a7504c722e.
* fix: rm duped code
* refactor: rename everything as `to_proto`
* refactor: removals of `namespace` being passed around
* fix: task output stupidity
* feat: add deprecation warning
* fix: remove more unused code
* feat: mix sync and async in dag example
* fix: autouse
* fix: more input types
* feat: remove ability to pass in loop
* fix: overload key gen
63 lines
1.4 KiB
Python
import random
import time
from datetime import timedelta

from pydantic import BaseModel

from hatchet_sdk import Context, EmptyModel, Hatchet


class StepOutput(BaseModel):
    random_number: int


class RandomSum(BaseModel):
    sum: int


hatchet = Hatchet(debug=True)

dag_workflow = hatchet.workflow(name="DAGWorkflow")


# step1 and step2 have no parents, so they run in parallel; step1 is a sync
# task and step2 is async, showing that both styles can be mixed in one DAG.
@dag_workflow.task(execution_timeout=timedelta(seconds=5))
def step1(input: EmptyModel, ctx: Context) -> StepOutput:
    return StepOutput(random_number=random.randint(1, 100))


@dag_workflow.task(execution_timeout=timedelta(seconds=5))
async def step2(input: EmptyModel, ctx: Context) -> StepOutput:
    return StepOutput(random_number=random.randint(1, 100))


# step3 waits on both parents and sums the random numbers they produced.
@dag_workflow.task(parents=[step1, step2])
async def step3(input: EmptyModel, ctx: Context) -> RandomSum:
    one = ctx.task_output(step1).random_number
    two = (await ctx.task_output(step2)).random_number

    return RandomSum(sum=one + two)


# step4 runs last, printing its input along with the outputs of step1 and step3.
@dag_workflow.task(parents=[step1, step3])
async def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:
    print(
        "executed step4",
        time.strftime("%H:%M:%S", time.localtime()),
        input,
        ctx.task_output(step1),
        await ctx.task_output(step3),
    )
    return {
        "step4": "step4",
    }


def main() -> None:
    # Register the DAG workflow on a worker and start polling for runs.
    worker = hatchet.worker("dag-worker", workflows=[dag_workflow])

    worker.start()


if __name__ == "__main__":
    main()
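For completeness, here is a minimal sketch of how this workflow might be triggered from a separate client process. It assumes the v1 SDK's declarative run API, where the object returned by hatchet.workflow() exposes run(), and it assumes a module path of examples.dag.worker for the file above; both the run() signature and the import path are assumptions, not part of this file.

# trigger.py (hypothetical companion script, not part of the example above)
from hatchet_sdk import EmptyModel

from examples.dag.worker import dag_workflow  # assumed import path for the file above

# Assumes Workflow.run() blocks until DAGWorkflow finishes and returns the task outputs.
result = dag_workflow.run(EmptyModel())
print(result)

The worker defined in main() must already be running for the triggered run to be picked up and executed.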