Files
hatchet/examples/python/fanout/worker.py
Matt Kaye f1f276f6dc Feat: Python task unit tests (#1990)
* feat: add mock run methods for tasks

* feat: docs

* feat: first pass at unit tests

* cleanup: split out tests

* feat: pass lifespan through

* fix: rm comment

* drive by: retry on 404 to help with races

* chore: changelog

* chore: ver

* feat: improve logging everywhere

* chore: changelog

* fix: rm print cruft

* feat: print statement linter

* feat: helper for getting result of a standalone

* feat: docs for mock run

* feat: add task run getter

* feat: propagate additional metadata properly

* chore: gen

* fix: date

* chore: gen

* feat: return exceptions

* chore: gen

* chore: changelog

* feat: tests + gen again

* fix: rm print cruft
2025-07-17 13:54:40 -04:00

73 lines
1.6 KiB
Python

from datetime import timedelta
from typing import Any
from pydantic import BaseModel
from hatchet_sdk import Context, Hatchet, TriggerWorkflowOptions
# Single SDK entrypoint for this example; debug=True turns on verbose SDK logging.
hatchet = Hatchet(debug=True)


# > FanoutParent
class ParentInput(BaseModel):
    """Input for the parent workflow: how many child runs to spawn."""

    n: int = 100


class ChildInput(BaseModel):
    """Input for one child workflow run."""

    a: str


# Declare both workflows up front; each validates its trigger payload
# against the matching pydantic model.
parent_wf = hatchet.workflow(name="FanoutParent", input_validator=ParentInput)
child_wf = hatchet.workflow(name="FanoutChild", input_validator=ChildInput)
@parent_wf.task(execution_timeout=timedelta(minutes=5))
async def spawn(input: ParentInput, ctx: Context) -> dict[str, Any]:
    """Fan out `input.n` child workflow runs in bulk and collect their results."""
    print("spawning child")

    # One bulk-run item per child. Each run is tagged with metadata and a
    # deterministic key ("child0", "child1", ...) so runs are identifiable.
    bulk_items = [
        child_wf.create_bulk_run_item(
            input=ChildInput(a=str(i)),
            options=TriggerWorkflowOptions(
                additional_metadata={"hello": "earth"}, key=f"child{i}"
            ),
        )
        for i in range(input.n)
    ]

    # Trigger all children at once and await the full set of results.
    result = await child_wf.aio_run_many(bulk_items)

    print(f"results {result}")
    return {"results": result}
# > FanoutChild
@child_wf.task()
async def process(input: ChildInput, ctx: Context) -> dict[str, str]:
    """First child step: echo the input value back as this run's status."""
    print(f"child process {input.a}")
    status = input.a
    return {"status": status}
@child_wf.task(parents=[process])
async def process2(input: ChildInput, ctx: Context) -> dict[str, str]:
    """Second child step: derive a new status from `process`'s output."""
    # Read the upstream task's result out of the workflow context.
    upstream = ctx.task_output(process)
    return {"status2": upstream["status"] + "2"}
# NOTE(review): removed a stray module-level `child_wf.create_bulk_run_item()`
# call — its return value was discarded, so it did nothing useful, and since
# ChildInput declares a required field `a`, a bare call may fail input
# validation at import time.
def main() -> None:
    """Start a blocking worker that serves the fan-out parent and child workflows."""
    fanout_worker = hatchet.worker(
        "fanout-worker",
        slots=40,
        workflows=[parent_wf, child_wf],
    )
    fanout_worker.start()


if __name__ == "__main__":
    main()