mirror of https://github.com/hatchet-dev/hatchet.git

from datetime import timedelta
from typing import Any

from pydantic import BaseModel

from hatchet_sdk import Context, Hatchet

hatchet = Hatchet()


class ParentInput(BaseModel):
    n: int = 100


class ChildInput(BaseModel):
    a: str


bulk_parent_wf = hatchet.workflow(name="BulkFanoutParent", input_validator=ParentInput)
bulk_child_wf = hatchet.workflow(name="BulkFanoutChild", input_validator=ChildInput)

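# `input_validator` binds each workflow to its Pydantic model: the `input`
# argument each task receives below arrives as a validated ParentInput /
# ChildInput instance rather than a raw dict.
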
# > BulkFanoutParent
@bulk_parent_wf.task(execution_timeout=timedelta(minutes=5))
async def spawn(input: ParentInput, ctx: Context) -> dict[str, list[dict[str, Any]]]:
    # 👀 Create each workflow run to spawn
    child_workflow_runs = [
        bulk_child_wf.create_bulk_run_item(
            input=ChildInput(a=str(i)),
            key=f"child{i}",
            additional_metadata={"hello": "earth"},
        )
        for i in range(input.n)
    ]

    # 👀 Run workflows in bulk to improve performance
    spawn_results = await bulk_child_wf.aio_run_many(child_workflow_runs)

    return {"results": spawn_results}


@bulk_child_wf.task()
def process(input: ChildInput, ctx: Context) -> dict[str, str]:
    print(f"child process {input.a}")
    return {"status": "success " + input.a}


@bulk_child_wf.task()
def process2(input: ChildInput, ctx: Context) -> dict[str, str]:
    print("child process2")
    return {"status2": "success"}


def main() -> None:
    worker = hatchet.worker(
        # `slots=40` caps how many task runs this worker executes concurrently
        "fanout-worker", slots=40, workflows=[bulk_parent_wf, bulk_child_wf]
    )
    worker.start()


if __name__ == "__main__":
    main()
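
# Usage sketch: triggering the parent workflow from another process. This
# assumes the standard `run` method on the workflow object (with `aio_run`
# as its async counterpart), which blocks until the run finishes:
#
#     result = bulk_parent_wf.run(ParentInput(n=10))
#     # `result` should map task names to outputs, e.g. result["spawn"]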