hatchet/examples/python/concurrency_workflow_level/worker.py
import asyncio

from pydantic import BaseModel

from hatchet_sdk import (
    ConcurrencyExpression,
    ConcurrencyLimitStrategy,
    Context,
    Hatchet,
)

hatchet = Hatchet(debug=True)

SLEEP_TIME = 2
DIGIT_MAX_RUNS = 8
NAME_MAX_RUNS = 3


# > Multiple Concurrency Keys
class WorkflowInput(BaseModel):
    name: str
    digit: str


# Workflow-level concurrency with two keys: runs are grouped by both the
# `digit` and the `name` fields of the input, and each group is capped at its
# own max_runs using the GROUP_ROUND_ROBIN strategy.
concurrency_workflow_level_workflow = hatchet.workflow(
    name="ConcurrencyWorkflowManyKeys",
    input_validator=WorkflowInput,
    concurrency=[
        ConcurrencyExpression(
            expression="input.digit",
            max_runs=DIGIT_MAX_RUNS,
            limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
        ),
        ConcurrencyExpression(
            expression="input.name",
            max_runs=NAME_MAX_RUNS,
            limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
        ),
    ],
)


@concurrency_workflow_level_workflow.task()
async def task_1(input: WorkflowInput, ctx: Context) -> None:
    # Simulate work so that overlapping runs make the concurrency limits visible.
    await asyncio.sleep(SLEEP_TIME)


@concurrency_workflow_level_workflow.task()
async def task_2(input: WorkflowInput, ctx: Context) -> None:
    await asyncio.sleep(SLEEP_TIME)


def main() -> None:
    # Register the workflow on a worker with 10 execution slots and start polling.
    worker = hatchet.worker(
        "concurrency-worker-workflow-level",
        slots=10,
        workflows=[concurrency_workflow_level_workflow],
    )

    worker.start()


if __name__ == "__main__":
    main()
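
To see both limits in effect, one can enqueue a batch of runs with varying `name` and `digit` values from a separate trigger script while the worker above is running. The sketch below is not part of the original example; it assumes the workflow object's `run_no_wait` method from the Hatchet Python SDK, and the file name and group values are illustrative.

# trigger.py (illustrative sketch, assuming `run_no_wait` is available)
from worker import WorkflowInput, concurrency_workflow_level_workflow


def trigger_batch() -> None:
    # Enqueue 20 runs spread across several name/digit groups without waiting
    # for results, so the per-group limits can be observed on the worker.
    for i in range(20):
        concurrency_workflow_level_workflow.run_no_wait(
            WorkflowInput(name=f"user-{i % 4}", digit=str(i % 10))
        )


if __name__ == "__main__":
    trigger_batch()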