Fix: Python docs examples (#2255)

* feat: client example

* fix: batch i

* fix: batch ii

* fix: batch iii

* fix: batch iv
This commit is contained in:
matt
2025-09-05 15:08:23 -04:00
committed by GitHub
parent 8c85f47cd8
commit 4a50e454a6
46 changed files with 404 additions and 275 deletions

View File

@@ -1,3 +1,5 @@
# Docs snippet: trigger the DAG workflow defined in examples/dag/worker.py.
# The `# >` / `# !!` markers delimit the region extracted into the docs.
from examples.dag.worker import dag_workflow
# > Trigger the DAG
# Presumably blocks until the whole DAG run completes — confirm in SDK docs.
dag_workflow.run()
# !!

View File

@@ -17,14 +17,22 @@ class RandomSum(BaseModel):
# debug=True enables verbose SDK logging for the example.
hatchet = Hatchet(debug=True)
# > Define a DAG
# The workflow object is the DAG container; tasks registered on it below
# become its nodes.
dag_workflow = hatchet.workflow(name="DAGWorkflow")
# !!
# > First task
# Root task: no parents declared, so it runs as soon as the workflow starts.
@dag_workflow.task(execution_timeout=timedelta(seconds=5))
def step1(input: EmptyModel, ctx: Context) -> StepOutput:
    """Produce a random number in [1, 100] for downstream tasks to consume."""
    return StepOutput(random_number=random.randint(1, 100))
# !!
# > Task with parents
# NOTE(review): despite the section title, no parents=[...] argument is visible
# in this diff hunk — the full example presumably declares step1 as a parent;
# confirm against the source file.
@dag_workflow.task(execution_timeout=timedelta(seconds=5))
async def step2(input: EmptyModel, ctx: Context) -> StepOutput:
    """Async variant of step1: return another random number in [1, 100]."""
    return StepOutput(random_number=random.randint(1, 100))
@@ -38,6 +46,9 @@ async def step3(input: EmptyModel, ctx: Context) -> RandomSum:
return RandomSum(sum=one + two)
# !!
@dag_workflow.task(parents=[step1, step3])
async def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:
print(
@@ -52,11 +63,14 @@ async def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:
}
# > Declare a worker
def main() -> None:
    """Start a worker process that serves the DAG workflow."""
    # Register the workflow and begin polling for runs; start() runs the worker.
    # The worker handle was never reused, so the call is chained directly.
    hatchet.worker("dag-worker", workflows=[dag_workflow]).start()
# !!
if __name__ == "__main__":
    main()

View File

@@ -1,7 +1,30 @@
from hatchet_sdk import Hatchet
from hatchet_sdk import Hatchet, PushEventOptions
from hatchet_sdk.clients.events import BulkPushEventWithMetadata
# Docs snippets: three ways to push events through the Hatchet client.
hatchet = Hatchet()
# > Event trigger
# Push a single event by key; workers with a matching event trigger will run.
hatchet.event.push("user:create", {"should_skip": False})
# !!
# > Event trigger with metadata
# Same push, but attaches metadata that travels with the event (presumably
# searchable/filterable in the dashboard — confirm in Hatchet docs).
hatchet.event.push(
    "user:create",
    {"userId": "1234", "should_skip": False},
    options=PushEventOptions(
        additional_metadata={"source": "api"}  # Arbitrary key-value pair
    ),
)
# !!
# > Bulk event push
# Send 10 events in one API call instead of 10 round trips.
hatchet.event.bulk_push(
    events=[
        BulkPushEventWithMetadata(
            key="user:create",
            payload={"userId": str(i), "should_skip": False},
        )
        for i in range(10)
    ]
)
# !!

View File

@@ -0,0 +1,17 @@
# > Child spawn
from examples.fanout.worker import ChildInput, child_wf
# 👀 example: run this inside of a parent task to spawn a child
# Synchronously run the child workflow with its typed input.
child_wf.run(
    ChildInput(a="b"),
)
# !!
# > Error handling
# A failing child run surfaces as a regular exception on the caller's side.
try:
    child_wf.run(
        ChildInput(a="b"),
    )
# Broad catch is intentional in this docs example; narrow it in real code.
except Exception as e:
    print(f"Child workflow failed: {e}")
# !!

View File

@@ -1,6 +1,7 @@
import asyncio
from typing import Any
from examples.fanout.worker import ParentInput, parent_wf
from examples.fanout.worker import ChildInput, ParentInput, child_wf, parent_wf
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.admin import TriggerWorkflowOptions
@@ -14,5 +15,19 @@ async def main() -> None:
)
# > Bulk run children
async def run_child_workflows(n: int) -> list[dict[str, Any]]:
    """Spawn *n* child workflow runs in one bulk call and await all results.

    Returns one result dict per child — presumably in submission order;
    confirm the ordering guarantee against the SDK docs.
    """
    return await child_wf.aio_run_many(
        [
            child_wf.create_bulk_run_item(
                input=ChildInput(a=str(i)),
            )
            for i in range(n)
        ]
    )
# !!
if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -0,0 +1,30 @@
# > Worker
import asyncio
from aiohttp import ClientSession
from hatchet_sdk import Context, EmptyModel, Hatchet
hatchet = Hatchet()
async def fetch(session: ClientSession, url: str) -> bool:
    """Return True iff a GET of *url* answers with HTTP 200."""
    async with session.get(url) as response:
        return response.status == 200
@hatchet.task(name="Fetch")
async def hello_from_hatchet(input: EmptyModel, ctx: Context) -> dict[str, int]:
    """Fan out 10 concurrent GET requests and report how many succeeded."""
    num_requests = 10
    async with ClientSession() as session:
        # gather() runs all requests concurrently on the event loop.
        tasks = [
            fetch(session, "https://docs.hatchet.run/home") for _ in range(num_requests)
        ]
        results = await asyncio.gather(*tasks)
        # Count of requests that returned 200.
        return {"count": results.count(True)}
# !!

View File

@@ -1,12 +1,16 @@
from examples.opentelemetry_instrumentation.client import hatchet
from examples.opentelemetry_instrumentation.tracer import trace_provider
from hatchet_sdk import Context, EmptyModel
# > Configure the instrumentor
from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor
# Instrument the Hatchet SDK so its operations emit spans to the given
# tracer provider. Must run before workflows execute to capture their spans.
HatchetInstrumentor(
    tracer_provider=trace_provider,
).instrument()
# !!
otel_workflow = hatchet.workflow(
    name="OTelWorkflow",
)

View File

@@ -4,7 +4,9 @@ from .workflows.first_task import SimpleInput, first_task
async def main() -> None:
# > Run a Task
result = await first_task.aio_run(SimpleInput(message="Hello World!"))
# !!
print(
"Finished running task, and got the transformed message! The transformed message is:",

View File

@@ -5,6 +5,7 @@ from hatchet_sdk import Context
from ..hatchet_client import hatchet
# > Simple task
class SimpleInput(BaseModel):
    """Validated input payload for the simple-task example."""
    # The message that first_task lowercases into its output.
    message: str
@@ -19,3 +20,6 @@ def first_task(input: SimpleInput, ctx: Context) -> SimpleOutput:
print("first-task task called")
return SimpleOutput(transformed_message=input.message.lower())
# !!

View File

@@ -50,7 +50,11 @@ def step_2(input: RateLimitInput, ctx: Context) -> None:
def main() -> None:
# > Create a rate limit
RATE_LIMIT_KEY = "test-limit"
hatchet.rate_limits.put(RATE_LIMIT_KEY, 2, RateLimitDuration.SECOND)
# !!
worker = hatchet.worker(
"rate-limit-worker", slots=10, workflows=[rate_limit_workflow]

View File

@@ -0,0 +1,5 @@
# > Create a Hatchet client
from hatchet_sdk import Hatchet
# With no arguments, configuration (token, host, etc.) is presumably read
# from the environment — confirm in the SDK docs.
hatchet = Hatchet()
# !!

View File

@@ -0,0 +1,11 @@
# > Schedule a Task
from datetime import datetime
from examples.simple.worker import simple
# Schedule a single future run of the task at the given datetime.
# NOTE(review): this datetime is naive — the SDK presumably assumes a
# timezone; prefer a timezone-aware datetime in real code and confirm.
schedule = simple.schedule(datetime(2025, 3, 14, 15, 9, 26))
# 👀 do something with the id
print(schedule.id)
# !!

View File

@@ -0,0 +1,10 @@
from examples.simple.worker import simple
from hatchet_sdk import TriggerWorkflowOptions
# > Trigger with metadata
# Run the workflow, attaching metadata that travels with the run.
simple.run(
    options=TriggerWorkflowOptions(
        additional_metadata={"source": "api"}  # Arbitrary key-value pair
    )
)
# !!

View File

@@ -0,0 +1,7 @@
from hatchet_sdk import Hatchet
hatchet = Hatchet()
# > Define a workflow
# Declare a workflow shell; tasks are registered on it elsewhere
# (see the worker examples that decorate tasks onto a workflow object).
simple = hatchet.workflow(name="example-workflow")
# !!

View File

@@ -0,0 +1,40 @@
from pydantic import BaseModel
from hatchet_sdk import Context, Hatchet
hatchet = Hatchet()
# > Define a task
class HelloInput(BaseModel):
    """Validated input for say_hello."""
    # Name to greet.
    name: str
class HelloOutput(BaseModel):
    """Output of say_hello."""
    # Rendered greeting, e.g. "Hello, World!".
    greeting: str
# input_validator tells Hatchet to parse/validate the payload as HelloInput.
@hatchet.task(input_validator=HelloInput)
async def say_hello(input: HelloInput, ctx: Context) -> HelloOutput:
    """Return a greeting built from the validated input name."""
    return HelloOutput(greeting=f"Hello, {input.name}!")
# !!
async def main() -> None:
    """Docs snippets: fire-and-forget runs and the two ways to await a result."""
    # > Sync
    # run_no_wait presumably enqueues without blocking and returns a run
    # reference — confirm in SDK docs.
    ref = say_hello.run_no_wait(input=HelloInput(name="World"))
    # !!
    # > Async
    # Async counterpart of run_no_wait; also returns a run reference.
    ref = await say_hello.aio_run_no_wait(input=HelloInput(name="Async World"))
    # !!
    # > Result Sync
    # Blocks the current thread until the run finishes.
    result = ref.result()
    # !!
    # > Result Async
    # Awaitable counterpart: yields the event loop while waiting.
    result = await ref.aio_result()
    # !!