Fix: Python docs examples (#2255)

* feat: client example

* fix: batch i

* fix: batch ii

* fix: batch iii

* fix: batch iv
This commit is contained in:
matt
2025-09-05 15:08:23 -04:00
committed by GitHub
parent 8c85f47cd8
commit 4a50e454a6
46 changed files with 404 additions and 275 deletions

View File

@@ -1,3 +1,4 @@
from examples.dag.worker import dag_workflow
# > Trigger the DAG
dag_workflow.run()

View File

@@ -17,14 +17,20 @@ class RandomSum(BaseModel):
hatchet = Hatchet(debug=True)
# > Define a DAG
dag_workflow = hatchet.workflow(name="DAGWorkflow")
# > First task
@dag_workflow.task(execution_timeout=timedelta(seconds=5))
def step1(input: EmptyModel, ctx: Context) -> StepOutput:
return StepOutput(random_number=random.randint(1, 100))
# > Task with parents
@dag_workflow.task(execution_timeout=timedelta(seconds=5))
async def step2(input: EmptyModel, ctx: Context) -> StepOutput:
return StepOutput(random_number=random.randint(1, 100))
@@ -38,6 +44,8 @@ async def step3(input: EmptyModel, ctx: Context) -> RandomSum:
return RandomSum(sum=one + two)
@dag_workflow.task(parents=[step1, step3])
async def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:
print(
@@ -52,11 +60,13 @@ async def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:
}
# > Declare a worker
def main() -> None:
worker = hatchet.worker("dag-worker", workflows=[dag_workflow])
worker.start()
if __name__ == "__main__":
main()

View File

@@ -1,6 +1,27 @@
from hatchet_sdk import Hatchet
from hatchet_sdk import Hatchet, PushEventOptions
from hatchet_sdk.clients.events import BulkPushEventWithMetadata
hatchet = Hatchet()
# > Event trigger
hatchet.event.push("user:create", {"should_skip": False})
# > Event trigger with metadata
hatchet.event.push(
"user:create",
{"userId": "1234", "should_skip": False},
options=PushEventOptions(
additional_metadata={"source": "api"} # Arbitrary key-value pair
),
)
# > Bulk event push
hatchet.event.bulk_push(
events=[
BulkPushEventWithMetadata(
key="user:create",
payload={"userId": str(i), "should_skip": False},
)
for i in range(10)
]
)

View File

@@ -0,0 +1,15 @@
# > Child spawn
from examples.fanout.worker import ChildInput, child_wf
# 👀 example: run this inside of a parent task to spawn a child
child_wf.run(
ChildInput(a="b"),
)
# > Error handling
try:
child_wf.run(
ChildInput(a="b"),
)
except Exception as e:
print(f"Child workflow failed: {e}")

View File

@@ -1,6 +1,7 @@
import asyncio
from typing import Any
from examples.fanout.worker import ParentInput, parent_wf
from examples.fanout.worker import ChildInput, ParentInput, child_wf, parent_wf
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.admin import TriggerWorkflowOptions
@@ -14,5 +15,18 @@ async def main() -> None:
)
# > Bulk run children
async def run_child_workflows(n: int) -> list[dict[str, Any]]:
return await child_wf.aio_run_many(
[
child_wf.create_bulk_run_item(
input=ChildInput(a=str(i)),
)
for i in range(n)
]
)
if __name__ == "__main__":
asyncio.run(main())

View File

@@ -0,0 +1,29 @@
# > Worker
import asyncio
from aiohttp import ClientSession
from hatchet_sdk import Context, EmptyModel, Hatchet
hatchet = Hatchet()
async def fetch(session: ClientSession, url: str) -> bool:
async with session.get(url) as response:
return response.status == 200
@hatchet.task(name="Fetch")
async def hello_from_hatchet(input: EmptyModel, ctx: Context) -> dict[str, int]:
num_requests = 10
async with ClientSession() as session:
tasks = [
fetch(session, "https://docs.hatchet.run/home") for _ in range(num_requests)
]
results = await asyncio.gather(*tasks)
return {"count": results.count(True)}

View File

@@ -1,12 +1,15 @@
from examples.opentelemetry_instrumentation.client import hatchet
from examples.opentelemetry_instrumentation.tracer import trace_provider
from hatchet_sdk import Context, EmptyModel
# > Configure the instrumentor
from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor
HatchetInstrumentor(
tracer_provider=trace_provider,
).instrument()
otel_workflow = hatchet.workflow(
name="OTelWorkflow",
)

View File

@@ -4,6 +4,7 @@ from .workflows.first_task import SimpleInput, first_task
async def main() -> None:
# > Run a Task
result = await first_task.aio_run(SimpleInput(message="Hello World!"))
print(

View File

@@ -5,6 +5,7 @@ from hatchet_sdk import Context
from ..hatchet_client import hatchet
# > Simple task
class SimpleInput(BaseModel):
message: str
@@ -19,3 +20,5 @@ def first_task(input: SimpleInput, ctx: Context) -> SimpleOutput:
print("first-task task called")
return SimpleOutput(transformed_message=input.message.lower())

View File

@@ -47,6 +47,9 @@ def step_2(input: RateLimitInput, ctx: Context) -> None:
def main() -> None:
# > Create a rate limit
RATE_LIMIT_KEY = "test-limit"
hatchet.rate_limits.put(RATE_LIMIT_KEY, 2, RateLimitDuration.SECOND)
worker = hatchet.worker(

View File

@@ -0,0 +1,4 @@
# > Create a Hatchet client
from hatchet_sdk import Hatchet
hatchet = Hatchet()

View File

@@ -0,0 +1,9 @@
# > Schedule a Task
from examples.simple.worker import simple
from datetime import datetime
schedule = simple.schedule([datetime(2025, 3, 14, 15, 9, 26)])
## 👀 do something with the id
print(schedule.id)

View File

@@ -0,0 +1,9 @@
from examples.simple.worker import simple
from hatchet_sdk import TriggerWorkflowOptions
# > Trigger with metadata
simple.run(
options=TriggerWorkflowOptions(
additional_metadata={"source": "api"} # Arbitrary key-value pair
)
)

View File

@@ -0,0 +1,6 @@
from hatchet_sdk import Hatchet
hatchet = Hatchet()
# > Define a workflow
simple = hatchet.workflow(name="example-workflow")

View File

@@ -0,0 +1,34 @@
from hatchet_sdk import Hatchet, Context
from pydantic import BaseModel
hatchet = Hatchet()
# > Define a task
class HelloInput(BaseModel):
name: str
class HelloOutput(BaseModel):
greeting: str
@hatchet.task(input_validator=HelloInput)
async def say_hello(input: HelloInput, ctx: Context) -> HelloOutput:
return HelloOutput(greeting=f"Hello, {input.name}!")
async def main() -> None:
# > Sync
ref = say_hello.run_no_wait(input=HelloInput(name="World"))
# > Async
ref = await say_hello.aio_run_no_wait(input=HelloInput(name="Async World"))
# > Result Sync
result = ref.result()
# > Result Async
result = await ref.aio_result()

View File

@@ -1,6 +1,8 @@
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
import UniversalTabs from "@/components/UniversalTabs";
import InstallCommand from "@/components/InstallCommand";
import { Snippet } from "@/components/code";
import { snippets } from "@/lib/generated/snippets";
#### Cd into your project directory
@@ -33,11 +35,7 @@ touch hatchet-client.py
Add the following code to the file:
```python copy
from hatchet_sdk import Hatchet
hatchet = Hatchet()
```
<Snippet src={snippets.python.setup.client.create_a_hatchet_client} />
You can now import the Hatchet Client in any file that needs it.

View File

@@ -1,6 +1,8 @@
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
import UniversalTabs from "@/components/UniversalTabs";
import InstallCommand from "@/components/InstallCommand";
import { Snippet } from "@/components/code";
import { snippets } from "@/lib/generated/snippets";
#### Create a new project directory and cd into it
@@ -32,11 +34,7 @@ touch src/hatchet_client.py
Add the following code to the file:
```python copy
from hatchet_sdk import Hatchet
hatchet = Hatchet()
```
<Snippet src={snippets.python.setup.client.create_a_hatchet_client} />
You can now import the Hatchet Client in any file that needs it.

View File

@@ -53,11 +53,7 @@ It is recommended to instantiate a shared Hatchet Client in a separate file as a
Create a new file called `hatchet-client.py` in your project root.
```python copy
from hatchet_sdk import Hatchet
hatchet = Hatchet()
```
<Snippet src={snippets.python.setup.client.create_a_hatchet_client} />
You can now import the Hatchet Client in any file that needs it.

View File

@@ -81,34 +81,7 @@ With Hatchet, all of your tasks can be defined as either sync or async functions
As a simple example, you can easily run a Hatchet task that makes 10 concurrent API calls using `async` / `await` with `asyncio.gather` and `aiohttp`, as opposed to needing to run each one in a blocking fashion as its own task. For example:
```python
import asyncio
from aiohttp import ClientSession
from hatchet_sdk import Context, EmptyModel, Hatchet
hatchet = Hatchet()
async def fetch(session: ClientSession, url: str) -> bool:
async with session.get(url) as response:
return response.status == 200
@hatchet.task(name="Fetch")
async def hello_from_hatchet(input: EmptyModel, ctx: Context) -> int:
num_requests = 10
async with ClientSession() as session:
tasks = [
fetch(session, "https://docs.hatchet.run/home") for _ in range(num_requests)
]
results = await asyncio.gather(*tasks)
return results.count(True)
```
<Snippet src={snippets.python.fastapi_blog.worker.worker} />
With Hatchet, you can perform all of these requests concurrently, in a single task, as opposed to needing to e.g. enqueue a single task per request. This is more performant on your side (as the client), and also puts less pressure on the backing queue, since it needs to handle an order of magnitude fewer requests in this case.

View File

@@ -1,5 +1,7 @@
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
import UniversalTabs from "../../components/UniversalTabs";
import { Snippet } from "@/components/code";
import { snippets } from "@/lib/generated/snippets";
# Additional Metadata
@@ -17,17 +19,9 @@ You can attach additional metadata when pushing events or triggering task runs u
<Tabs.Tab>
<UniversalTabs items={['Python', 'Typescript', 'Go']}>
<Tabs.Tab>
```python
hatchet.event.push(
"user:create",
{'userId': '1234'},
options=PushEventOptions(
additional_metadata={
"source": "api" # Arbitrary key-value pair
}
)
)
```
<Snippet src={snippets.python.events.event.event_trigger_with_metadata} />
</Tabs.Tab>
<Tabs.Tab>
```typescript
@@ -61,16 +55,11 @@ err := c.Event().Push(
<Tabs.Tab>
<UniversalTabs items={['Python', 'Typescript', 'Go']}>
<Tabs.Tab>
```python
simple_task.run(
SimpleInput(user_id=1234),
options=TriggerTaskOptions(
additional_metadata={
"hello": "moon" # Arbitrary key-value pair
}
)
)
```
<Snippet
src={snippets.python.simple.trigger_with_metadata.trigger_with_metadata}
/>
</Tabs.Tab>
<Tabs.Tab>
```typescript

View File

@@ -211,10 +211,7 @@ To spawn and run a child task from a parent task, use the appropriate method for
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
# Inside a parent task
child_result = child_task.run(child_input)
```
<Snippet src={snippets.python.fanout.example_child_spawn.child_spawn} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">
@@ -242,23 +239,7 @@ As shown in the examples above, you can spawn multiple child tasks in parallel:
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
# Run multiple child workflows concurrently with asyncio
import asyncio
async def run_child_workflows(n: int) -> list[dict[str, Any]]:
return await child.aio_run_many([
child.create_bulk_run_item(
options=TriggerWorkflowOptions(
input=ChildInput(n=i),
)
)
for i in range(n)
])
# In your parent task
child_results = await run_child_workflows(input.n)
```
<Snippet src={snippets.python.fanout.trigger.bulk_run_children} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">
@@ -317,14 +298,7 @@ When working with child workflows, it's important to properly handle errors. Her
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
try:
child_result = child.run(ChildInput(a="foobar"))
except Exception as e:
# Handle error from child workflow
print(f"Child workflow failed: {e}")
# Decide how to proceed - retry, skip, or fail the parent
```
<Snippet src={snippets.python.fanout.example_child_spawn.error_handling} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">

View File

@@ -72,29 +72,6 @@ FROM ubuntu:22.04
COPY --from=builder /usr/local/cuda-12.2 /usr/local/cuda-12.2
```
## Usage in Workflows
```python
from hatchet_sdk import Hatchet, Context
hatchet = Hatchet()
@hatchet.workflow()
class GPUWorkflow:
@hatchet.step(
compute=Compute(
gpu_kind="a100-80gb",
gpus=1,
memory_mb=163840,
num_replicas=1,
regions=["ams"]
)
)
def train_model(self, context: Context):
# GPU-accelerated code here
pass
```
## Memory and Resource Allocation
### Available Memory per GPU Type
@@ -106,34 +83,6 @@ class GPUWorkflow:
When configuring memory_mb, ensure it's sufficient for both system memory and GPU operations.
## Region-Specific Configurations
### A100-80GB Example
```python
# Multi-region A100-80GB configuration
compute = Compute(
gpu_kind="a100-80gb",
gpus=1,
memory_mb=163840,
num_replicas=3,
regions=["ams", "sjc", "syd"] # Replicas will be randomly distributed
)
```
### A10 Example
```python
# Chicago-based A10 configuration
compute = Compute(
gpu_kind="a10",
gpus=1,
memory_mb=49152,
num_replicas=2,
regions=["ord"]
)
```
## Best Practices
1. **GPU Selection**

View File

@@ -16,13 +16,7 @@ The returned object is an instance of the `Workflow` class, which is the primary
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
from hatchet_sdk import Context, EmptyModel, Hatchet
hatchet = Hatchet(debug=True)
simple = hatchet.workflow(name="SimpleWorkflow")
```
<Snippet src={snippets.python.dag.worker.define_a_dag} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">
@@ -70,11 +64,7 @@ The `task` method takes a name and a function that defines the task's behavior.
<Tabs.Tab title="Python">
In Python, the `task` method is a decorator, which is used like this to wrap a function:
```python
@simple.task()
def task_1(input: EmptyModel, ctx: Context) -> None:
print("executed task_1")
```
<Snippet src={snippets.python.dag.worker.first_task} />
The function takes two arguments: `input`, which is a Pydantic model, and `ctx`, which is the Hatchet `Context` object. We'll discuss both of these more later.
@@ -119,18 +109,7 @@ The power of Hatchet's workflow design comes from connecting tasks into a DAG st
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
@simple.task()
def first_task(input: EmptyModel, ctx: Context) -> dict:
return {"result": "Hello World"}
@simple.task(parents=[first_task])
def second_task(input: EmptyModel, ctx: Context) -> dict:
# Access output from parent task
first_result = ctx.task_output(first_task)
print(f"First task said: {first_result['result']}")
return {"final_result": "Completed"}
```
<Snippet src={snippets.python.dag.worker.task_with_parents} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">
@@ -172,10 +151,9 @@ As shown in the examples above, tasks can access outputs from their parent tasks
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
# Inside a task with parent dependencies
parent_output = ctx.task_output(parent_task_name)
```
<Snippet src={snippets.python.dag.worker.task_with_parents} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">
```typescript
@@ -202,13 +180,7 @@ You can run workflows directly or enqueue them for asynchronous execution. All t
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
# Run workflow and wait for the result
result = simple.run(input_data)
# Enqueue workflow to be executed asynchronously
run_id = simple.run_no_wait(input_data)
```
<Snippet src={snippets.python.dag.trigger.trigger_the_dag} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">

View File

@@ -18,12 +18,12 @@ Hatchet's SDK provides an instrumentor that auto-instruments Hatchet code if you
First, install the `otel` extra with (e.g.) `pip install hatchet-sdk[otel]`. Then, import the instrumentor:
```python
from path.to.your.trace.provider import trace_provider
from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor
HatchetInstrumentor(tracer_provider=trace_provider).instrument()
```
<Snippet
src={
snippets.python.opentelemetry_instrumentation.worker
.configure_the_instrumentor
}
/>
You bring your own trace provider and plug it into the `HatchetInstrumentor`, call `instrument`, and that's it!

View File

@@ -87,11 +87,7 @@ Define the static rate limits that can be consumed by any step run across all wo
<UniversalTabs items={['Python', 'Typescript', 'Go']}>
<Tabs.Tab>
```python
RATE_LIMIT_KEY = "test-limit"
hatchet.rate_limits.put(RATE_LIMIT_KEY, 10, RateLimitDuration.MINUTE)
```
<Snippet src={snippets.python.rate_limit.worker.create_a_rate_limit} />
</Tabs.Tab>
<Tabs.Tab>

View File

@@ -16,19 +16,18 @@ Some example use cases for fire-and-forget style tasks might be:
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
If we have the following workflow:
<Snippet src={snippets.python.trigger_methods.workflow.define_a_task} />
You can use your `Workflow` object to run a task and "forget" it by calling the `run_no_wait` method. This method enqueues a task run and returns a `WorkflowRunRef`, a reference to that run, without waiting for the result.
```python
from src.workflows import my_workflow, MyWorkflowInputModel
ref = my_workflow.run_no_wait(MyWorkflowInputModel(foo="bar"))
```
<Snippet src={snippets.python.trigger_methods.workflow.sync} />
You can also `await` the result of `aio_run_no_wait`:
```python
ref = await my_task.aio_run_no_wait(input=MyTaskInputModel(foo="bar"))
```
<Snippet src={snippets.python.trigger_methods.workflow.async} />
Note that the type of `input` here is a Pydantic model that matches the input schema of your task.
@@ -54,15 +53,11 @@ Often it is useful to subscribe to the results of a task at a later time. The `r
Use `ref.result()` to block until the result is available:
```python
result = ref.result()
```
<Snippet src={snippets.python.trigger_methods.workflow.result_sync} />
or await `aio_result`:
```python
result = await ref.aio_result()
```
<Snippet src={snippets.python.trigger_methods.workflow.result_async} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">

View File

@@ -10,13 +10,7 @@ With your task defined, you can import it wherever you need to use it and invoke
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
from .task import simple
simple.run(
input=SimpleInput(Message="Hello, World!"),
)
```
<Snippet src={snippets.python.quickstart.run.run_a_task} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">

View File

@@ -35,14 +35,7 @@ Here's an example of creating a scheduled run to trigger a task tomorrow at noon
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
schedule = simple.schedule([datetime(2025, 3, 14, 15, 9, 26)])
## do something with the id
print(schedule.id)
```
<Snippet src={snippets.python.simple.schedule.schedule_a_task} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">

View File

@@ -19,14 +19,7 @@ Declare a worker by calling the `worker` method on the Hatchet client. The `work
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
def main() -> None:
worker = hatchet.worker("test-worker", workflows=[simple])
worker.start()
if __name__ == "__main__":
main()
```
<Snippet src={snippets.python.dag.worker.declare_a_worker}/>
<Callout type="warning">
If you are using Windows, attempting to run a worker will result in an error:

View File

@@ -18,22 +18,7 @@ The returned object is an instance of the `Task` class, which is the primary int
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
from hatchet_sdk import Context, EmptyModel, Hatchet
from pydantic import BaseModel
hatchet = Hatchet(debug=True)
class SimpleInput(BaseModel):
message: str
@hatchet.task(name="SimpleTask", input_validator=SimpleInput)
def simple(input: SimpleInput, ctx: Context) -> dict[str, str]:
return {
"transformed_message": input.message.lower(),
}
```
<Snippet src={snippets.python.quickstart.workflows.first_task.simple_task} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">
<Snippet src={snippets.typescript.simple.workflow.declaring_a_task} />
@@ -56,9 +41,7 @@ With your task defined, you can import it wherever you need to use it and invoke
<UniversalTabs items={["Python", "Typescript", "Go"]}>
<Tabs.Tab title="Python">
```python
simple.run(SimpleInput(message="HeLlO WoRlD"))
```
<Snippet src={snippets.python.quickstart.run.run_a_task} />
</Tabs.Tab>
<Tabs.Tab title="Typescript">

View File

@@ -26,31 +26,7 @@ There are two main ways to initiate workflows, by sending events to Hatchet and
<UniversalTabs items={['Python', 'Typescript', 'Go']}>
<Tabs.Tab>
```python
from hatchet_sdk import Hatchet
hatchet = Hatchet()
events: List[BulkPushEventWithMetadata] = [
{
"key": "user:create",
"payload": {"message": "This is event 1"},
"additional_metadata": {"source": "test", "user_id": "user123"},
},
{
"key": "user:create",
"payload": {"message": "This is event 2"},
"additional_metadata": {"source": "test", "user_id": "user456"},
},
{
"key": "user:create",
"payload": {"message": "This is event 3"},
"additional_metadata": {"source": "test", "user_id": "user789"},
},
]
result = hatchet.client.event.bulk_push(events)
```
<Snippet src={snippets.python.events.event.bulk_event_push} />
</Tabs.Tab>
<Tabs.Tab>

View File

@@ -1,3 +1,5 @@
from examples.dag.worker import dag_workflow
# > Trigger the DAG
dag_workflow.run()
# !!

View File

@@ -17,14 +17,22 @@ class RandomSum(BaseModel):
hatchet = Hatchet(debug=True)
# > Define a DAG
dag_workflow = hatchet.workflow(name="DAGWorkflow")
# !!
# > First task
@dag_workflow.task(execution_timeout=timedelta(seconds=5))
def step1(input: EmptyModel, ctx: Context) -> StepOutput:
return StepOutput(random_number=random.randint(1, 100))
# !!
# > Task with parents
@dag_workflow.task(execution_timeout=timedelta(seconds=5))
async def step2(input: EmptyModel, ctx: Context) -> StepOutput:
return StepOutput(random_number=random.randint(1, 100))
@@ -38,6 +46,9 @@ async def step3(input: EmptyModel, ctx: Context) -> RandomSum:
return RandomSum(sum=one + two)
# !!
@dag_workflow.task(parents=[step1, step3])
async def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:
print(
@@ -52,11 +63,14 @@ async def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:
}
# > Declare a worker
def main() -> None:
worker = hatchet.worker("dag-worker", workflows=[dag_workflow])
worker.start()
# !!
if __name__ == "__main__":
main()

View File

@@ -1,7 +1,30 @@
from hatchet_sdk import Hatchet
from hatchet_sdk import Hatchet, PushEventOptions
from hatchet_sdk.clients.events import BulkPushEventWithMetadata
hatchet = Hatchet()
# > Event trigger
hatchet.event.push("user:create", {"should_skip": False})
# !!
# > Event trigger with metadata
hatchet.event.push(
"user:create",
{"userId": "1234", "should_skip": False},
options=PushEventOptions(
additional_metadata={"source": "api"} # Arbitrary key-value pair
),
)
# !!
# > Bulk event push
hatchet.event.bulk_push(
events=[
BulkPushEventWithMetadata(
key="user:create",
payload={"userId": str(i), "should_skip": False},
)
for i in range(10)
]
)
# !!

View File

@@ -0,0 +1,17 @@
# > Child spawn
from examples.fanout.worker import ChildInput, child_wf
# 👀 example: run this inside of a parent task to spawn a child
child_wf.run(
ChildInput(a="b"),
)
# !!
# > Error handling
try:
child_wf.run(
ChildInput(a="b"),
)
except Exception as e:
print(f"Child workflow failed: {e}")
# !!

View File

@@ -1,6 +1,7 @@
import asyncio
from typing import Any
from examples.fanout.worker import ParentInput, parent_wf
from examples.fanout.worker import ChildInput, ParentInput, child_wf, parent_wf
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.admin import TriggerWorkflowOptions
@@ -14,5 +15,19 @@ async def main() -> None:
)
# > Bulk run children
async def run_child_workflows(n: int) -> list[dict[str, Any]]:
return await child_wf.aio_run_many(
[
child_wf.create_bulk_run_item(
input=ChildInput(a=str(i)),
)
for i in range(n)
]
)
# !!
if __name__ == "__main__":
asyncio.run(main())

View File

@@ -0,0 +1,30 @@
# > Worker
import asyncio
from aiohttp import ClientSession
from hatchet_sdk import Context, EmptyModel, Hatchet
hatchet = Hatchet()
async def fetch(session: ClientSession, url: str) -> bool:
async with session.get(url) as response:
return response.status == 200
@hatchet.task(name="Fetch")
async def hello_from_hatchet(input: EmptyModel, ctx: Context) -> dict[str, int]:
num_requests = 10
async with ClientSession() as session:
tasks = [
fetch(session, "https://docs.hatchet.run/home") for _ in range(num_requests)
]
results = await asyncio.gather(*tasks)
return {"count": results.count(True)}
# !!

View File

@@ -1,12 +1,16 @@
from examples.opentelemetry_instrumentation.client import hatchet
from examples.opentelemetry_instrumentation.tracer import trace_provider
from hatchet_sdk import Context, EmptyModel
# > Configure the instrumentor
from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor
HatchetInstrumentor(
tracer_provider=trace_provider,
).instrument()
# !!
otel_workflow = hatchet.workflow(
name="OTelWorkflow",
)

View File

@@ -4,7 +4,9 @@ from .workflows.first_task import SimpleInput, first_task
async def main() -> None:
# > Run a Task
result = await first_task.aio_run(SimpleInput(message="Hello World!"))
# !!
print(
"Finished running task, and got the transformed message! The transformed message is:",

View File

@@ -5,6 +5,7 @@ from hatchet_sdk import Context
from ..hatchet_client import hatchet
# > Simple task
class SimpleInput(BaseModel):
message: str
@@ -19,3 +20,6 @@ def first_task(input: SimpleInput, ctx: Context) -> SimpleOutput:
print("first-task task called")
return SimpleOutput(transformed_message=input.message.lower())
# !!

View File

@@ -50,7 +50,11 @@ def step_2(input: RateLimitInput, ctx: Context) -> None:
def main() -> None:
# > Create a rate limit
RATE_LIMIT_KEY = "test-limit"
hatchet.rate_limits.put(RATE_LIMIT_KEY, 2, RateLimitDuration.SECOND)
# !!
worker = hatchet.worker(
"rate-limit-worker", slots=10, workflows=[rate_limit_workflow]

View File

@@ -0,0 +1,5 @@
# > Create a Hatchet client
from hatchet_sdk import Hatchet
hatchet = Hatchet()
# !!

View File

@@ -0,0 +1,11 @@
# > Schedule a Task
from datetime import datetime
from examples.simple.worker import simple
schedule = simple.schedule(datetime(2025, 3, 14, 15, 9, 26))
## 👀 do something with the id
print(schedule.id)
# !!

View File

@@ -0,0 +1,10 @@
from examples.simple.worker import simple
from hatchet_sdk import TriggerWorkflowOptions
# > Trigger with metadata
simple.run(
options=TriggerWorkflowOptions(
additional_metadata={"source": "api"} # Arbitrary key-value pair
)
)
# !!

View File

@@ -0,0 +1,7 @@
from hatchet_sdk import Hatchet
hatchet = Hatchet()
# > Define a workflow
simple = hatchet.workflow(name="example-workflow")
# !!

View File

@@ -0,0 +1,40 @@
from pydantic import BaseModel
from hatchet_sdk import Context, Hatchet
hatchet = Hatchet()
# > Define a task
class HelloInput(BaseModel):
name: str
class HelloOutput(BaseModel):
greeting: str
@hatchet.task(input_validator=HelloInput)
async def say_hello(input: HelloInput, ctx: Context) -> HelloOutput:
return HelloOutput(greeting=f"Hello, {input.name}!")
# !!
async def main() -> None:
# > Sync
ref = say_hello.run_no_wait(input=HelloInput(name="World"))
# !!
# > Async
ref = await say_hello.aio_run_no_wait(input=HelloInput(name="Async World"))
# !!
# > Result Sync
result = ref.result()
# !!
# > Result Async
result = await ref.aio_result()
# !!