mirror of
https://github.com/hatchet-dev/hatchet.git
synced 2025-12-30 13:19:44 -06:00
Feat: Misc. Python improvements + Streaming Improvements (#1846)
* fix: contextvars explicit copy
* feat: fix a ton of ruff errors
* fix: couple more ruff rules
* fix: ignore unhelpful rule
* fix: exception group in newer Python versions for improved handling
* fix: workflow docs
* feat: context docs
* feat: simple task counter
* feat: config for setting max tasks
* feat: graceful exit once worker exceeds max tasks
* fix: optional
* fix: docs
* fix: events docs + gen
* chore: gen
* fix: one more dangling task
* feat: add xdist in ci
* fix: CI
* fix: xdist fails me once again
* fix: fix + extend some tests
* fix: test cleanup
* fix: exception group
* fix: ugh
* feat: changelog
* Add Ruff linter callout to post
* refactor: clean up runner error handling
* feat: improved errors
* fix: lint
* feat: hacky serde impl
* fix: improve serde + formatting
* fix: logging
* fix: lint
* fix: unexpected errors
* fix: naming, ruff
* fix: rm cruft
* Fix: Attempt to fix namespacing issue in event waits (#1885)
* feat: add xdist in ci
* fix: attempt to fix namespacing issue in event waits
* fix: namespaced worker names
* fix: applied namespace to the wrong thing
* fix: rm hack
* drive by: namespacing improvement
* fix: delay
* fix: changelog
* fix: initial log work
* fix: more logging work
* fix: rm print cruft
* feat: use a queue to send logs
* fix: sentinel value to stop the loop
* fix: use the log sender everywhere
* fix: make streaming blocking, remove more thread pools
* feat: changelog
* fix: linting issues
* fix: broken test
* chore: bunch more generated stuff
* fix: changelog
* fix: one more
* fix: mypy
* chore: gen
* Feat: Streaming Improvements (#1886)
* Fix: Filter list improvements (#1899)
* fix: uuid validation
* fix: improve filter filtering
* fix: inner join
* fix: bug in workflow cached prop
* chore: bump
* fix: lint
* chore: changelog
* fix: separate filter queries
* feat: improve filter filtering
* fix: queries and the like
* feat: add xdist in ci
* feat: streaming test + gen
* feat: add index to stream event
* fix: rm langfuse dep
* fix: lf
* chore: gen
* feat: impl index for stream on context
* feat: tweak protos
* feat: extend test
* feat: send event index through queue
* feat: first pass + debug logging
* debug: fixes
* debug: more possible issues
* feat: generate new stream event protos
* feat: first pass at using an alternate exchange for replaying incoming stream events
* fix: exchange create timing
* fix: rm unused protos
* chore: gen
* feat: python cleanup
* fix: revert rabbit changes
* fix: unwind a bunch of cruft
* fix: optional index
* chore: gen python
* fix: event index nil handling
* feat: improve test
* fix: stream impl in sdk
* fix: make test faster
* chore: gen a ton more stuff
* fix: test
* fix: sorting helper
* fix: bug
* fix: one more ordering bug
* feat: add some tests for buffering logic
* feat: hangup test
* feat: test no buffering if no index sent
* fix: regular mutex
* fix: pr feedback
* fix: conflicts
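A couple of the items above describe mechanisms rather than just file churn, in particular "use a queue to send logs" and "sentinel value to stop the loop". As a rough illustration only (hypothetical names, not the SDK's actual implementation), that general pattern looks like this:

import queue
import threading

_STOP = object()  # sentinel pushed onto the queue to tell the sender loop to exit


class LogSender:
    """Minimal sketch of a background log sender fed by a queue (illustrative only)."""

    def __init__(self, send) -> None:
        self._queue: queue.Queue[object] = queue.Queue()
        self._send = send  # callable that ships one log line upstream (assumed)
        self._thread = threading.Thread(target=self._run, daemon=True)
        self._thread.start()

    def log(self, line: str) -> None:
        # Producers never block on the network; they only enqueue.
        self._queue.put(line)

    def close(self) -> None:
        # Enqueue the sentinel, then wait for the loop to drain and stop.
        self._queue.put(_STOP)
        self._thread.join()

    def _run(self) -> None:
        while True:
            item = self._queue.get()
            if item is _STOP:
                break
            self._send(item)

For example, LogSender(print) drains enqueued lines to stdout until close() is called.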
This commit is contained in:
1
.github/workflows/build.yml
vendored
@@ -4,6 +4,7 @@ on:
paths-ignore:
- 'sdks/**'
- 'frontend/docs/**'
- 'frontend/**/generated/**'
- 'examples/**'

jobs:
@@ -328,6 +328,8 @@ message WorkflowEvent {

// (optional) the retry count of this step
optional int32 retryCount = 9;

optional int64 eventIndex = 10;
}

enum WorkflowRunEventType {
@@ -78,6 +78,8 @@ message PutStreamEventRequest {

// associated stream event metadata
string metadata = 5;

optional int64 eventIndex = 6;
}

message PutStreamEventResponse {}
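The optional eventIndex fields added above are what the "buffering logic" and "sorting helper" items in the commit message work against: each streamed chunk can carry a monotonically increasing index so a consumer can restore order. A minimal, hedged sketch of that idea (hypothetical helper, not the SDK's actual implementation) follows; events without an index are passed straight through, mirroring the "no buffering if no index sent" behaviour:

from collections.abc import Iterable, Iterator


def reorder_stream(events: Iterable[tuple[int | None, str]]) -> Iterator[str]:
    # Buffer indexed chunks until the next expected index arrives, then flush
    # the contiguous run; unindexed chunks (index None) are yielded immediately.
    buffer: dict[int, str] = {}
    next_ix = 0

    for index, payload in events:
        if index is None:
            yield payload
            continue
        buffer[index] = payload
        while next_ix in buffer:
            yield buffer.pop(next_ix)
            next_ix += 1


# Chunks arriving out of order as 1, 0, 2 come back out as 0, 1, 2.
assert list(reorder_stream([(1, "b"), (0, "a"), (2, "c")])) == ["a", "b", "c"]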
@@ -10,8 +10,11 @@ service V1Dispatcher {
rpc RegisterDurableEvent(RegisterDurableEventRequest) returns (RegisterDurableEventResponse) {}

rpc ListenForDurableEvent(stream ListenForDurableEventRequest) returns (stream DurableEvent) {}
}

message RegisterDurableEventRequest {
string task_id = 1; // external uuid for the task run
string signal_key = 2; // the signal key for the event
@@ -43,6 +43,7 @@ func Lower(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[EventInput, Lo
)
}

// > Accessing the filter payload
func accessFilterPayload(ctx worker.HatchetContext, input EventInput) (*LowerTaskOutput, error) {
fmt.Println(ctx.FilterPayload())
@@ -51,6 +52,7 @@ func accessFilterPayload(ctx worker.HatchetContext, input EventInput) (*LowerTas
}, nil
}

// > Declare with filter
func LowerWithFilter(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[EventInput, LowerTaskOutput] {
return factory.NewTask(
@@ -71,6 +73,7 @@ func LowerWithFilter(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[Even
)
}

func Upper(hatchet v1.HatchetClient) workflow.WorkflowDeclaration[EventInput, UpperTaskOutput] {
return factory.NewTask(
create.StandaloneTask{
@@ -33,7 +33,6 @@ def step1(input: WorkflowInput, ctx: Context) -> None:
print("starting step1")
time.sleep(2)
print("finished step1")
pass

def main() -> None:
@@ -3,7 +3,7 @@ from datetime import timedelta
from typing import Any

from hatchet_sdk import Context, EmptyModel, Hatchet, TriggerWorkflowOptions
from hatchet_sdk.clients.admin import DedupeViolationErr
from hatchet_sdk.exceptions import DedupeViolationError

hatchet = Hatchet(debug=True)

@@ -20,15 +20,13 @@ async def spawn(input: EmptyModel, ctx: Context) -> dict[str, list[Any]]:
for i in range(2):
try:
results.append(
(
dedupe_child_wf.aio_run(
options=TriggerWorkflowOptions(
additional_metadata={"dedupe": "test"}, key=f"child{i}"
),
)
dedupe_child_wf.aio_run(
options=TriggerWorkflowOptions(
additional_metadata={"dedupe": "test"}, key=f"child{i}"
),
)
)
except DedupeViolationErr as e:
except DedupeViolationError as e:
print(f"dedupe violation {e}")
continue
@@ -1,5 +1,4 @@
import asyncio
import os

import pytest

@@ -7,10 +6,6 @@ from examples.durable.worker import EVENT_KEY, SLEEP_TIME, durable_workflow
from hatchet_sdk import Hatchet

@pytest.mark.skipif(
os.getenv("CI", "false").lower() == "true",
reason="Skipped in CI because of unreliability",
)
@pytest.mark.asyncio(loop_scope="session")
async def test_durable(hatchet: Hatchet) -> None:
ref = durable_workflow.run_no_wait()
@@ -28,6 +23,12 @@ async def test_durable(hatchet: Hatchet) -> None:
active_workers = [w for w in workers.rows if w.status == "ACTIVE"]

assert len(active_workers) == 2
assert any(w.name == "e2e-test-worker" for w in active_workers)
assert any(w.name.endswith("e2e-test-worker_durable") for w in active_workers)
assert any(
w.name == hatchet.config.apply_namespace("e2e-test-worker")
for w in active_workers
)
assert any(
w.name == hatchet.config.apply_namespace("e2e-test-worker_durable")
for w in active_workers
)
assert result["durable_task"]["status"] == "success"
@@ -30,7 +30,7 @@ hatchet.event.push(
hatchet.event.push(
event_key=EVENT_KEY,
payload={
"should_skip": True,
"should_skip": False,
},
options=PushEventOptions(
scope="foobarbaz",
@@ -1,8 +1,9 @@
import asyncio
import json
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from datetime import datetime, timedelta, timezone
from typing import AsyncGenerator, cast
from typing import cast
from uuid import uuid4

import pytest
@@ -255,7 +256,9 @@ async def test_async_event_bulk_push(hatchet: Hatchet) -> None:
namespace = "bulk-test"

# Check that the returned events match the original events
for original_event, returned_event in zip(sorted_events, sorted_returned_events):
for original_event, returned_event in zip(
sorted_events, sorted_returned_events, strict=False
):
assert returned_event.key == namespace + original_event.key
@@ -44,7 +44,7 @@ event_workflow_with_filter = hatchet.workflow(
def task(input: EventWorkflowInput, ctx: Context) -> dict[str, str]:
print("event received")

return dict(ctx.filter_payload)
return ctx.filter_payload

# > Accessing the filter payload
@@ -1,6 +1,7 @@
# > Lifespan

from typing import AsyncGenerator, cast
from collections.abc import AsyncGenerator
from typing import cast

from pydantic import BaseModel
@@ -1,4 +1,5 @@
from typing import AsyncGenerator, cast
from collections.abc import AsyncGenerator
from typing import cast
from uuid import UUID

from psycopg_pool import ConnectionPool
@@ -16,7 +16,7 @@ logging_workflow = hatchet.workflow(
@logging_workflow.task()
def root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
for i in range(12):
logger.info("executed step1 - {}".format(i))
logger.info(f"executed step1 - {i}")
logger.info({"step1": "step1"})

time.sleep(0.1)
@@ -31,7 +31,7 @@ def root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
@logging_workflow.task()
def context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
for i in range(12):
ctx.log("executed step1 - {}".format(i))
ctx.log(f"executed step1 - {i}")
ctx.log({"step1": "step1"})

time.sleep(0.1)
@@ -1,5 +1,6 @@
from collections.abc import Mapping
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Mapping
from typing import Any

import requests
from pydantic import BaseModel
@@ -10,13 +11,13 @@ from hatchet_sdk.context.context import Context
from .hatchet_client import hatchet

async def process_image(image_url: str, filters: List[str]) -> Dict[str, Any]:
async def process_image(image_url: str, filters: list[str]) -> dict[str, Any]:
# Do some image processing
return {"url": image_url, "size": 100, "format": "png"}

# > Before (Mergent)
async def process_image_task(request: Any) -> Dict[str, Any]:
async def process_image_task(request: Any) -> dict[str, Any]:
image_url = request.json["image_url"]
filters = request.json["filters"]
try:
@@ -32,12 +33,12 @@ async def process_image_task(request: Any) -> Dict[str, Any]:
# > After (Hatchet)
class ImageProcessInput(BaseModel):
image_url: str
filters: List[str]
filters: list[str]

class ImageProcessOutput(BaseModel):
processed_url: str
metadata: Dict[str, Any]
metadata: dict[str, Any]

@hatchet.task(
@@ -1,3 +1,5 @@
import asyncio

import pytest

from examples.non_retryable.worker import (
@@ -9,6 +11,7 @@ from examples.non_retryable.worker import (
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType
from hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails
from hatchet_sdk.exceptions import FailedTaskRunExceptionGroup

def find_id(runs: V1WorkflowRunDetails, match: str) -> str:
@@ -19,9 +22,28 @@ def find_id(runs: V1WorkflowRunDetails, match: str) -> str:
async def test_no_retry(hatchet: Hatchet) -> None:
ref = await non_retryable_workflow.aio_run_no_wait()

with pytest.raises(Exception, match="retry"):
with pytest.raises(FailedTaskRunExceptionGroup) as exc_info:
await ref.aio_result()

exception_group = exc_info.value

assert len(exception_group.exceptions) == 2

exc_text = [e.exc for e in exception_group.exceptions]

non_retries = [
e
for e in exc_text
if "This task should retry because it's not a NonRetryableException" in e
]

other_errors = [e for e in exc_text if "This task should not retry" in e]

assert len(non_retries) == 1
assert len(other_errors) == 1

await asyncio.sleep(3)

runs = await hatchet.runs.aio_get(ref.workflow_run_id)
task_to_id = {
task: find_id(runs, task.name)
@@ -40,9 +62,7 @@ async def test_no_retry(hatchet: Hatchet) -> None:
assert len(retrying_events) == 1

"""The task id of the retrying events should match the tasks that are retried"""
assert {e.task_id for e in retrying_events} == {
task_to_id[should_retry_wrong_exception_type],
}
assert retrying_events[0].task_id == task_to_id[should_retry_wrong_exception_type]

"""Three failed events should emit, one each for the two failing initial runs and one for the retry."""
assert (
@@ -1,8 +1,8 @@
import base64
import os

from langfuse import Langfuse # type: ignore[import-untyped]
from langfuse.openai import AsyncOpenAI # type: ignore[import-untyped]
from langfuse import Langfuse # type: ignore
from langfuse.openai import AsyncOpenAI # type: ignore

# > Configure Langfuse
LANGFUSE_AUTH = base64.b64encode(

@@ -1,6 +1,6 @@
import asyncio

from langfuse import get_client # type: ignore[import-untyped]
from langfuse import get_client # type: ignore
from opentelemetry.trace import StatusCode

from examples.opentelemetry_instrumentation.langfuse.worker import langfuse_task
@@ -1,8 +1,9 @@
import asyncio
from collections.abc import AsyncGenerator
from datetime import datetime, timedelta, timezone
from random import choice
from subprocess import Popen
from typing import Any, AsyncGenerator, Literal
from typing import Any, Literal
from uuid import uuid4

import pytest
@@ -58,6 +59,7 @@ async def dummy_runs() -> None:

await asyncio.sleep(3)

return None
return

@pytest.mark.parametrize(
74
examples/python/quickstart/poetry.lock
generated
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "aiohappyeyeballs"
|
||||
@@ -114,7 +114,7 @@ propcache = ">=0.2.0"
|
||||
yarl = ">=1.17.0,<2.0"
|
||||
|
||||
[package.extras]
|
||||
speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
|
||||
speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp-retry"
|
||||
@@ -199,12 +199,12 @@ files = [
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
|
||||
tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
|
||||
tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
|
||||
tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
||||
tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
|
||||
|
||||
[[package]]
|
||||
name = "cel-python"
|
||||
@@ -460,14 +460,14 @@ setuptools = "*"
|
||||
|
||||
[[package]]
|
||||
name = "hatchet-sdk"
|
||||
version = "1.12.2"
|
||||
version = "1.0.0a1"
|
||||
description = ""
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "hatchet_sdk-1.12.2-py3-none-any.whl", hash = "sha256:a2701fc9fe277935346bc1f974bce075afa61d6aae1e43ef01d3e9a06abd30ce"},
|
||||
{file = "hatchet_sdk-1.12.2.tar.gz", hash = "sha256:77a91539640d732523bff6135593d56171f5cb0185b3e86bbc561e27b18e9a32"},
|
||||
{file = "hatchet_sdk-1.0.0a1-py3-none-any.whl", hash = "sha256:bfc84358c8842cecd0d95b30645109733b7292dff0db1a776ca862785ee93d7f"},
|
||||
{file = "hatchet_sdk-1.0.0a1.tar.gz", hash = "sha256:f0272bbaac6faed75ff727826e9f7b1ac42ae597f9b590e14d392aada9c9692f"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -483,11 +483,13 @@ grpcio-tools = [
|
||||
{version = ">=1.64.1,<1.68.dev0 || >=1.69.dev0", markers = "python_version < \"3.13\""},
|
||||
{version = ">=1.69.0", markers = "python_version >= \"3.13\""},
|
||||
]
|
||||
nest-asyncio = ">=1.6.0,<2.0.0"
|
||||
prometheus-client = ">=0.21.1,<0.22.0"
|
||||
protobuf = ">=5.29.5,<6.0.0"
|
||||
protobuf = ">=5.29.1,<6.0.0"
|
||||
pydantic = ">=2.6.3,<3.0.0"
|
||||
pydantic-settings = ">=2.7.1,<3.0.0"
|
||||
python-dateutil = ">=2.9.0.post0,<3.0.0"
|
||||
pyyaml = ">=6.0.1,<7.0.0"
|
||||
tenacity = ">=8.4.1"
|
||||
urllib3 = ">=1.26.20"
|
||||
|
||||
@@ -643,6 +645,18 @@ files = [
|
||||
[package.dependencies]
|
||||
typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[[package]]
|
||||
name = "nest-asyncio"
|
||||
version = "1.6.0"
|
||||
description = "Patch asyncio to allow nested event loops"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
|
||||
{file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prometheus-client"
|
||||
version = "0.21.1"
|
||||
@@ -768,23 +782,23 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "protobuf"
|
||||
version = "5.29.5"
|
||||
version = "5.29.4"
|
||||
description = ""
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079"},
|
||||
{file = "protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc"},
|
||||
{file = "protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671"},
|
||||
{file = "protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015"},
|
||||
{file = "protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61"},
|
||||
{file = "protobuf-5.29.5-cp38-cp38-win32.whl", hash = "sha256:ef91363ad4faba7b25d844ef1ada59ff1604184c0bcd8b39b8a6bef15e1af238"},
|
||||
{file = "protobuf-5.29.5-cp38-cp38-win_amd64.whl", hash = "sha256:7318608d56b6402d2ea7704ff1e1e4597bee46d760e7e4dd42a3d45e24b87f2e"},
|
||||
{file = "protobuf-5.29.5-cp39-cp39-win32.whl", hash = "sha256:6f642dc9a61782fa72b90878af134c5afe1917c89a568cd3476d758d3c3a0736"},
|
||||
{file = "protobuf-5.29.5-cp39-cp39-win_amd64.whl", hash = "sha256:470f3af547ef17847a28e1f47200a1cbf0ba3ff57b7de50d22776607cd2ea353"},
|
||||
{file = "protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5"},
|
||||
{file = "protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84"},
|
||||
{file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"},
|
||||
{file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"},
|
||||
{file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"},
|
||||
{file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"},
|
||||
{file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"},
|
||||
{file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"},
|
||||
{file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"},
|
||||
{file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"},
|
||||
{file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"},
|
||||
{file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"},
|
||||
{file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -806,7 +820,7 @@ typing-extensions = ">=4.12.2"
|
||||
|
||||
[package.extras]
|
||||
email = ["email-validator (>=2.0.0)"]
|
||||
timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""]
|
||||
timezone = ["tzdata"]
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-core"
|
||||
@@ -1048,13 +1062,13 @@ files = [
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
|
||||
core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
|
||||
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]
|
||||
core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
|
||||
cover = ["pytest-cov"]
|
||||
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
|
||||
enabler = ["pytest-enabler (>=2.2)"]
|
||||
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
|
||||
type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
|
||||
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
|
||||
type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]
|
||||
|
||||
[[package]]
|
||||
name = "six"
|
||||
@@ -1133,7 +1147,7 @@ files = [
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
|
||||
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
|
||||
h2 = ["h2 (>=4,<5)"]
|
||||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
@@ -1238,4 +1252,4 @@ propcache = ">=0.2.0"
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = "^3.10"
|
||||
content-hash = "fe1fd90bab8fe4470ec4afbd5c1331962a821f6751a8f02bea567f9b9f44b815"
|
||||
content-hash = "74c12e499aa797ca5c8559af579f1212b0e4e3a77f068f9385db39d70ba304e0"
|
||||
|
||||
@@ -1,19 +1,16 @@
import asyncio

from examples.streaming.worker import streaming_workflow
from examples.streaming.worker import stream_task
from hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType

async def main() -> None:
ref = await streaming_workflow.aio_run_no_wait()
await asyncio.sleep(1)
ref = await stream_task.aio_run_no_wait()

stream = ref.stream()

async for chunk in stream:
print(chunk)
async for chunk in ref.stream():
if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:
print(chunk.payload, flush=True, end="")

if __name__ == "__main__":
import asyncio

asyncio.run(main())
@@ -1,10 +1,10 @@
import time

from examples.streaming.worker import streaming_workflow
from examples.streaming.worker import stream_task

def main() -> None:
ref = streaming_workflow.run_no_wait()
ref = stream_task.run_no_wait()
time.sleep(1)

stream = ref.stream()
47
examples/python/streaming/test_streaming.py
Normal file
@@ -0,0 +1,47 @@
import asyncio
from datetime import datetime, timedelta, timezone
from subprocess import Popen
from typing import Any

import pytest

from examples.streaming.worker import chunks, stream_task
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.listeners.run_event_listener import (
StepRunEvent,
StepRunEventType,
)

@pytest.mark.parametrize(
"on_demand_worker",
[
(
["poetry", "run", "python", "examples/streaming/worker.py", "--slots", "1"],
8008,
)
],
indirect=True,
)
@pytest.mark.parametrize("execution_number", range(1))
@pytest.mark.asyncio(loop_scope="session")
async def test_streaming_ordering_and_completeness(
execution_number: int,
hatchet: Hatchet,
on_demand_worker: Popen[Any],
) -> None:
ref = await stream_task.aio_run_no_wait()

ix = 0
anna_karenina = ""

async for chunk in ref.stream():
if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:
assert chunks[ix] == chunk.payload
ix += 1
anna_karenina += chunk.payload

assert ix == len(chunks)
assert anna_karenina == "".join(chunks)

await ref.aio_result()
@@ -1,23 +1,39 @@
import asyncio
from datetime import datetime, timedelta, timezone
from typing import Generator

from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet(debug=True)
hatchet = Hatchet(debug=False)

# > Streaming

streaming_workflow = hatchet.workflow(name="StreamingWorkflow")
content = """
Happy families are all alike; every unhappy family is unhappy in its own way.
Everything was in confusion in the Oblonskys' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him. This position of affairs had now lasted three days, and not only the husband and wife themselves, but all the members of their family and household, were painfully conscious of it. Every person in the house felt that there was so sense in their living together, and that the stray people brought together by chance in any inn had more in common with one another than they, the members of the family and household of the Oblonskys. The wife did not leave her own room, the husband had not been at home for three days. The children ran wild all over the house; the English governess quarreled with the housekeeper, and wrote to a friend asking her to look out for a new situation for her; the man-cook had walked off the day before just at dinner time; the kitchen-maid, and the coachman had given warning.
"""

@streaming_workflow.task()
async def step1(input: EmptyModel, ctx: Context) -> None:
for i in range(10):
await asyncio.sleep(1)
ctx.put_stream(f"Processing {i}")
def create_chunks(content: str, n: int) -> Generator[str, None, None]:
for i in range(0, len(content), n):
yield content[i : i + n]

chunks = list(create_chunks(content, 10))

@hatchet.task()
async def stream_task(input: EmptyModel, ctx: Context) -> None:
await asyncio.sleep(2)

for chunk in chunks:
ctx.put_stream(chunk)
await asyncio.sleep(0.05)

def main() -> None:
worker = hatchet.worker("test-worker", workflows=[streaming_workflow])
worker = hatchet.worker("test-worker", workflows=[stream_task])
worker.start()
@@ -7,7 +7,10 @@ from examples.timeout.worker import refresh_timeout_wf, timeout_wf
async def test_execution_timeout() -> None:
run = timeout_wf.run_no_wait()

with pytest.raises(Exception, match="(Task exceeded timeout|TIMED_OUT)"):
with pytest.raises(
Exception,
match="(Task exceeded timeout|TIMED_OUT|Workflow run .* failed with multiple errors)",
):
await run.aio_result()
@@ -1,5 +1,4 @@
import asyncio
import os

import pytest

@@ -7,10 +6,6 @@ from examples.waits.worker import task_condition_workflow
from hatchet_sdk import Hatchet

@pytest.mark.skipif(
os.getenv("CI", "false").lower() == "true",
reason="Skipped in CI because of unreliability",
)
@pytest.mark.asyncio(loop_scope="session")
async def test_waits(hatchet: Hatchet) -> None:
@@ -48,7 +48,7 @@ export const child3 = child.task({
export const parent = hatchet.task({
name: 'parent',
fn: async (input: ParentInput, ctx) => {
const c = await child.run({
const c = await ctx.runChild(child, {
Message: input.Message,
});
@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
|
||||
const snippet: Snippet = {
|
||||
language: 'python',
|
||||
content:
|
||||
'import time\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\n# > Concurrency Strategy With Key\nclass WorkflowInput(BaseModel):\n group: str\n\n\nconcurrency_limit_rr_workflow = hatchet.workflow(\n name="ConcurrencyDemoWorkflowRR",\n concurrency=ConcurrencyExpression(\n expression="input.group",\n max_runs=1,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n input_validator=WorkflowInput,\n)\n\n\n@concurrency_limit_rr_workflow.task()\ndef step1(input: WorkflowInput, ctx: Context) -> None:\n print("starting step1")\n time.sleep(2)\n print("finished step1")\n pass\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "concurrency-demo-worker-rr",\n slots=10,\n workflows=[concurrency_limit_rr_workflow],\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
'import time\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\n# > Concurrency Strategy With Key\nclass WorkflowInput(BaseModel):\n group: str\n\n\nconcurrency_limit_rr_workflow = hatchet.workflow(\n name="ConcurrencyDemoWorkflowRR",\n concurrency=ConcurrencyExpression(\n expression="input.group",\n max_runs=1,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n input_validator=WorkflowInput,\n)\n\n\n@concurrency_limit_rr_workflow.task()\ndef step1(input: WorkflowInput, ctx: Context) -> None:\n print("starting step1")\n time.sleep(2)\n print("finished step1")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "concurrency-demo-worker-rr",\n slots=10,\n workflows=[concurrency_limit_rr_workflow],\n )\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
source: 'out/python/concurrency_limit_rr/worker.py',
|
||||
blocks: {
|
||||
concurrency_strategy_with_key: {
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
|
||||
const snippet: Snippet = {
|
||||
language: 'python',
|
||||
content:
|
||||
'import asyncio\nfrom datetime import timedelta\nfrom typing import Any\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.admin import DedupeViolationErr\n\nhatchet = Hatchet(debug=True)\n\ndedupe_parent_wf = hatchet.workflow(name="DedupeParent")\ndedupe_child_wf = hatchet.workflow(name="DedupeChild")\n\n\n@dedupe_parent_wf.task(execution_timeout=timedelta(minutes=1))\nasync def spawn(input: EmptyModel, ctx: Context) -> dict[str, list[Any]]:\n print("spawning child")\n\n results = []\n\n for i in range(2):\n try:\n results.append(\n (\n dedupe_child_wf.aio_run(\n options=TriggerWorkflowOptions(\n additional_metadata={"dedupe": "test"}, key=f"child{i}"\n ),\n )\n )\n )\n except DedupeViolationErr as e:\n print(f"dedupe violation {e}")\n continue\n\n result = await asyncio.gather(*results)\n print(f"results {result}")\n\n return {"results": result}\n\n\n@dedupe_child_wf.task()\nasync def process(input: EmptyModel, ctx: Context) -> dict[str, str]:\n await asyncio.sleep(3)\n\n print("child process")\n return {"status": "success"}\n\n\n@dedupe_child_wf.task()\nasync def process2(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print("child process2")\n return {"status2": "success"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "fanout-worker", slots=100, workflows=[dedupe_parent_wf, dedupe_child_wf]\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
'import asyncio\nfrom datetime import timedelta\nfrom typing import Any\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.exceptions import DedupeViolationError\n\nhatchet = Hatchet(debug=True)\n\ndedupe_parent_wf = hatchet.workflow(name="DedupeParent")\ndedupe_child_wf = hatchet.workflow(name="DedupeChild")\n\n\n@dedupe_parent_wf.task(execution_timeout=timedelta(minutes=1))\nasync def spawn(input: EmptyModel, ctx: Context) -> dict[str, list[Any]]:\n print("spawning child")\n\n results = []\n\n for i in range(2):\n try:\n results.append(\n dedupe_child_wf.aio_run(\n options=TriggerWorkflowOptions(\n additional_metadata={"dedupe": "test"}, key=f"child{i}"\n ),\n )\n )\n except DedupeViolationError as e:\n print(f"dedupe violation {e}")\n continue\n\n result = await asyncio.gather(*results)\n print(f"results {result}")\n\n return {"results": result}\n\n\n@dedupe_child_wf.task()\nasync def process(input: EmptyModel, ctx: Context) -> dict[str, str]:\n await asyncio.sleep(3)\n\n print("child process")\n return {"status": "success"}\n\n\n@dedupe_child_wf.task()\nasync def process2(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print("child process2")\n return {"status2": "success"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "fanout-worker", slots=100, workflows=[dedupe_parent_wf, dedupe_child_wf]\n )\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
source: 'out/python/dedupe/worker.py',
|
||||
blocks: {},
|
||||
highlights: {},
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
|
||||
const snippet: Snippet = {
|
||||
language: 'python',
|
||||
content:
|
||||
'import asyncio\nimport os\n\nimport pytest\n\nfrom examples.durable.worker import EVENT_KEY, SLEEP_TIME, durable_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.skipif(\n os.getenv("CI", "false").lower() == "true",\n reason="Skipped in CI because of unreliability",\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_durable(hatchet: Hatchet) -> None:\n ref = durable_workflow.run_no_wait()\n\n await asyncio.sleep(SLEEP_TIME + 10)\n\n hatchet.event.push(EVENT_KEY, {})\n\n result = await ref.aio_result()\n\n workers = await hatchet.workers.aio_list()\n\n assert workers.rows\n\n active_workers = [w for w in workers.rows if w.status == "ACTIVE"]\n\n assert len(active_workers) == 2\n assert any(w.name == "e2e-test-worker" for w in active_workers)\n assert any(w.name.endswith("e2e-test-worker_durable") for w in active_workers)\n assert result["durable_task"]["status"] == "success"\n',
|
||||
'import asyncio\n\nimport pytest\n\nfrom examples.durable.worker import EVENT_KEY, SLEEP_TIME, durable_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_durable(hatchet: Hatchet) -> None:\n ref = durable_workflow.run_no_wait()\n\n await asyncio.sleep(SLEEP_TIME + 10)\n\n hatchet.event.push(EVENT_KEY, {})\n\n result = await ref.aio_result()\n\n workers = await hatchet.workers.aio_list()\n\n assert workers.rows\n\n active_workers = [w for w in workers.rows if w.status == "ACTIVE"]\n\n assert len(active_workers) == 2\n assert any(\n w.name == hatchet.config.apply_namespace("e2e-test-worker")\n for w in active_workers\n )\n assert any(\n w.name == hatchet.config.apply_namespace("e2e-test-worker_durable")\n for w in active_workers\n )\n assert result["durable_task"]["status"] == "success"\n',
|
||||
source: 'out/python/durable/test_durable.py',
|
||||
blocks: {},
|
||||
highlights: {},
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
|
||||
const snippet: Snippet = {
|
||||
language: 'python',
|
||||
content:
|
||||
'from examples.events.worker import EVENT_KEY, event_workflow\nfrom hatchet_sdk import Hatchet, PushEventOptions\n\nhatchet = Hatchet()\n\n# > Create a filter\nhatchet.filters.create(\n workflow_id=event_workflow.id,\n expression="input.should_skip == false",\n scope="foobarbaz",\n payload={\n "main_character": "Anna",\n "supporting_character": "Stiva",\n "location": "Moscow",\n },\n)\n\n# > Skip a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n "should_skip": True,\n },\n options=PushEventOptions(\n scope="foobarbaz",\n ),\n)\n\n# > Trigger a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n "should_skip": True,\n },\n options=PushEventOptions(\n scope="foobarbaz",\n ),\n)\n',
|
||||
'from examples.events.worker import EVENT_KEY, event_workflow\nfrom hatchet_sdk import Hatchet, PushEventOptions\n\nhatchet = Hatchet()\n\n# > Create a filter\nhatchet.filters.create(\n workflow_id=event_workflow.id,\n expression="input.should_skip == false",\n scope="foobarbaz",\n payload={\n "main_character": "Anna",\n "supporting_character": "Stiva",\n "location": "Moscow",\n },\n)\n\n# > Skip a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n "should_skip": True,\n },\n options=PushEventOptions(\n scope="foobarbaz",\n ),\n)\n\n# > Trigger a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n "should_skip": False,\n },\n options=PushEventOptions(\n scope="foobarbaz",\n ),\n)\n',
|
||||
source: 'out/python/events/filter.py',
|
||||
blocks: {
|
||||
create_a_filter: {
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
|
||||
const snippet: Snippet = {
|
||||
language: 'python',
|
||||
content:
|
||||
'from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, DefaultFilter, Hatchet\n\nhatchet = Hatchet()\n\n\n# > Event trigger\nEVENT_KEY = "user:create"\nSECONDARY_KEY = "foobarbaz"\nWILDCARD_KEY = "subscription:*"\n\n\nclass EventWorkflowInput(BaseModel):\n should_skip: bool\n\n\nevent_workflow = hatchet.workflow(\n name="EventWorkflow",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n)\n\n# > Event trigger with filter\nevent_workflow_with_filter = hatchet.workflow(\n name="EventWorkflow",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n default_filters=[\n DefaultFilter(\n expression="true",\n scope="example-scope",\n payload={\n "main_character": "Anna",\n "supporting_character": "Stiva",\n "location": "Moscow",\n },\n )\n ],\n)\n\n\n@event_workflow.task()\ndef task(input: EventWorkflowInput, ctx: Context) -> dict[str, str]:\n print("event received")\n\n return dict(ctx.filter_payload)\n\n\n# > Accessing the filter payload\n@event_workflow_with_filter.task()\ndef filtered_task(input: EventWorkflowInput, ctx: Context) -> None:\n print(ctx.filter_payload)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(name="EventWorker", workflows=[event_workflow])\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
'from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, DefaultFilter, Hatchet\n\nhatchet = Hatchet()\n\n\n# > Event trigger\nEVENT_KEY = "user:create"\nSECONDARY_KEY = "foobarbaz"\nWILDCARD_KEY = "subscription:*"\n\n\nclass EventWorkflowInput(BaseModel):\n should_skip: bool\n\n\nevent_workflow = hatchet.workflow(\n name="EventWorkflow",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n)\n\n# > Event trigger with filter\nevent_workflow_with_filter = hatchet.workflow(\n name="EventWorkflow",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n default_filters=[\n DefaultFilter(\n expression="true",\n scope="example-scope",\n payload={\n "main_character": "Anna",\n "supporting_character": "Stiva",\n "location": "Moscow",\n },\n )\n ],\n)\n\n\n@event_workflow.task()\ndef task(input: EventWorkflowInput, ctx: Context) -> dict[str, str]:\n print("event received")\n\n return ctx.filter_payload\n\n\n# > Accessing the filter payload\n@event_workflow_with_filter.task()\ndef filtered_task(input: EventWorkflowInput, ctx: Context) -> None:\n print(ctx.filter_payload)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(name="EventWorker", workflows=[event_workflow])\n\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
source: 'out/python/events/worker.py',
|
||||
blocks: {
|
||||
event_trigger: {
|
||||
|
||||
@@ -3,12 +3,12 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
|
||||
const snippet: Snippet = {
|
||||
language: 'python',
|
||||
content:
|
||||
'# > Lifespan\n\nfrom typing import AsyncGenerator, cast\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass Lifespan(BaseModel):\n foo: str\n pi: float\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n yield Lifespan(foo="bar", pi=3.14)\n\n\n@hatchet.task(name="LifespanWorkflow")\ndef lifespan_task(input: EmptyModel, ctx: Context) -> Lifespan:\n return cast(Lifespan, ctx.lifespan)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "test-worker", slots=1, workflows=[lifespan_task], lifespan=lifespan\n )\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
'# > Lifespan\n\nfrom collections.abc import AsyncGenerator\nfrom typing import cast\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass Lifespan(BaseModel):\n foo: str\n pi: float\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n yield Lifespan(foo="bar", pi=3.14)\n\n\n@hatchet.task(name="LifespanWorkflow")\ndef lifespan_task(input: EmptyModel, ctx: Context) -> Lifespan:\n return cast(Lifespan, ctx.lifespan)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n "test-worker", slots=1, workflows=[lifespan_task], lifespan=lifespan\n )\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
source: 'out/python/lifespans/simple.py',
|
||||
blocks: {
|
||||
lifespan: {
|
||||
start: 2,
|
||||
stop: 32,
|
||||
stop: 33,
|
||||
},
|
||||
},
|
||||
highlights: {},
|
||||
|
||||
@@ -3,16 +3,16 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
|
||||
const snippet: Snippet = {
|
||||
language: 'python',
|
||||
content:
|
||||
'from typing import AsyncGenerator, cast\nfrom uuid import UUID\n\nfrom psycopg_pool import ConnectionPool\nfrom pydantic import BaseModel, ConfigDict\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Use the lifespan in a task\nclass TaskOutput(BaseModel):\n num_rows: int\n external_ids: list[UUID]\n\n\nlifespan_workflow = hatchet.workflow(name="LifespanWorkflow")\n\n\n@lifespan_workflow.task()\ndef sync_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute("SELECT * FROM v1_lookup_table_olap LIMIT 5;")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print("executed sync task with lifespan", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n\n\n@lifespan_workflow.task()\nasync def async_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute("SELECT * FROM v1_lookup_table_olap LIMIT 5;")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print("executed async task with lifespan", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n# > Define a lifespan\nclass Lifespan(BaseModel):\n model_config = ConfigDict(arbitrary_types_allowed=True)\n\n foo: str\n pool: ConnectionPool\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n print("Running lifespan!")\n with ConnectionPool("postgres://hatchet:hatchet@localhost:5431/hatchet") as pool:\n yield Lifespan(\n foo="bar",\n pool=pool,\n )\n\n print("Cleaning up lifespan!")\n\n\nworker = hatchet.worker(\n "test-worker", slots=1, workflows=[lifespan_workflow], lifespan=lifespan\n)\n\n\ndef main() -> None:\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
'from collections.abc import AsyncGenerator\nfrom typing import cast\nfrom uuid import UUID\n\nfrom psycopg_pool import ConnectionPool\nfrom pydantic import BaseModel, ConfigDict\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Use the lifespan in a task\nclass TaskOutput(BaseModel):\n num_rows: int\n external_ids: list[UUID]\n\n\nlifespan_workflow = hatchet.workflow(name="LifespanWorkflow")\n\n\n@lifespan_workflow.task()\ndef sync_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute("SELECT * FROM v1_lookup_table_olap LIMIT 5;")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print("executed sync task with lifespan", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n\n\n@lifespan_workflow.task()\nasync def async_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute("SELECT * FROM v1_lookup_table_olap LIMIT 5;")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print("executed async task with lifespan", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n# > Define a lifespan\nclass Lifespan(BaseModel):\n model_config = ConfigDict(arbitrary_types_allowed=True)\n\n foo: str\n pool: ConnectionPool\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n print("Running lifespan!")\n with ConnectionPool("postgres://hatchet:hatchet@localhost:5431/hatchet") as pool:\n yield Lifespan(\n foo="bar",\n pool=pool,\n )\n\n print("Cleaning up lifespan!")\n\n\nworker = hatchet.worker(\n "test-worker", slots=1, workflows=[lifespan_workflow], lifespan=lifespan\n)\n\n\ndef main() -> None:\n worker.start()\n\n\nif __name__ == "__main__":\n main()\n',
|
||||
source: 'out/python/lifespans/worker.py',
|
||||
blocks: {
|
||||
use_the_lifespan_in_a_task: {
|
||||
start: 13,
|
||||
stop: 39,
|
||||
start: 14,
|
||||
stop: 40,
|
||||
},
|
||||
define_a_lifespan: {
|
||||
start: 62,
|
||||
stop: 82,
|
||||
start: 63,
|
||||
stop: 83,
|
||||
},
|
||||
},
|
||||
highlights: {},
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
|
||||
const snippet: Snippet = {
|
||||
language: 'python',
|
||||
content:
|
||||
'# > LoggingWorkflow\n\nimport logging\nimport time\n\nfrom examples.logger.client import hatchet\nfrom hatchet_sdk import Context, EmptyModel\n\nlogger = logging.getLogger(__name__)\n\nlogging_workflow = hatchet.workflow(\n name="LoggingWorkflow",\n)\n\n\n@logging_workflow.task()\ndef root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n logger.info("executed step1 - {}".format(i))\n logger.info({"step1": "step1"})\n\n time.sleep(0.1)\n\n return {"status": "success"}\n\n\n\n# > ContextLogger\n\n\n@logging_workflow.task()\ndef context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n ctx.log("executed step1 - {}".format(i))\n ctx.log({"step1": "step1"})\n\n time.sleep(0.1)\n\n return {"status": "success"}\n\n\n',
|
||||
'# > LoggingWorkflow\n\nimport logging\nimport time\n\nfrom examples.logger.client import hatchet\nfrom hatchet_sdk import Context, EmptyModel\n\nlogger = logging.getLogger(__name__)\n\nlogging_workflow = hatchet.workflow(\n name="LoggingWorkflow",\n)\n\n\n@logging_workflow.task()\ndef root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n logger.info(f"executed step1 - {i}")\n logger.info({"step1": "step1"})\n\n time.sleep(0.1)\n\n return {"status": "success"}\n\n\n\n# > ContextLogger\n\n\n@logging_workflow.task()\ndef context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n ctx.log(f"executed step1 - {i}")\n ctx.log({"step1": "step1"})\n\n time.sleep(0.1)\n\n return {"status": "success"}\n\n\n',
|
||||
source: 'out/python/logger/workflow.py',
|
||||
blocks: {
|
||||
loggingworkflow: {
|
||||
|
||||
@@ -3,32 +3,32 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
|
||||
const snippet: Snippet = {
|
||||
language: 'python',
|
||||
content:
|
||||
'from datetime import datetime, timedelta, timezone\nfrom typing import Any, Dict, List, Mapping\n\nimport requests\nfrom pydantic import BaseModel\nfrom requests import Response\n\nfrom hatchet_sdk.context.context import Context\n\nfrom .hatchet_client import hatchet\n\n\nasync def process_image(image_url: str, filters: List[str]) -> Dict[str, Any]:\n # Do some image processing\n return {"url": image_url, "size": 100, "format": "png"}\n\n\n# > Before (Mergent)\nasync def process_image_task(request: Any) -> Dict[str, Any]:\n image_url = request.json["image_url"]\n filters = request.json["filters"]\n try:\n result = await process_image(image_url, filters)\n return {"success": True, "processed_url": result["url"]}\n except Exception as e:\n print(f"Image processing failed: {e}")\n raise\n\n\n\n\n# > After (Hatchet)\nclass ImageProcessInput(BaseModel):\n image_url: str\n filters: List[str]\n\n\nclass ImageProcessOutput(BaseModel):\n processed_url: str\n metadata: Dict[str, Any]\n\n\n@hatchet.task(\n name="image-processor",\n retries=3,\n execution_timeout="10m",\n input_validator=ImageProcessInput,\n)\nasync def image_processor(input: ImageProcessInput, ctx: Context) -> ImageProcessOutput:\n # Do some image processing\n result = await process_image(input.image_url, input.filters)\n\n if not result["url"]:\n raise ValueError("Processing failed to generate URL")\n\n return ImageProcessOutput(\n processed_url=result["url"],\n metadata={\n "size": result["size"],\n "format": result["format"],\n "applied_filters": input.filters,\n },\n )\n\n\n\n\nasync def run() -> None:\n # > Running a task (Mergent)\n headers: Mapping[str, str] = {\n "Authorization": "Bearer <token>",\n "Content-Type": "application/json",\n }\n\n task_data = {\n "name": "4cf95241-fa19-47ef-8a67-71e483747649",\n "queue": "default",\n "request": {\n "url": "https://example.com",\n "headers": {\n "Authorization": "fake-secret-token",\n "Content-Type": "application/json",\n },\n "body": "Hello, world!",\n },\n }\n\n try:\n response: Response = requests.post(\n "https://api.mergent.co/v2/tasks",\n headers=headers,\n json=task_data,\n )\n print(response.json())\n except Exception as e:\n print(f"Error: {e}")\n\n # > Running a task (Hatchet)\n result = await image_processor.aio_run(\n ImageProcessInput(image_url="https://example.com/image.png", filters=["blur"])\n )\n\n # you can await fully typed results\n print(result)\n\n\nasync def schedule() -> None:\n # > Scheduling tasks (Mergent)\n options = {\n # same options as before\n "json": {\n # same body as before\n "delay": "5m"\n }\n }\n\n print(options)\n\n # > Scheduling tasks (Hatchet)\n # Schedule the task to run at a specific time\n run_at = datetime.now(tz=timezone.utc) + timedelta(days=1)\n await image_processor.aio_schedule(\n run_at,\n ImageProcessInput(image_url="https://example.com/image.png", filters=["blur"]),\n )\n\n # Schedule the task to run every hour\n await image_processor.aio_create_cron(\n "run-hourly",\n "0 * * * *",\n ImageProcessInput(image_url="https://example.com/image.png", filters=["blur"]),\n )\n',
'from collections.abc import Mapping\nfrom datetime import datetime, timedelta, timezone\nfrom typing import Any\n\nimport requests\nfrom pydantic import BaseModel\nfrom requests import Response\n\nfrom hatchet_sdk.context.context import Context\n\nfrom .hatchet_client import hatchet\n\n\nasync def process_image(image_url: str, filters: list[str]) -> dict[str, Any]:\n # Do some image processing\n return {"url": image_url, "size": 100, "format": "png"}\n\n\n# > Before (Mergent)\nasync def process_image_task(request: Any) -> dict[str, Any]:\n image_url = request.json["image_url"]\n filters = request.json["filters"]\n try:\n result = await process_image(image_url, filters)\n return {"success": True, "processed_url": result["url"]}\n except Exception as e:\n print(f"Image processing failed: {e}")\n raise\n\n\n\n\n# > After (Hatchet)\nclass ImageProcessInput(BaseModel):\n image_url: str\n filters: list[str]\n\n\nclass ImageProcessOutput(BaseModel):\n processed_url: str\n metadata: dict[str, Any]\n\n\n@hatchet.task(\n name="image-processor",\n retries=3,\n execution_timeout="10m",\n input_validator=ImageProcessInput,\n)\nasync def image_processor(input: ImageProcessInput, ctx: Context) -> ImageProcessOutput:\n # Do some image processing\n result = await process_image(input.image_url, input.filters)\n\n if not result["url"]:\n raise ValueError("Processing failed to generate URL")\n\n return ImageProcessOutput(\n processed_url=result["url"],\n metadata={\n "size": result["size"],\n "format": result["format"],\n "applied_filters": input.filters,\n },\n )\n\n\n\n\nasync def run() -> None:\n # > Running a task (Mergent)\n headers: Mapping[str, str] = {\n "Authorization": "Bearer <token>",\n "Content-Type": "application/json",\n }\n\n task_data = {\n "name": "4cf95241-fa19-47ef-8a67-71e483747649",\n "queue": "default",\n "request": {\n "url": "https://example.com",\n "headers": {\n "Authorization": "fake-secret-token",\n "Content-Type": "application/json",\n },\n "body": "Hello, world!",\n },\n }\n\n try:\n response: Response = requests.post(\n "https://api.mergent.co/v2/tasks",\n headers=headers,\n json=task_data,\n )\n print(response.json())\n except Exception as e:\n print(f"Error: {e}")\n\n # > Running a task (Hatchet)\n result = await image_processor.aio_run(\n ImageProcessInput(image_url="https://example.com/image.png", filters=["blur"])\n )\n\n # you can await fully typed results\n print(result)\n\n\nasync def schedule() -> None:\n # > Scheduling tasks (Mergent)\n options = {\n # same options as before\n "json": {\n # same body as before\n "delay": "5m"\n }\n }\n\n print(options)\n\n # > Scheduling tasks (Hatchet)\n # Schedule the task to run at a specific time\n run_at = datetime.now(tz=timezone.utc) + timedelta(days=1)\n await image_processor.aio_schedule(\n run_at,\n ImageProcessInput(image_url="https://example.com/image.png", filters=["blur"]),\n )\n\n # Schedule the task to run every hour\n await image_processor.aio_create_cron(\n "run-hourly",\n "0 * * * *",\n ImageProcessInput(image_url="https://example.com/image.png", filters=["blur"]),\n )\n',
source: 'out/python/migration_guides/mergent.py',
blocks: {
before_mergent: {
start: 19,
stop: 29,
start: 20,
stop: 30,
},
after_hatchet: {
start: 33,
stop: 65,
start: 34,
stop: 66,
},
running_a_task_mergent: {
start: 70,
stop: 96,
start: 71,
stop: 97,
},
running_a_task_hatchet: {
start: 99,
stop: 104,
start: 100,
stop: 105,
},
scheduling_tasks_mergent: {
start: 109,
stop: 115,
start: 110,
stop: 116,
},
scheduling_tasks_hatchet: {
start: 120,
stop: 132,
start: 121,
stop: 133,
},
},
highlights: {},

@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
const snippet: Snippet = {
language: 'python',
content:
'import pytest\n\nfrom examples.non_retryable.worker import (\n non_retryable_workflow,\n should_not_retry,\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n)\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType\nfrom hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails\n\n\ndef find_id(runs: V1WorkflowRunDetails, match: str) -> str:\n return next(t.metadata.id for t in runs.tasks if match in t.display_name)\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_no_retry(hatchet: Hatchet) -> None:\n ref = await non_retryable_workflow.aio_run_no_wait()\n\n with pytest.raises(Exception, match="retry"):\n await ref.aio_result()\n\n runs = await hatchet.runs.aio_get(ref.workflow_run_id)\n task_to_id = {\n task: find_id(runs, task.name)\n for task in [\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n should_not_retry,\n ]\n }\n\n retrying_events = [\n e for e in runs.task_events if e.event_type == V1TaskEventType.RETRYING\n ]\n\n """Only one task should be retried."""\n assert len(retrying_events) == 1\n\n """The task id of the retrying events should match the tasks that are retried"""\n assert {e.task_id for e in retrying_events} == {\n task_to_id[should_retry_wrong_exception_type],\n }\n\n """Three failed events should emit, one each for the two failing initial runs and one for the retry."""\n assert (\n len([e for e in runs.task_events if e.event_type == V1TaskEventType.FAILED])\n == 3\n )\n',
'import asyncio\n\nimport pytest\n\nfrom examples.non_retryable.worker import (\n non_retryable_workflow,\n should_not_retry,\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n)\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType\nfrom hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails\nfrom hatchet_sdk.exceptions import FailedTaskRunExceptionGroup\n\n\ndef find_id(runs: V1WorkflowRunDetails, match: str) -> str:\n return next(t.metadata.id for t in runs.tasks if match in t.display_name)\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_no_retry(hatchet: Hatchet) -> None:\n ref = await non_retryable_workflow.aio_run_no_wait()\n\n with pytest.raises(FailedTaskRunExceptionGroup) as exc_info:\n await ref.aio_result()\n\n exception_group = exc_info.value\n\n assert len(exception_group.exceptions) == 2\n\n exc_text = [e.exc for e in exception_group.exceptions]\n\n non_retries = [\n e\n for e in exc_text\n if "This task should retry because it\'s not a NonRetryableException" in e\n ]\n\n other_errors = [e for e in exc_text if "This task should not retry" in e]\n\n assert len(non_retries) == 1\n assert len(other_errors) == 1\n\n await asyncio.sleep(3)\n\n runs = await hatchet.runs.aio_get(ref.workflow_run_id)\n task_to_id = {\n task: find_id(runs, task.name)\n for task in [\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n should_not_retry,\n ]\n }\n\n retrying_events = [\n e for e in runs.task_events if e.event_type == V1TaskEventType.RETRYING\n ]\n\n """Only one task should be retried."""\n assert len(retrying_events) == 1\n\n """The task id of the retrying events should match the tasks that are retried"""\n assert retrying_events[0].task_id == task_to_id[should_retry_wrong_exception_type]\n\n """Three failed events should emit, one each for the two failing initial runs and one for the retry."""\n assert (\n len([e for e in runs.task_events if e.event_type == V1TaskEventType.FAILED])\n == 3\n )\n',
source: 'out/python/non_retryable/test_no_retry.py',
blocks: {},
highlights: {},

@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
const snippet: Snippet = {
language: 'python',
content:
'import base64\nimport os\n\nfrom langfuse import Langfuse # type: ignore[import-untyped]\nfrom langfuse.openai import AsyncOpenAI # type: ignore[import-untyped]\n\n# > Configure Langfuse\nLANGFUSE_AUTH = base64.b64encode(\n f"{os.getenv(\'LANGFUSE_PUBLIC_KEY\')}:{os.getenv(\'LANGFUSE_SECRET_KEY\')}".encode()\n).decode()\n\nos.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = (\n os.getenv("LANGFUSE_HOST", "https://us.cloud.langfuse.com") + "/api/public/otel"\n)\nos.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"\n\n## Note: Langfuse sets the global tracer provider, so you don\'t need to worry about it\nlf = Langfuse(\n public_key=os.getenv("LANGFUSE_PUBLIC_KEY"),\n secret_key=os.getenv("LANGFUSE_SECRET_KEY"),\n host=os.getenv("LANGFUSE_HOST", "https://app.langfuse.com"),\n)\n\n# > Create OpenAI client\nopenai = AsyncOpenAI(\n api_key=os.getenv("OPENAI_API_KEY"),\n)\n',
'import base64\nimport os\n\nfrom langfuse import Langfuse # type: ignore\nfrom langfuse.openai import AsyncOpenAI # type: ignore\n\n# > Configure Langfuse\nLANGFUSE_AUTH = base64.b64encode(\n f"{os.getenv(\'LANGFUSE_PUBLIC_KEY\')}:{os.getenv(\'LANGFUSE_SECRET_KEY\')}".encode()\n).decode()\n\nos.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = (\n os.getenv("LANGFUSE_HOST", "https://us.cloud.langfuse.com") + "/api/public/otel"\n)\nos.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"\n\n## Note: Langfuse sets the global tracer provider, so you don\'t need to worry about it\nlf = Langfuse(\n public_key=os.getenv("LANGFUSE_PUBLIC_KEY"),\n secret_key=os.getenv("LANGFUSE_SECRET_KEY"),\n host=os.getenv("LANGFUSE_HOST", "https://app.langfuse.com"),\n)\n\n# > Create OpenAI client\nopenai = AsyncOpenAI(\n api_key=os.getenv("OPENAI_API_KEY"),\n)\n',
source: 'out/python/opentelemetry_instrumentation/langfuse/client.py',
blocks: {
configure_langfuse: {

@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
const snippet: Snippet = {
language: 'python',
content:
'import asyncio\n\nfrom langfuse import get_client # type: ignore[import-untyped]\nfrom opentelemetry.trace import StatusCode\n\nfrom examples.opentelemetry_instrumentation.langfuse.worker import langfuse_task\n\n# > Trigger task\ntracer = get_client()\n\n\nasync def main() -> None:\n # Traces will send to Langfuse\n # Use `_otel_tracer` to access the OpenTelemetry tracer if you need\n # to e.g. log statuses or attributes manually.\n with tracer._otel_tracer.start_as_current_span(name="trigger") as span:\n result = await langfuse_task.aio_run()\n location = result.get("location")\n\n if not location:\n span.set_status(StatusCode.ERROR)\n return\n\n span.set_attribute("location", location)\n\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n',
'import asyncio\n\nfrom langfuse import get_client # type: ignore\nfrom opentelemetry.trace import StatusCode\n\nfrom examples.opentelemetry_instrumentation.langfuse.worker import langfuse_task\n\n# > Trigger task\ntracer = get_client()\n\n\nasync def main() -> None:\n # Traces will send to Langfuse\n # Use `_otel_tracer` to access the OpenTelemetry tracer if you need\n # to e.g. log statuses or attributes manually.\n with tracer._otel_tracer.start_as_current_span(name="trigger") as span:\n result = await langfuse_task.aio_run()\n location = result.get("location")\n\n if not location:\n span.set_status(StatusCode.ERROR)\n return\n\n span.set_attribute("location", location)\n\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n',
source: 'out/python/opentelemetry_instrumentation/langfuse/trigger.py',
blocks: {
trigger_task: {

File diff suppressed because one or more lines are too long
@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
const snippet: Snippet = {
language: 'python',
content:
'import asyncio\n\nfrom examples.streaming.worker import streaming_workflow\n\n\nasync def main() -> None:\n ref = await streaming_workflow.aio_run_no_wait()\n await asyncio.sleep(1)\n\n stream = ref.stream()\n\n async for chunk in stream:\n print(chunk)\n\n\nif __name__ == "__main__":\n import asyncio\n\n asyncio.run(main())\n',
'import asyncio\n\nfrom examples.streaming.worker import stream_task\nfrom hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType\n\n\nasync def main() -> None:\n ref = await stream_task.aio_run_no_wait()\n\n async for chunk in ref.stream():\n if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:\n print(chunk.payload, flush=True, end="")\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n',
source: 'out/python/streaming/async_stream.py',
blocks: {},
highlights: {},

@@ -1,7 +1,9 @@
import async_stream from './async_stream';
import sync_stream from './sync_stream';
import test_streaming from './test_streaming';
import worker from './worker';

export { async_stream };
export { sync_stream };
export { test_streaming };
export { worker };

@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
const snippet: Snippet = {
language: 'python',
content:
'import time\n\nfrom examples.streaming.worker import streaming_workflow\n\n\ndef main() -> None:\n ref = streaming_workflow.run_no_wait()\n time.sleep(1)\n\n stream = ref.stream()\n\n for chunk in stream:\n print(chunk)\n\n\nif __name__ == "__main__":\n main()\n',
'import time\n\nfrom examples.streaming.worker import stream_task\n\n\ndef main() -> None:\n ref = stream_task.run_no_wait()\n time.sleep(1)\n\n stream = ref.stream()\n\n for chunk in stream:\n print(chunk)\n\n\nif __name__ == "__main__":\n main()\n',
source: 'out/python/streaming/sync_stream.py',
blocks: {},
highlights: {},

@@ -0,0 +1,12 @@
import { Snippet } from '@/next/lib/docs/generated/snips/types';

const snippet: Snippet = {
language: 'python',
content:
'import asyncio\nfrom datetime import datetime, timedelta, timezone\nfrom subprocess import Popen\nfrom typing import Any\n\nimport pytest\n\nfrom examples.streaming.worker import chunks, stream_task\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.listeners.run_event_listener import (\n StepRunEvent,\n StepRunEventType,\n)\n\n\n@pytest.mark.parametrize(\n "on_demand_worker",\n [\n (\n ["poetry", "run", "python", "examples/streaming/worker.py", "--slots", "1"],\n 8008,\n )\n ],\n indirect=True,\n)\n@pytest.mark.parametrize("execution_number", range(1))\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_streaming_ordering_and_completeness(\n execution_number: int,\n hatchet: Hatchet,\n on_demand_worker: Popen[Any],\n) -> None:\n ref = await stream_task.aio_run_no_wait()\n\n ix = 0\n anna_karenina = ""\n\n async for chunk in ref.stream():\n if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:\n assert chunks[ix] == chunk.payload\n ix += 1\n anna_karenina += chunk.payload\n\n assert ix == len(chunks)\n assert anna_karenina == "".join(chunks)\n\n await ref.aio_result()\n',
source: 'out/python/streaming/test_streaming.py',
blocks: {},
highlights: {},
};

export default snippet;
@@ -3,12 +3,12 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
const snippet: Snippet = {
language: 'python',
content:
'import asyncio\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n# > Streaming\n\nstreaming_workflow = hatchet.workflow(name="StreamingWorkflow")\n\n\n@streaming_workflow.task()\nasync def step1(input: EmptyModel, ctx: Context) -> None:\n for i in range(10):\n await asyncio.sleep(1)\n ctx.put_stream(f"Processing {i}")\n\n\ndef main() -> None:\n worker = hatchet.worker("test-worker", workflows=[streaming_workflow])\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n',
'import asyncio\nfrom datetime import datetime, timedelta, timezone\nfrom typing import Generator\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=False)\n\n# > Streaming\n\ncontent = """\nHappy families are all alike; every unhappy family is unhappy in its own way.\n\nEverything was in confusion in the Oblonskys\' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him. This position of affairs had now lasted three days, and not only the husband and wife themselves, but all the members of their family and household, were painfully conscious of it. Every person in the house felt that there was so sense in their living together, and that the stray people brought together by chance in any inn had more in common with one another than they, the members of the family and household of the Oblonskys. The wife did not leave her own room, the husband had not been at home for three days. The children ran wild all over the house; the English governess quarreled with the housekeeper, and wrote to a friend asking her to look out for a new situation for her; the man-cook had walked off the day before just at dinner time; the kitchen-maid, and the coachman had given warning.\n"""\n\n\ndef create_chunks(content: str, n: int) -> Generator[str, None, None]:\n for i in range(0, len(content), n):\n yield content[i : i + n]\n\n\nchunks = list(create_chunks(content, 10))\n\n\n@hatchet.task()\nasync def stream_task(input: EmptyModel, ctx: Context) -> None:\n await asyncio.sleep(2)\n\n for chunk in chunks:\n ctx.put_stream(chunk)\n await asyncio.sleep(0.05)\n\n\ndef main() -> None:\n worker = hatchet.worker("test-worker", workflows=[stream_task])\n worker.start()\n\n\n\nif __name__ == "__main__":\n main()\n',
source: 'out/python/streaming/worker.py',
blocks: {
streaming: {
start: 8,
stop: 23,
start: 10,
stop: 39,
},
},
highlights: {},

@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
const snippet: Snippet = {
language: 'python',
content:
'import pytest\n\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_execution_timeout() -> None:\n run = timeout_wf.run_no_wait()\n\n with pytest.raises(Exception, match="(Task exceeded timeout|TIMED_OUT)"):\n await run.aio_result()\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_run_refresh_timeout() -> None:\n result = await refresh_timeout_wf.aio_run()\n\n assert result["refresh_task"]["status"] == "success"\n',
'import pytest\n\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_execution_timeout() -> None:\n run = timeout_wf.run_no_wait()\n\n with pytest.raises(\n Exception,\n match="(Task exceeded timeout|TIMED_OUT|Workflow run .* failed with multiple errors)",\n ):\n await run.aio_result()\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_run_refresh_timeout() -> None:\n result = await refresh_timeout_wf.aio_run()\n\n assert result["refresh_task"]["status"] == "success"\n',
source: 'out/python/timeout/test_timeout.py',
blocks: {},
highlights: {},

@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
const snippet: Snippet = {
language: 'python',
content:
'import asyncio\nimport os\n\nimport pytest\n\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.skipif(\n os.getenv("CI", "false").lower() == "true",\n reason="Skipped in CI because of unreliability",\n)\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_waits(hatchet: Hatchet) -> None:\n\n ref = task_condition_workflow.run_no_wait()\n\n await asyncio.sleep(15)\n\n hatchet.event.push("skip_on_event:skip", {})\n hatchet.event.push("wait_for_event:start", {})\n\n result = await ref.aio_result()\n\n assert result["skip_on_event"] == {"skipped": True}\n\n first_random_number = result["start"]["random_number"]\n wait_for_event_random_number = result["wait_for_event"]["random_number"]\n wait_for_sleep_random_number = result["wait_for_sleep"]["random_number"]\n\n left_branch = result["left_branch"]\n right_branch = result["right_branch"]\n\n assert left_branch.get("skipped") is True or right_branch.get("skipped") is True\n\n branch_random_number = left_branch.get("random_number") or right_branch.get(\n "random_number"\n )\n\n result_sum = result["sum"]["sum"]\n\n assert (\n result_sum\n == first_random_number\n + wait_for_event_random_number\n + wait_for_sleep_random_number\n + branch_random_number\n )\n',
'import asyncio\n\nimport pytest\n\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope="session")\nasync def test_waits(hatchet: Hatchet) -> None:\n\n ref = task_condition_workflow.run_no_wait()\n\n await asyncio.sleep(15)\n\n hatchet.event.push("skip_on_event:skip", {})\n hatchet.event.push("wait_for_event:start", {})\n\n result = await ref.aio_result()\n\n assert result["skip_on_event"] == {"skipped": True}\n\n first_random_number = result["start"]["random_number"]\n wait_for_event_random_number = result["wait_for_event"]["random_number"]\n wait_for_sleep_random_number = result["wait_for_sleep"]["random_number"]\n\n left_branch = result["left_branch"]\n right_branch = result["right_branch"]\n\n assert left_branch.get("skipped") is True or right_branch.get("skipped") is True\n\n branch_random_number = left_branch.get("random_number") or right_branch.get(\n "random_number"\n )\n\n result_sum = result["sum"]["sum"]\n\n assert (\n result_sum\n == first_random_number\n + wait_for_event_random_number\n + wait_for_sleep_random_number\n + branch_random_number\n )\n',
source: 'out/python/waits/test_waits.py',
blocks: {},
highlights: {},

@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "import time\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\n# > Concurrency Strategy With Key\nclass WorkflowInput(BaseModel):\n group: str\n\n\nconcurrency_limit_rr_workflow = hatchet.workflow(\n name=\"ConcurrencyDemoWorkflowRR\",\n concurrency=ConcurrencyExpression(\n expression=\"input.group\",\n max_runs=1,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n input_validator=WorkflowInput,\n)\n\n\n@concurrency_limit_rr_workflow.task()\ndef step1(input: WorkflowInput, ctx: Context) -> None:\n print(\"starting step1\")\n time.sleep(2)\n print(\"finished step1\")\n pass\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"concurrency-demo-worker-rr\",\n slots=10,\n workflows=[concurrency_limit_rr_workflow],\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
"content": "import time\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n\n# > Concurrency Strategy With Key\nclass WorkflowInput(BaseModel):\n group: str\n\n\nconcurrency_limit_rr_workflow = hatchet.workflow(\n name=\"ConcurrencyDemoWorkflowRR\",\n concurrency=ConcurrencyExpression(\n expression=\"input.group\",\n max_runs=1,\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n input_validator=WorkflowInput,\n)\n\n\n@concurrency_limit_rr_workflow.task()\ndef step1(input: WorkflowInput, ctx: Context) -> None:\n print(\"starting step1\")\n time.sleep(2)\n print(\"finished step1\")\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"concurrency-demo-worker-rr\",\n slots=10,\n workflows=[concurrency_limit_rr_workflow],\n )\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
"source": "out/python/concurrency_limit_rr/worker.py",
"blocks": {
"concurrency_strategy_with_key": {

@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "import asyncio\nfrom datetime import timedelta\nfrom typing import Any\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.admin import DedupeViolationErr\n\nhatchet = Hatchet(debug=True)\n\ndedupe_parent_wf = hatchet.workflow(name=\"DedupeParent\")\ndedupe_child_wf = hatchet.workflow(name=\"DedupeChild\")\n\n\n@dedupe_parent_wf.task(execution_timeout=timedelta(minutes=1))\nasync def spawn(input: EmptyModel, ctx: Context) -> dict[str, list[Any]]:\n print(\"spawning child\")\n\n results = []\n\n for i in range(2):\n try:\n results.append(\n (\n dedupe_child_wf.aio_run(\n options=TriggerWorkflowOptions(\n additional_metadata={\"dedupe\": \"test\"}, key=f\"child{i}\"\n ),\n )\n )\n )\n except DedupeViolationErr as e:\n print(f\"dedupe violation {e}\")\n continue\n\n result = await asyncio.gather(*results)\n print(f\"results {result}\")\n\n return {\"results\": result}\n\n\n@dedupe_child_wf.task()\nasync def process(input: EmptyModel, ctx: Context) -> dict[str, str]:\n await asyncio.sleep(3)\n\n print(\"child process\")\n return {\"status\": \"success\"}\n\n\n@dedupe_child_wf.task()\nasync def process2(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\"child process2\")\n return {\"status2\": \"success\"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"fanout-worker\", slots=100, workflows=[dedupe_parent_wf, dedupe_child_wf]\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
"content": "import asyncio\nfrom datetime import timedelta\nfrom typing import Any\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.exceptions import DedupeViolationError\n\nhatchet = Hatchet(debug=True)\n\ndedupe_parent_wf = hatchet.workflow(name=\"DedupeParent\")\ndedupe_child_wf = hatchet.workflow(name=\"DedupeChild\")\n\n\n@dedupe_parent_wf.task(execution_timeout=timedelta(minutes=1))\nasync def spawn(input: EmptyModel, ctx: Context) -> dict[str, list[Any]]:\n print(\"spawning child\")\n\n results = []\n\n for i in range(2):\n try:\n results.append(\n dedupe_child_wf.aio_run(\n options=TriggerWorkflowOptions(\n additional_metadata={\"dedupe\": \"test\"}, key=f\"child{i}\"\n ),\n )\n )\n except DedupeViolationError as e:\n print(f\"dedupe violation {e}\")\n continue\n\n result = await asyncio.gather(*results)\n print(f\"results {result}\")\n\n return {\"results\": result}\n\n\n@dedupe_child_wf.task()\nasync def process(input: EmptyModel, ctx: Context) -> dict[str, str]:\n await asyncio.sleep(3)\n\n print(\"child process\")\n return {\"status\": \"success\"}\n\n\n@dedupe_child_wf.task()\nasync def process2(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\"child process2\")\n return {\"status2\": \"success\"}\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"fanout-worker\", slots=100, workflows=[dedupe_parent_wf, dedupe_child_wf]\n )\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
"source": "out/python/dedupe/worker.py",
"blocks": {},
"highlights": {}

@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "import asyncio\nimport os\n\nimport pytest\n\nfrom examples.durable.worker import EVENT_KEY, SLEEP_TIME, durable_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.skipif(\n os.getenv(\"CI\", \"false\").lower() == \"true\",\n reason=\"Skipped in CI because of unreliability\",\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_durable(hatchet: Hatchet) -> None:\n ref = durable_workflow.run_no_wait()\n\n await asyncio.sleep(SLEEP_TIME + 10)\n\n hatchet.event.push(EVENT_KEY, {})\n\n result = await ref.aio_result()\n\n workers = await hatchet.workers.aio_list()\n\n assert workers.rows\n\n active_workers = [w for w in workers.rows if w.status == \"ACTIVE\"]\n\n assert len(active_workers) == 2\n assert any(w.name == \"e2e-test-worker\" for w in active_workers)\n assert any(w.name.endswith(\"e2e-test-worker_durable\") for w in active_workers)\n assert result[\"durable_task\"][\"status\"] == \"success\"\n",
"content": "import asyncio\n\nimport pytest\n\nfrom examples.durable.worker import EVENT_KEY, SLEEP_TIME, durable_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_durable(hatchet: Hatchet) -> None:\n ref = durable_workflow.run_no_wait()\n\n await asyncio.sleep(SLEEP_TIME + 10)\n\n hatchet.event.push(EVENT_KEY, {})\n\n result = await ref.aio_result()\n\n workers = await hatchet.workers.aio_list()\n\n assert workers.rows\n\n active_workers = [w for w in workers.rows if w.status == \"ACTIVE\"]\n\n assert len(active_workers) == 2\n assert any(\n w.name == hatchet.config.apply_namespace(\"e2e-test-worker\")\n for w in active_workers\n )\n assert any(\n w.name == hatchet.config.apply_namespace(\"e2e-test-worker_durable\")\n for w in active_workers\n )\n assert result[\"durable_task\"][\"status\"] == \"success\"\n",
"source": "out/python/durable/test_durable.py",
"blocks": {},
"highlights": {}

@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "from examples.events.worker import EVENT_KEY, event_workflow\nfrom hatchet_sdk import Hatchet, PushEventOptions\n\nhatchet = Hatchet()\n\n# > Create a filter\nhatchet.filters.create(\n workflow_id=event_workflow.id,\n expression=\"input.should_skip == false\",\n scope=\"foobarbaz\",\n payload={\n \"main_character\": \"Anna\",\n \"supporting_character\": \"Stiva\",\n \"location\": \"Moscow\",\n },\n)\n\n# > Skip a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n \"should_skip\": True,\n },\n options=PushEventOptions(\n scope=\"foobarbaz\",\n ),\n)\n\n# > Trigger a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n \"should_skip\": True,\n },\n options=PushEventOptions(\n scope=\"foobarbaz\",\n ),\n)\n",
"content": "from examples.events.worker import EVENT_KEY, event_workflow\nfrom hatchet_sdk import Hatchet, PushEventOptions\n\nhatchet = Hatchet()\n\n# > Create a filter\nhatchet.filters.create(\n workflow_id=event_workflow.id,\n expression=\"input.should_skip == false\",\n scope=\"foobarbaz\",\n payload={\n \"main_character\": \"Anna\",\n \"supporting_character\": \"Stiva\",\n \"location\": \"Moscow\",\n },\n)\n\n# > Skip a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n \"should_skip\": True,\n },\n options=PushEventOptions(\n scope=\"foobarbaz\",\n ),\n)\n\n# > Trigger a run\nhatchet.event.push(\n event_key=EVENT_KEY,\n payload={\n \"should_skip\": False,\n },\n options=PushEventOptions(\n scope=\"foobarbaz\",\n ),\n)\n",
"source": "out/python/events/filter.py",
"blocks": {
"create_a_filter": {

File diff suppressed because one or more lines are too long
@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, DefaultFilter, Hatchet\n\nhatchet = Hatchet()\n\n\n# > Event trigger\nEVENT_KEY = \"user:create\"\nSECONDARY_KEY = \"foobarbaz\"\nWILDCARD_KEY = \"subscription:*\"\n\n\nclass EventWorkflowInput(BaseModel):\n should_skip: bool\n\n\nevent_workflow = hatchet.workflow(\n name=\"EventWorkflow\",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n)\n\n# > Event trigger with filter\nevent_workflow_with_filter = hatchet.workflow(\n name=\"EventWorkflow\",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n default_filters=[\n DefaultFilter(\n expression=\"true\",\n scope=\"example-scope\",\n payload={\n \"main_character\": \"Anna\",\n \"supporting_character\": \"Stiva\",\n \"location\": \"Moscow\",\n },\n )\n ],\n)\n\n\n@event_workflow.task()\ndef task(input: EventWorkflowInput, ctx: Context) -> dict[str, str]:\n print(\"event received\")\n\n return dict(ctx.filter_payload)\n\n\n# > Accessing the filter payload\n@event_workflow_with_filter.task()\ndef filtered_task(input: EventWorkflowInput, ctx: Context) -> None:\n print(ctx.filter_payload)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(name=\"EventWorker\", workflows=[event_workflow])\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
"content": "from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, DefaultFilter, Hatchet\n\nhatchet = Hatchet()\n\n\n# > Event trigger\nEVENT_KEY = \"user:create\"\nSECONDARY_KEY = \"foobarbaz\"\nWILDCARD_KEY = \"subscription:*\"\n\n\nclass EventWorkflowInput(BaseModel):\n should_skip: bool\n\n\nevent_workflow = hatchet.workflow(\n name=\"EventWorkflow\",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n)\n\n# > Event trigger with filter\nevent_workflow_with_filter = hatchet.workflow(\n name=\"EventWorkflow\",\n on_events=[EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],\n input_validator=EventWorkflowInput,\n default_filters=[\n DefaultFilter(\n expression=\"true\",\n scope=\"example-scope\",\n payload={\n \"main_character\": \"Anna\",\n \"supporting_character\": \"Stiva\",\n \"location\": \"Moscow\",\n },\n )\n ],\n)\n\n\n@event_workflow.task()\ndef task(input: EventWorkflowInput, ctx: Context) -> dict[str, str]:\n print(\"event received\")\n\n return ctx.filter_payload\n\n\n# > Accessing the filter payload\n@event_workflow_with_filter.task()\ndef filtered_task(input: EventWorkflowInput, ctx: Context) -> None:\n print(ctx.filter_payload)\n\n\n\n\ndef main() -> None:\n worker = hatchet.worker(name=\"EventWorker\", workflows=[event_workflow])\n\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
"source": "out/python/events/worker.py",
"blocks": {
"event_trigger": {

@@ -2,12 +2,12 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "# > Lifespan\n\nfrom typing import AsyncGenerator, cast\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass Lifespan(BaseModel):\n foo: str\n pi: float\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n yield Lifespan(foo=\"bar\", pi=3.14)\n\n\n@hatchet.task(name=\"LifespanWorkflow\")\ndef lifespan_task(input: EmptyModel, ctx: Context) -> Lifespan:\n return cast(Lifespan, ctx.lifespan)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"test-worker\", slots=1, workflows=[lifespan_task], lifespan=lifespan\n )\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n",
"content": "# > Lifespan\n\nfrom collections.abc import AsyncGenerator\nfrom typing import cast\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\nclass Lifespan(BaseModel):\n foo: str\n pi: float\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n yield Lifespan(foo=\"bar\", pi=3.14)\n\n\n@hatchet.task(name=\"LifespanWorkflow\")\ndef lifespan_task(input: EmptyModel, ctx: Context) -> Lifespan:\n return cast(Lifespan, ctx.lifespan)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \"test-worker\", slots=1, workflows=[lifespan_task], lifespan=lifespan\n )\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n",
"source": "out/python/lifespans/simple.py",
"blocks": {
"lifespan": {
"start": 2,
"stop": 32
"stop": 33
}
},
"highlights": {}

@@ -2,16 +2,16 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "from typing import AsyncGenerator, cast\nfrom uuid import UUID\n\nfrom psycopg_pool import ConnectionPool\nfrom pydantic import BaseModel, ConfigDict\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Use the lifespan in a task\nclass TaskOutput(BaseModel):\n num_rows: int\n external_ids: list[UUID]\n\n\nlifespan_workflow = hatchet.workflow(name=\"LifespanWorkflow\")\n\n\n@lifespan_workflow.task()\ndef sync_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute(\"SELECT * FROM v1_lookup_table_olap LIMIT 5;\")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print(\"executed sync task with lifespan\", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n\n\n@lifespan_workflow.task()\nasync def async_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute(\"SELECT * FROM v1_lookup_table_olap LIMIT 5;\")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print(\"executed async task with lifespan\", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n# > Define a lifespan\nclass Lifespan(BaseModel):\n model_config = ConfigDict(arbitrary_types_allowed=True)\n\n foo: str\n pool: ConnectionPool\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n print(\"Running lifespan!\")\n with ConnectionPool(\"postgres://hatchet:hatchet@localhost:5431/hatchet\") as pool:\n yield Lifespan(\n foo=\"bar\",\n pool=pool,\n )\n\n print(\"Cleaning up lifespan!\")\n\n\nworker = hatchet.worker(\n \"test-worker\", slots=1, workflows=[lifespan_workflow], lifespan=lifespan\n)\n\n\ndef main() -> None:\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
"content": "from collections.abc import AsyncGenerator\nfrom typing import cast\nfrom uuid import UUID\n\nfrom psycopg_pool import ConnectionPool\nfrom pydantic import BaseModel, ConfigDict\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n# > Use the lifespan in a task\nclass TaskOutput(BaseModel):\n num_rows: int\n external_ids: list[UUID]\n\n\nlifespan_workflow = hatchet.workflow(name=\"LifespanWorkflow\")\n\n\n@lifespan_workflow.task()\ndef sync_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute(\"SELECT * FROM v1_lookup_table_olap LIMIT 5;\")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print(\"executed sync task with lifespan\", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n\n\n@lifespan_workflow.task()\nasync def async_lifespan_task(input: EmptyModel, ctx: Context) -> TaskOutput:\n pool = cast(Lifespan, ctx.lifespan).pool\n\n with pool.connection() as conn:\n query = conn.execute(\"SELECT * FROM v1_lookup_table_olap LIMIT 5;\")\n rows = query.fetchall()\n\n for row in rows:\n print(row)\n\n print(\"executed async task with lifespan\", ctx.lifespan)\n\n return TaskOutput(\n num_rows=len(rows),\n external_ids=[cast(UUID, row[0]) for row in rows],\n )\n\n\n# > Define a lifespan\nclass Lifespan(BaseModel):\n model_config = ConfigDict(arbitrary_types_allowed=True)\n\n foo: str\n pool: ConnectionPool\n\n\nasync def lifespan() -> AsyncGenerator[Lifespan, None]:\n print(\"Running lifespan!\")\n with ConnectionPool(\"postgres://hatchet:hatchet@localhost:5431/hatchet\") as pool:\n yield Lifespan(\n foo=\"bar\",\n pool=pool,\n )\n\n print(\"Cleaning up lifespan!\")\n\n\nworker = hatchet.worker(\n \"test-worker\", slots=1, workflows=[lifespan_workflow], lifespan=lifespan\n)\n\n\ndef main() -> None:\n worker.start()\n\n\nif __name__ == \"__main__\":\n main()\n",
"source": "out/python/lifespans/worker.py",
"blocks": {
"use_the_lifespan_in_a_task": {
"start": 13,
"stop": 39
"start": 14,
"stop": 40
},
"define_a_lifespan": {
"start": 62,
"stop": 82
"start": 63,
"stop": 83
}
},
"highlights": {}

@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "# > LoggingWorkflow\n\nimport logging\nimport time\n\nfrom examples.logger.client import hatchet\nfrom hatchet_sdk import Context, EmptyModel\n\nlogger = logging.getLogger(__name__)\n\nlogging_workflow = hatchet.workflow(\n name=\"LoggingWorkflow\",\n)\n\n\n@logging_workflow.task()\ndef root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n logger.info(\"executed step1 - {}\".format(i))\n logger.info({\"step1\": \"step1\"})\n\n time.sleep(0.1)\n\n return {\"status\": \"success\"}\n\n\n\n# > ContextLogger\n\n\n@logging_workflow.task()\ndef context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n ctx.log(\"executed step1 - {}\".format(i))\n ctx.log({\"step1\": \"step1\"})\n\n time.sleep(0.1)\n\n return {\"status\": \"success\"}\n\n\n",
"content": "# > LoggingWorkflow\n\nimport logging\nimport time\n\nfrom examples.logger.client import hatchet\nfrom hatchet_sdk import Context, EmptyModel\n\nlogger = logging.getLogger(__name__)\n\nlogging_workflow = hatchet.workflow(\n name=\"LoggingWorkflow\",\n)\n\n\n@logging_workflow.task()\ndef root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n logger.info(f\"executed step1 - {i}\")\n logger.info({\"step1\": \"step1\"})\n\n time.sleep(0.1)\n\n return {\"status\": \"success\"}\n\n\n\n# > ContextLogger\n\n\n@logging_workflow.task()\ndef context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:\n for i in range(12):\n ctx.log(f\"executed step1 - {i}\")\n ctx.log({\"step1\": \"step1\"})\n\n time.sleep(0.1)\n\n return {\"status\": \"success\"}\n\n\n",
"source": "out/python/logger/workflow.py",
"blocks": {
"loggingworkflow": {

@@ -2,32 +2,32 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "from datetime import datetime, timedelta, timezone\nfrom typing import Any, Dict, List, Mapping\n\nimport requests\nfrom pydantic import BaseModel\nfrom requests import Response\n\nfrom hatchet_sdk.context.context import Context\n\nfrom .hatchet_client import hatchet\n\n\nasync def process_image(image_url: str, filters: List[str]) -> Dict[str, Any]:\n # Do some image processing\n return {\"url\": image_url, \"size\": 100, \"format\": \"png\"}\n\n\n# > Before (Mergent)\nasync def process_image_task(request: Any) -> Dict[str, Any]:\n image_url = request.json[\"image_url\"]\n filters = request.json[\"filters\"]\n try:\n result = await process_image(image_url, filters)\n return {\"success\": True, \"processed_url\": result[\"url\"]}\n except Exception as e:\n print(f\"Image processing failed: {e}\")\n raise\n\n\n\n\n# > After (Hatchet)\nclass ImageProcessInput(BaseModel):\n image_url: str\n filters: List[str]\n\n\nclass ImageProcessOutput(BaseModel):\n processed_url: str\n metadata: Dict[str, Any]\n\n\n@hatchet.task(\n name=\"image-processor\",\n retries=3,\n execution_timeout=\"10m\",\n input_validator=ImageProcessInput,\n)\nasync def image_processor(input: ImageProcessInput, ctx: Context) -> ImageProcessOutput:\n # Do some image processing\n result = await process_image(input.image_url, input.filters)\n\n if not result[\"url\"]:\n raise ValueError(\"Processing failed to generate URL\")\n\n return ImageProcessOutput(\n processed_url=result[\"url\"],\n metadata={\n \"size\": result[\"size\"],\n \"format\": result[\"format\"],\n \"applied_filters\": input.filters,\n },\n )\n\n\n\n\nasync def run() -> None:\n # > Running a task (Mergent)\n headers: Mapping[str, str] = {\n \"Authorization\": \"Bearer <token>\",\n \"Content-Type\": \"application/json\",\n }\n\n task_data = {\n \"name\": \"4cf95241-fa19-47ef-8a67-71e483747649\",\n \"queue\": \"default\",\n \"request\": {\n \"url\": \"https://example.com\",\n \"headers\": {\n \"Authorization\": \"fake-secret-token\",\n \"Content-Type\": \"application/json\",\n },\n \"body\": \"Hello, world!\",\n },\n }\n\n try:\n response: Response = requests.post(\n \"https://api.mergent.co/v2/tasks\",\n headers=headers,\n json=task_data,\n )\n print(response.json())\n except Exception as e:\n print(f\"Error: {e}\")\n\n # > Running a task (Hatchet)\n result = await image_processor.aio_run(\n ImageProcessInput(image_url=\"https://example.com/image.png\", filters=[\"blur\"])\n )\n\n # you can await fully typed results\n print(result)\n\n\nasync def schedule() -> None:\n # > Scheduling tasks (Mergent)\n options = {\n # same options as before\n \"json\": {\n # same body as before\n \"delay\": \"5m\"\n }\n }\n\n print(options)\n\n # > Scheduling tasks (Hatchet)\n # Schedule the task to run at a specific time\n run_at = datetime.now(tz=timezone.utc) + timedelta(days=1)\n await image_processor.aio_schedule(\n run_at,\n ImageProcessInput(image_url=\"https://example.com/image.png\", filters=[\"blur\"]),\n )\n\n # Schedule the task to run every hour\n await image_processor.aio_create_cron(\n \"run-hourly\",\n \"0 * * * *\",\n ImageProcessInput(image_url=\"https://example.com/image.png\", filters=[\"blur\"]),\n )\n",
"content": "from collections.abc import Mapping\nfrom datetime import datetime, timedelta, timezone\nfrom typing import Any\n\nimport requests\nfrom pydantic import BaseModel\nfrom requests import Response\n\nfrom hatchet_sdk.context.context import Context\n\nfrom .hatchet_client import hatchet\n\n\nasync def process_image(image_url: str, filters: list[str]) -> dict[str, Any]:\n # Do some image processing\n return {\"url\": image_url, \"size\": 100, \"format\": \"png\"}\n\n\n# > Before (Mergent)\nasync def process_image_task(request: Any) -> dict[str, Any]:\n image_url = request.json[\"image_url\"]\n filters = request.json[\"filters\"]\n try:\n result = await process_image(image_url, filters)\n return {\"success\": True, \"processed_url\": result[\"url\"]}\n except Exception as e:\n print(f\"Image processing failed: {e}\")\n raise\n\n\n\n\n# > After (Hatchet)\nclass ImageProcessInput(BaseModel):\n image_url: str\n filters: list[str]\n\n\nclass ImageProcessOutput(BaseModel):\n processed_url: str\n metadata: dict[str, Any]\n\n\n@hatchet.task(\n name=\"image-processor\",\n retries=3,\n execution_timeout=\"10m\",\n input_validator=ImageProcessInput,\n)\nasync def image_processor(input: ImageProcessInput, ctx: Context) -> ImageProcessOutput:\n # Do some image processing\n result = await process_image(input.image_url, input.filters)\n\n if not result[\"url\"]:\n raise ValueError(\"Processing failed to generate URL\")\n\n return ImageProcessOutput(\n processed_url=result[\"url\"],\n metadata={\n \"size\": result[\"size\"],\n \"format\": result[\"format\"],\n \"applied_filters\": input.filters,\n },\n )\n\n\n\n\nasync def run() -> None:\n # > Running a task (Mergent)\n headers: Mapping[str, str] = {\n \"Authorization\": \"Bearer <token>\",\n \"Content-Type\": \"application/json\",\n }\n\n task_data = {\n \"name\": \"4cf95241-fa19-47ef-8a67-71e483747649\",\n \"queue\": \"default\",\n \"request\": {\n \"url\": \"https://example.com\",\n \"headers\": {\n \"Authorization\": \"fake-secret-token\",\n \"Content-Type\": \"application/json\",\n },\n \"body\": \"Hello, world!\",\n },\n }\n\n try:\n response: Response = requests.post(\n \"https://api.mergent.co/v2/tasks\",\n headers=headers,\n json=task_data,\n )\n print(response.json())\n except Exception as e:\n print(f\"Error: {e}\")\n\n # > Running a task (Hatchet)\n result = await image_processor.aio_run(\n ImageProcessInput(image_url=\"https://example.com/image.png\", filters=[\"blur\"])\n )\n\n # you can await fully typed results\n print(result)\n\n\nasync def schedule() -> None:\n # > Scheduling tasks (Mergent)\n options = {\n # same options as before\n \"json\": {\n # same body as before\n \"delay\": \"5m\"\n }\n }\n\n print(options)\n\n # > Scheduling tasks (Hatchet)\n # Schedule the task to run at a specific time\n run_at = datetime.now(tz=timezone.utc) + timedelta(days=1)\n await image_processor.aio_schedule(\n run_at,\n ImageProcessInput(image_url=\"https://example.com/image.png\", filters=[\"blur\"]),\n )\n\n # Schedule the task to run every hour\n await image_processor.aio_create_cron(\n \"run-hourly\",\n \"0 * * * *\",\n ImageProcessInput(image_url=\"https://example.com/image.png\", filters=[\"blur\"]),\n )\n",
"source": "out/python/migration_guides/mergent.py",
"blocks": {
"before_mergent": {
"start": 19,
"stop": 29
"start": 20,
"stop": 30
},
"after_hatchet": {
"start": 33,
"stop": 65
"start": 34,
"stop": 66
},
"running_a_task_mergent": {
"start": 70,
"stop": 96
"start": 71,
"stop": 97
},
"running_a_task_hatchet": {
"start": 99,
"stop": 104
"start": 100,
"stop": 105
},
"scheduling_tasks_mergent": {
"start": 109,
"stop": 115
"start": 110,
"stop": 116
},
"scheduling_tasks_hatchet": {
"start": 120,
"stop": 132
"start": 121,
"stop": 133
}
},
"highlights": {}

@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "import pytest\n\nfrom examples.non_retryable.worker import (\n non_retryable_workflow,\n should_not_retry,\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n)\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType\nfrom hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails\n\n\ndef find_id(runs: V1WorkflowRunDetails, match: str) -> str:\n return next(t.metadata.id for t in runs.tasks if match in t.display_name)\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_no_retry(hatchet: Hatchet) -> None:\n ref = await non_retryable_workflow.aio_run_no_wait()\n\n with pytest.raises(Exception, match=\"retry\"):\n await ref.aio_result()\n\n runs = await hatchet.runs.aio_get(ref.workflow_run_id)\n task_to_id = {\n task: find_id(runs, task.name)\n for task in [\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n should_not_retry,\n ]\n }\n\n retrying_events = [\n e for e in runs.task_events if e.event_type == V1TaskEventType.RETRYING\n ]\n\n \"\"\"Only one task should be retried.\"\"\"\n assert len(retrying_events) == 1\n\n \"\"\"The task id of the retrying events should match the tasks that are retried\"\"\"\n assert {e.task_id for e in retrying_events} == {\n task_to_id[should_retry_wrong_exception_type],\n }\n\n \"\"\"Three failed events should emit, one each for the two failing initial runs and one for the retry.\"\"\"\n assert (\n len([e for e in runs.task_events if e.event_type == V1TaskEventType.FAILED])\n == 3\n )\n",
"content": "import asyncio\n\nimport pytest\n\nfrom examples.non_retryable.worker import (\n non_retryable_workflow,\n should_not_retry,\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n)\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType\nfrom hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails\nfrom hatchet_sdk.exceptions import FailedTaskRunExceptionGroup\n\n\ndef find_id(runs: V1WorkflowRunDetails, match: str) -> str:\n return next(t.metadata.id for t in runs.tasks if match in t.display_name)\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_no_retry(hatchet: Hatchet) -> None:\n ref = await non_retryable_workflow.aio_run_no_wait()\n\n with pytest.raises(FailedTaskRunExceptionGroup) as exc_info:\n await ref.aio_result()\n\n exception_group = exc_info.value\n\n assert len(exception_group.exceptions) == 2\n\n exc_text = [e.exc for e in exception_group.exceptions]\n\n non_retries = [\n e\n for e in exc_text\n if \"This task should retry because it's not a NonRetryableException\" in e\n ]\n\n other_errors = [e for e in exc_text if \"This task should not retry\" in e]\n\n assert len(non_retries) == 1\n assert len(other_errors) == 1\n\n await asyncio.sleep(3)\n\n runs = await hatchet.runs.aio_get(ref.workflow_run_id)\n task_to_id = {\n task: find_id(runs, task.name)\n for task in [\n should_not_retry_successful_task,\n should_retry_wrong_exception_type,\n should_not_retry,\n ]\n }\n\n retrying_events = [\n e for e in runs.task_events if e.event_type == V1TaskEventType.RETRYING\n ]\n\n \"\"\"Only one task should be retried.\"\"\"\n assert len(retrying_events) == 1\n\n \"\"\"The task id of the retrying events should match the tasks that are retried\"\"\"\n assert retrying_events[0].task_id == task_to_id[should_retry_wrong_exception_type]\n\n \"\"\"Three failed events should emit, one each for the two failing initial runs and one for the retry.\"\"\"\n assert (\n len([e for e in runs.task_events if e.event_type == V1TaskEventType.FAILED])\n == 3\n )\n",
"source": "out/python/non_retryable/test_no_retry.py",
"blocks": {},
"highlights": {}

@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "import base64\nimport os\n\nfrom langfuse import Langfuse # type: ignore[import-untyped]\nfrom langfuse.openai import AsyncOpenAI # type: ignore[import-untyped]\n\n# > Configure Langfuse\nLANGFUSE_AUTH = base64.b64encode(\n f\"{os.getenv('LANGFUSE_PUBLIC_KEY')}:{os.getenv('LANGFUSE_SECRET_KEY')}\".encode()\n).decode()\n\nos.environ[\"OTEL_EXPORTER_OTLP_ENDPOINT\"] = (\n os.getenv(\"LANGFUSE_HOST\", \"https://us.cloud.langfuse.com\") + \"/api/public/otel\"\n)\nos.environ[\"OTEL_EXPORTER_OTLP_HEADERS\"] = f\"Authorization=Basic {LANGFUSE_AUTH}\"\n\n## Note: Langfuse sets the global tracer provider, so you don't need to worry about it\nlf = Langfuse(\n public_key=os.getenv(\"LANGFUSE_PUBLIC_KEY\"),\n secret_key=os.getenv(\"LANGFUSE_SECRET_KEY\"),\n host=os.getenv(\"LANGFUSE_HOST\", \"https://app.langfuse.com\"),\n)\n\n# > Create OpenAI client\nopenai = AsyncOpenAI(\n api_key=os.getenv(\"OPENAI_API_KEY\"),\n)\n",
"content": "import base64\nimport os\n\nfrom langfuse import Langfuse # type: ignore\nfrom langfuse.openai import AsyncOpenAI # type: ignore\n\n# > Configure Langfuse\nLANGFUSE_AUTH = base64.b64encode(\n f\"{os.getenv('LANGFUSE_PUBLIC_KEY')}:{os.getenv('LANGFUSE_SECRET_KEY')}\".encode()\n).decode()\n\nos.environ[\"OTEL_EXPORTER_OTLP_ENDPOINT\"] = (\n os.getenv(\"LANGFUSE_HOST\", \"https://us.cloud.langfuse.com\") + \"/api/public/otel\"\n)\nos.environ[\"OTEL_EXPORTER_OTLP_HEADERS\"] = f\"Authorization=Basic {LANGFUSE_AUTH}\"\n\n## Note: Langfuse sets the global tracer provider, so you don't need to worry about it\nlf = Langfuse(\n public_key=os.getenv(\"LANGFUSE_PUBLIC_KEY\"),\n secret_key=os.getenv(\"LANGFUSE_SECRET_KEY\"),\n host=os.getenv(\"LANGFUSE_HOST\", \"https://app.langfuse.com\"),\n)\n\n# > Create OpenAI client\nopenai = AsyncOpenAI(\n api_key=os.getenv(\"OPENAI_API_KEY\"),\n)\n",
"source": "out/python/opentelemetry_instrumentation/langfuse/client.py",
"blocks": {
"configure_langfuse": {

@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = {
"language": "python",
"content": "import asyncio\n\nfrom langfuse import get_client # type: ignore[import-untyped]\nfrom opentelemetry.trace import StatusCode\n\nfrom examples.opentelemetry_instrumentation.langfuse.worker import langfuse_task\n\n# > Trigger task\ntracer = get_client()\n\n\nasync def main() -> None:\n # Traces will send to Langfuse\n # Use `_otel_tracer` to access the OpenTelemetry tracer if you need\n # to e.g. log statuses or attributes manually.\n with tracer._otel_tracer.start_as_current_span(name=\"trigger\") as span:\n result = await langfuse_task.aio_run()\n location = result.get(\"location\")\n\n if not location:\n span.set_status(StatusCode.ERROR)\n return\n\n span.set_attribute(\"location\", location)\n\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n",
|
||||
"content": "import asyncio\n\nfrom langfuse import get_client # type: ignore\nfrom opentelemetry.trace import StatusCode\n\nfrom examples.opentelemetry_instrumentation.langfuse.worker import langfuse_task\n\n# > Trigger task\ntracer = get_client()\n\n\nasync def main() -> None:\n # Traces will send to Langfuse\n # Use `_otel_tracer` to access the OpenTelemetry tracer if you need\n # to e.g. log statuses or attributes manually.\n with tracer._otel_tracer.start_as_current_span(name=\"trigger\") as span:\n result = await langfuse_task.aio_run()\n location = result.get(\"location\")\n\n if not location:\n span.set_status(StatusCode.ERROR)\n return\n\n span.set_attribute(\"location\", location)\n\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n",
|
||||
"source": "out/python/opentelemetry_instrumentation/langfuse/trigger.py",
|
||||
"blocks": {
|
||||
"trigger_task": {
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';
|
||||
|
||||
const snippet: Snippet = {
|
||||
"language": "python",
|
||||
"content": "import asyncio\n\nfrom examples.streaming.worker import streaming_workflow\n\n\nasync def main() -> None:\n ref = await streaming_workflow.aio_run_no_wait()\n await asyncio.sleep(1)\n\n stream = ref.stream()\n\n async for chunk in stream:\n print(chunk)\n\n\nif __name__ == \"__main__\":\n import asyncio\n\n asyncio.run(main())\n",
|
||||
"content": "import asyncio\n\nfrom examples.streaming.worker import stream_task\nfrom hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType\n\n\nasync def main() -> None:\n ref = await stream_task.aio_run_no_wait()\n\n async for chunk in ref.stream():\n if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:\n print(chunk.payload, flush=True, end=\"\")\n\n\nif __name__ == \"__main__\":\n asyncio.run(main())\n",
|
||||
"source": "out/python/streaming/async_stream.py",
|
||||
"blocks": {},
|
||||
"highlights": {}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import async_stream from './async_stream';
|
||||
import sync_stream from './sync_stream';
|
||||
import test_streaming from './test_streaming';
|
||||
import worker from './worker';
|
||||
|
||||
export { async_stream }
|
||||
export { sync_stream }
|
||||
export { test_streaming }
|
||||
export { worker }
|
||||
|
||||
@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';
|
||||
|
||||
const snippet: Snippet = {
|
||||
"language": "python",
|
||||
"content": "import time\n\nfrom examples.streaming.worker import streaming_workflow\n\n\ndef main() -> None:\n ref = streaming_workflow.run_no_wait()\n time.sleep(1)\n\n stream = ref.stream()\n\n for chunk in stream:\n print(chunk)\n\n\nif __name__ == \"__main__\":\n main()\n",
|
||||
"content": "import time\n\nfrom examples.streaming.worker import stream_task\n\n\ndef main() -> None:\n ref = stream_task.run_no_wait()\n time.sleep(1)\n\n stream = ref.stream()\n\n for chunk in stream:\n print(chunk)\n\n\nif __name__ == \"__main__\":\n main()\n",
|
||||
"source": "out/python/streaming/sync_stream.py",
|
||||
"blocks": {},
|
||||
"highlights": {}
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
import { Snippet } from '@/lib/generated/snips/types';
|
||||
|
||||
const snippet: Snippet = {
|
||||
"language": "python",
|
||||
"content": "import asyncio\nfrom datetime import datetime, timedelta, timezone\nfrom subprocess import Popen\nfrom typing import Any\n\nimport pytest\n\nfrom examples.streaming.worker import chunks, stream_task\nfrom hatchet_sdk import Hatchet\nfrom hatchet_sdk.clients.listeners.run_event_listener import (\n StepRunEvent,\n StepRunEventType,\n)\n\n\n@pytest.mark.parametrize(\n \"on_demand_worker\",\n [\n (\n [\"poetry\", \"run\", \"python\", \"examples/streaming/worker.py\", \"--slots\", \"1\"],\n 8008,\n )\n ],\n indirect=True,\n)\n@pytest.mark.parametrize(\"execution_number\", range(1))\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_streaming_ordering_and_completeness(\n execution_number: int,\n hatchet: Hatchet,\n on_demand_worker: Popen[Any],\n) -> None:\n ref = await stream_task.aio_run_no_wait()\n\n ix = 0\n anna_karenina = \"\"\n\n async for chunk in ref.stream():\n if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:\n assert chunks[ix] == chunk.payload\n ix += 1\n anna_karenina += chunk.payload\n\n assert ix == len(chunks)\n assert anna_karenina == \"\".join(chunks)\n\n await ref.aio_result()\n",
|
||||
"source": "out/python/streaming/test_streaming.py",
|
||||
"blocks": {},
|
||||
"highlights": {}
|
||||
};
|
||||
|
||||
export default snippet;
|
||||
@@ -2,12 +2,12 @@ import { Snippet } from '@/lib/generated/snips/types';
|
||||
|
||||
const snippet: Snippet = {
|
||||
"language": "python",
|
||||
"content": "import asyncio\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n# > Streaming\n\nstreaming_workflow = hatchet.workflow(name=\"StreamingWorkflow\")\n\n\n@streaming_workflow.task()\nasync def step1(input: EmptyModel, ctx: Context) -> None:\n for i in range(10):\n await asyncio.sleep(1)\n ctx.put_stream(f\"Processing {i}\")\n\n\ndef main() -> None:\n worker = hatchet.worker(\"test-worker\", workflows=[streaming_workflow])\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n",
|
||||
"content": "import asyncio\nfrom datetime import datetime, timedelta, timezone\nfrom typing import Generator\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=False)\n\n# > Streaming\n\ncontent = \"\"\"\nHappy families are all alike; every unhappy family is unhappy in its own way.\n\nEverything was in confusion in the Oblonskys' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him. This position of affairs had now lasted three days, and not only the husband and wife themselves, but all the members of their family and household, were painfully conscious of it. Every person in the house felt that there was so sense in their living together, and that the stray people brought together by chance in any inn had more in common with one another than they, the members of the family and household of the Oblonskys. The wife did not leave her own room, the husband had not been at home for three days. The children ran wild all over the house; the English governess quarreled with the housekeeper, and wrote to a friend asking her to look out for a new situation for her; the man-cook had walked off the day before just at dinner time; the kitchen-maid, and the coachman had given warning.\n\"\"\"\n\n\ndef create_chunks(content: str, n: int) -> Generator[str, None, None]:\n for i in range(0, len(content), n):\n yield content[i : i + n]\n\n\nchunks = list(create_chunks(content, 10))\n\n\n@hatchet.task()\nasync def stream_task(input: EmptyModel, ctx: Context) -> None:\n await asyncio.sleep(2)\n\n for chunk in chunks:\n ctx.put_stream(chunk)\n await asyncio.sleep(0.05)\n\n\ndef main() -> None:\n worker = hatchet.worker(\"test-worker\", workflows=[stream_task])\n worker.start()\n\n\n\nif __name__ == \"__main__\":\n main()\n",
|
||||
"source": "out/python/streaming/worker.py",
|
||||
"blocks": {
|
||||
"streaming": {
|
||||
"start": 8,
|
||||
"stop": 23
|
||||
"start": 10,
|
||||
"stop": 39
|
||||
}
|
||||
},
|
||||
"highlights": {}
|
||||
|
||||
@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';
|
||||
|
||||
const snippet: Snippet = {
|
||||
"language": "python",
|
||||
"content": "import pytest\n\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_execution_timeout() -> None:\n run = timeout_wf.run_no_wait()\n\n with pytest.raises(Exception, match=\"(Task exceeded timeout|TIMED_OUT)\"):\n await run.aio_result()\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_run_refresh_timeout() -> None:\n result = await refresh_timeout_wf.aio_run()\n\n assert result[\"refresh_task\"][\"status\"] == \"success\"\n",
|
||||
"content": "import pytest\n\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_execution_timeout() -> None:\n run = timeout_wf.run_no_wait()\n\n with pytest.raises(\n Exception,\n match=\"(Task exceeded timeout|TIMED_OUT|Workflow run .* failed with multiple errors)\",\n ):\n await run.aio_result()\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_run_refresh_timeout() -> None:\n result = await refresh_timeout_wf.aio_run()\n\n assert result[\"refresh_task\"][\"status\"] == \"success\"\n",
|
||||
"source": "out/python/timeout/test_timeout.py",
|
||||
"blocks": {},
|
||||
"highlights": {}
|
||||
|
||||
@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';
|
||||
|
||||
const snippet: Snippet = {
|
||||
"language": "python",
|
||||
"content": "import asyncio\nimport os\n\nimport pytest\n\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.skipif(\n os.getenv(\"CI\", \"false\").lower() == \"true\",\n reason=\"Skipped in CI because of unreliability\",\n)\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_waits(hatchet: Hatchet) -> None:\n\n ref = task_condition_workflow.run_no_wait()\n\n await asyncio.sleep(15)\n\n hatchet.event.push(\"skip_on_event:skip\", {})\n hatchet.event.push(\"wait_for_event:start\", {})\n\n result = await ref.aio_result()\n\n assert result[\"skip_on_event\"] == {\"skipped\": True}\n\n first_random_number = result[\"start\"][\"random_number\"]\n wait_for_event_random_number = result[\"wait_for_event\"][\"random_number\"]\n wait_for_sleep_random_number = result[\"wait_for_sleep\"][\"random_number\"]\n\n left_branch = result[\"left_branch\"]\n right_branch = result[\"right_branch\"]\n\n assert left_branch.get(\"skipped\") is True or right_branch.get(\"skipped\") is True\n\n branch_random_number = left_branch.get(\"random_number\") or right_branch.get(\n \"random_number\"\n )\n\n result_sum = result[\"sum\"][\"sum\"]\n\n assert (\n result_sum\n == first_random_number\n + wait_for_event_random_number\n + wait_for_sleep_random_number\n + branch_random_number\n )\n",
|
||||
"content": "import asyncio\n\nimport pytest\n\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope=\"session\")\nasync def test_waits(hatchet: Hatchet) -> None:\n\n ref = task_condition_workflow.run_no_wait()\n\n await asyncio.sleep(15)\n\n hatchet.event.push(\"skip_on_event:skip\", {})\n hatchet.event.push(\"wait_for_event:start\", {})\n\n result = await ref.aio_result()\n\n assert result[\"skip_on_event\"] == {\"skipped\": True}\n\n first_random_number = result[\"start\"][\"random_number\"]\n wait_for_event_random_number = result[\"wait_for_event\"][\"random_number\"]\n wait_for_sleep_random_number = result[\"wait_for_sleep\"][\"random_number\"]\n\n left_branch = result[\"left_branch\"]\n right_branch = result[\"right_branch\"]\n\n assert left_branch.get(\"skipped\") is True or right_branch.get(\"skipped\") is True\n\n branch_random_number = left_branch.get(\"random_number\") or right_branch.get(\n \"random_number\"\n )\n\n result_sum = result[\"sum\"][\"sum\"]\n\n assert (\n result_sum\n == first_random_number\n + wait_for_event_random_number\n + wait_for_sleep_random_number\n + branch_random_number\n )\n",
|
||||
"source": "out/python/waits/test_waits.py",
|
||||
"blocks": {},
|
||||
"highlights": {}
|
||||
|
||||
@@ -3,7 +3,7 @@ export default {
|
||||
title: "Why Go is a good fit for agents",
|
||||
},
|
||||
"warning-event-loop-blocked": {
|
||||
title: "Warning: The Event Loop May Be Blocked",
|
||||
title: "Warning! The Event Loop May Be Blocked",
|
||||
},
|
||||
"fastest-postgres-inserts": {
|
||||
title: "The fastest Postgres inserts",
|
||||
|
||||
@@ -197,6 +197,10 @@ First line of defense: look for things that are obviously blocking. API calls, d
|
||||
|
||||
As a last resort, you can also change your tasks from async (`async def`) to sync (`def`), although we don't recommend this in most cases.
|
||||
|
||||
### Use a linter
|
||||
|
||||
[Ruff](https://docs.astral.sh/ruff/), for example, has a set of [`ASYNC` linting rules](https://docs.astral.sh/ruff/rules/#flake8-async-async) (ported from `flake8-async`) that help you catch potentially blocking calls in async code.
|
||||
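For instance, the `ASYNC` rules flag blocking calls such as `time.sleep` (and blocking HTTP or file I/O) inside `async` functions. A minimal illustration (the function names are hypothetical):

```python
import asyncio
import time


async def blocks_the_loop() -> None:
    time.sleep(5)  # flagged by Ruff's ASYNC rules: blocking sleep in an async function


async def plays_nicely() -> None:
    await asyncio.sleep(5)  # the non-blocking equivalent yields control back to the event loop
```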
|
||||
### Instrument your code
|
||||
|
||||
If you've resolved all of the obvious issues but the Scary Warning ™️ is still popping up, instrumenting your code can help you find the bottleneck. Hatchet's Python SDK provides [an OpenTelemetry Instrumentor](../home/opentelemetry.mdx), which makes it easy to export traces and spans from your Hatchet workers. If you have long-running tasks (or long start times), the traces can give you a better sense of what might be blocking. In particular, if some async operations appear to hang for significantly longer than they should, that's a good indication they're being blocked by something.
|
||||
|
||||
@@ -6,6 +6,13 @@ export default {
|
||||
},
|
||||
},
|
||||
|
||||
context: {
|
||||
title: "Context",
|
||||
theme: {
|
||||
toc: true,
|
||||
},
|
||||
},
|
||||
|
||||
"feature-clients": {
|
||||
title: "Feature Clients",
|
||||
theme: {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Hatchet Python SDK Reference
|
||||
|
||||
This is the Python SDK reference, documenting methods available for interacting with Hatchet resources. Check out the [user guide](../../home) for an introduction to getting your first tasks running.
|
||||
This is the Python SDK reference, documenting methods available for interacting with Hatchet resources. Check out the [user guide](../../home) for an introduction to getting your first tasks running.
|
||||
|
||||
## The Hatchet Python Client
|
||||
|
||||
@@ -13,7 +13,7 @@ Methods:
|
||||
| Name | Description |
|
||||
| -------------- | ------------------------------------------------------------------------------------------------------------- |
|
||||
| `worker` | Create a Hatchet worker on which to run workflows. |
|
||||
| `workflow` | Define a Hatchet workflow, which can then declare `task`s and be `run`, `scheduled`, and so on. |
|
||||
| `workflow` | Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedule`d, and so on. |
|
||||
| `task` | A decorator to transform a function into a standalone Hatchet task that runs as part of a workflow. |
|
||||
| `durable_task` | A decorator to transform a function into a standalone Hatchet _durable_ task that runs as part of a workflow. |
|
||||
|
||||
@@ -73,14 +73,14 @@ Create a Hatchet worker on which to run workflows.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------------- | ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ---------- |
|
||||
| `name` | `str` | The name of the worker. | _required_ |
|
||||
| `slots` | `int` | The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time. | `100` |
|
||||
| `durable_slots` | `int` | The number of durable workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time that are durable. | `1000` |
|
||||
| `labels` | `dict[str, Union[str, int]]` | A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels. | `{}` |
|
||||
| `workflows` | `list[BaseWorkflow[Any]]` | A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them. | `[]` |
|
||||
| `lifespan` | `LifespanFn \| None` | A lifespan function to run on the worker. This function will be called when the worker is started, and can be used to perform any setup or teardown tasks. | `None` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------------- | --------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ---------- |
|
||||
| `name` | `str` | The name of the worker. | _required_ |
|
||||
| `slots` | `int` | The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time. | `100` |
|
||||
| `durable_slots` | `int` | The number of durable workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time that are durable. | `1000` |
|
||||
| `labels` | `dict[str, str \| int] \| None` | A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels. | `None` |
|
||||
| `workflows` | `list[BaseWorkflow[Any]] \| None` | A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them. | `None` |
|
||||
| `lifespan` | `LifespanFn \| None` | A lifespan function to run on the worker. This function will be called when the worker is started, and can be used to perform any setup or teardown tasks. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -90,7 +90,7 @@ Returns:
|
||||
|
||||
#### `workflow`
|
||||
|
||||
Define a Hatchet workflow, which can then declare `task`s and be `run`, `scheduled`, and so on.
|
||||
Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedule`d, and so on.
|
||||
|
||||
Parameters:
|
||||
|
||||
@@ -98,15 +98,15 @@ Parameters:
|
||||
| ------------------ | -------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------- |
|
||||
| `name` | `str` | The name of the workflow. | _required_ |
|
||||
| `description` | `str \| None` | A description for the workflow. | `None` |
|
||||
| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the `input` to the tasks in the workflow. If no validator is provided, defaults to an `EmptyModel` under the hood. The `EmptyModel` is a Pydantic model with no fields specified, and with the `extra` config option set to `"allow"`. | `None` |
|
||||
| `on_events` | `list[str]` | A list of event triggers for the workflow - events which cause the workflow to be run. | `[]` |
|
||||
| `on_crons` | `list[str]` | A list of cron triggers for the workflow. | `[]` |
|
||||
| `input_validator` | `type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the `input` to the tasks in the workflow. If no validator is provided, defaults to an `EmptyModel` under the hood. The `EmptyModel` is a Pydantic model with no fields specified, and with the `extra` config option set to `"allow"`. | `None` |
|
||||
| `on_events` | `list[str] \| None` | A list of event triggers for the workflow - events which cause the workflow to be run. | `None` |
|
||||
| `on_crons` | `list[str] \| None` | A list of cron triggers for the workflow. | `None` |
|
||||
| `version` | `str \| None` | A version for the workflow. | `None` |
|
||||
| `sticky` | `StickyStrategy \| None` | A sticky strategy for the workflow. | `None` |
|
||||
| `default_priority` | `int` | The priority of the workflow. Higher values will cause this workflow to have priority in scheduling over other, lower priority ones. | `1` |
|
||||
| `concurrency` | `ConcurrencyExpression \| list[ConcurrencyExpression] \| None` | A concurrency object controlling the concurrency settings for this workflow. | `None` |
|
||||
| `task_defaults` | `TaskDefaults` | A `TaskDefaults` object controlling the default task settings for this workflow. | `TaskDefaults()` |
|
||||
| `default_filters` | `list[DefaultFilter]` | A list of filters to create with the workflow is created. Note that this is a helper to allow you to create filters "declaratively" without needing to make a separate API call once the workflow is created to create them. | `[]` |
|
||||
| `default_filters`  | `list[DefaultFilter] \| None`                                   | A list of filters to create when the workflow is created. Note that this is a helper to allow you to create filters "declaratively" without needing to make a separate API call to create them once the workflow is created.                                                      | `None`           |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -124,9 +124,9 @@ Parameters:
|
||||
| ----------------------- | -------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------- |
|
||||
| `name` | `str \| None` | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator. | `None` |
|
||||
| `description` | `str \| None` | An optional description for the task. | `None` |
|
||||
| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`. | `None` |
|
||||
| `on_events` | `list[str]` | A list of event triggers for the task - events which cause the task to be run. | `[]` |
|
||||
| `on_crons` | `list[str]` | A list of cron triggers for the task. | `[]` |
|
||||
| `input_validator` | `type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`. | `None` |
|
||||
| `on_events` | `list[str] \| None` | A list of event triggers for the task - events which cause the task to be run. | `None` |
|
||||
| `on_crons` | `list[str] \| None` | A list of cron triggers for the task. | `None` |
|
||||
| `version` | `str \| None` | A version for the task. | `None` |
|
||||
| `sticky` | `StickyStrategy \| None` | A sticky strategy for the task. | `None` |
|
||||
| `default_priority` | `int` | The priority of the task. Higher values will cause this task to have priority in scheduling. | `1` |
|
||||
@@ -134,11 +134,11 @@ Parameters:
|
||||
| `schedule_timeout` | `Duration` | The maximum time allowed for scheduling the task. | `timedelta(minutes=5)` |
|
||||
| `execution_timeout` | `Duration` | The maximum time allowed for executing the task. | `timedelta(seconds=60)` |
|
||||
| `retries` | `int` | The number of times to retry the task before failing. | `0` |
|
||||
| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` |
|
||||
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `{}` |
|
||||
| `rate_limits` | `list[RateLimit] \| None` | A list of rate limit configurations for the task. | `None` |
|
||||
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel] \| None` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `None` |
|
||||
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
|
||||
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |
|
||||
| `default_filters` | `list[DefaultFilter]` | A list of filters to create with the task is created. Note that this is a helper to allow you to create filters "declaratively" without needing to make a separate API call once the task is created to create them. | `[]` |
|
||||
| `default_filters`       | `list[DefaultFilter] \| None`                                   | A list of filters to create when the task is created. Note that this is a helper to allow you to create filters "declaratively" without needing to make a separate API call to create them once the task is created.   | `None`                  |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -156,9 +156,9 @@ Parameters:
|
||||
| ----------------------- | -------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------- |
|
||||
| `name` | `str \| None` | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator. | `None` |
|
||||
| `description` | `str \| None` | An optional description for the task. | `None` |
|
||||
| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`. | `None` |
|
||||
| `on_events` | `list[str]` | A list of event triggers for the task - events which cause the task to be run. | `[]` |
|
||||
| `on_crons` | `list[str]` | A list of cron triggers for the task. | `[]` |
|
||||
| `input_validator` | `type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`. | `None` |
|
||||
| `on_events` | `list[str] \| None` | A list of event triggers for the task - events which cause the task to be run. | `None` |
|
||||
| `on_crons` | `list[str] \| None` | A list of cron triggers for the task. | `None` |
|
||||
| `version` | `str \| None` | A version for the task. | `None` |
|
||||
| `sticky` | `StickyStrategy \| None` | A sticky strategy for the task. | `None` |
|
||||
| `default_priority` | `int` | The priority of the task. Higher values will cause this task to have priority in scheduling. | `1` |
|
||||
@@ -166,11 +166,11 @@ Parameters:
|
||||
| `schedule_timeout` | `Duration` | The maximum time allowed for scheduling the task. | `timedelta(minutes=5)` |
|
||||
| `execution_timeout` | `Duration` | The maximum time allowed for executing the task. | `timedelta(seconds=60)` |
|
||||
| `retries` | `int` | The number of times to retry the task before failing. | `0` |
|
||||
| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` |
|
||||
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `{}` |
|
||||
| `rate_limits` | `list[RateLimit] \| None` | A list of rate limit configurations for the task. | `None` |
|
||||
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel] \| None` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `None` |
|
||||
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
|
||||
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |
|
||||
| `default_filters` | `list[DefaultFilter]` | A list of filters to create with the task is created. Note that this is a helper to allow you to create filters "declaratively" without needing to make a separate API call once the task is created to create them. | `[]` |
|
||||
| `default_filters`       | `list[DefaultFilter] \| None`                                   | A list of filters to create when the task is created. Note that this is a helper to allow you to create filters "declaratively" without needing to make a separate API call to create them once the task is created.   | `None`                  |
|
||||
|
||||
Returns:
|
||||
|
||||
|
||||
330
frontend/docs/pages/sdks/python/context.mdx
Normal file
@@ -0,0 +1,330 @@
|
||||
# Context
|
||||
|
||||
The Hatchet Context class provides helper methods and useful data to tasks at runtime. It is passed as the second argument to all tasks and durable tasks.
|
||||
|
||||
There are two types of context classes you'll encounter:
|
||||
|
||||
- `Context`: The standard context for regular tasks with methods for logging, task output retrieval, cancellation, and more.
|
||||
- `DurableContext`: An extended context for durable tasks that includes additional methods for durable execution like `aio_wait_for` and `aio_sleep_for`.
|
||||
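As a quick orientation, here's a minimal sketch of where the context shows up (the task name and body are hypothetical; it is always the second argument):

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()


@hatchet.task()
def greet(input: EmptyModel, ctx: Context) -> dict[str, str]:
    # The context exposes run metadata and helpers at runtime.
    ctx.log(f"run {ctx.workflow_run_id}, attempt {ctx.attempt_number}")

    return {"status": "ok"}
```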
|
||||
## Context
|
||||
|
||||
### Methods
|
||||
|
||||
| Name | Description |
|
||||
| ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| `was_skipped` | Check if a given task was skipped. You can read about skipping in [the docs](../../home/conditional-workflows#skip_if). |
|
||||
| `task_output` | Get the output of a parent task in a DAG. |
|
||||
| `cancel` | Cancel the current task run. This will call the Hatchet API to cancel the step run and set the exit flag to True. |
|
||||
| `aio_cancel` | Cancel the current task run. This will call the Hatchet API to cancel the step run and set the exit flag to True. |
|
||||
| `done` | Check if the current task run has been cancelled. |
|
||||
| `log` | Log a line to the Hatchet API. This will send the log line to the Hatchet API and return immediately. |
|
||||
| `release_slot` | Manually release the slot for the current step run to free up a slot on the worker. Note that this is an advanced feature and should be used with caution. |
|
||||
| `put_stream` | Put a stream event to the Hatchet API. This will send the data to the Hatchet API and return immediately. You can then subscribe to the stream from a separate consumer. |
|
||||
| `refresh_timeout` | Refresh the timeout for the current task run. You can read about refreshing timeouts in [the docs](../../home/timeouts#refreshing-timeouts). |
|
||||
| `fetch_task_run_error` | A helper intended to be used in an on-failure step to retrieve the error that occurred in a specific upstream task run. |
|
||||
|
||||
### Attributes
|
||||
|
||||
#### `was_triggered_by_event`
|
||||
|
||||
A property that indicates whether the workflow was triggered by an event.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------ | ---------------------------------------------------------------- |
|
||||
| `bool` | True if the workflow was triggered by an event, False otherwise. |
|
||||
|
||||
#### `workflow_input`
|
||||
|
||||
The input to the workflow, as a dictionary. It's recommended to use the `input` parameter to the task (the first argument passed into the task at runtime) instead of this property.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------------------------- | -------------------------- |
|
||||
| `JSONSerializableMapping` | The input to the workflow. |
|
||||
|
||||
#### `lifespan`
|
||||
|
||||
The worker lifespan, if it exists. You can read about lifespans in [the docs](../../home/lifespans).
|
||||
|
||||
**Note: You'll need to cast the return type of this property to the type returned by your lifespan generator.**
|
||||
|
||||
#### `workflow_run_id`
|
||||
|
||||
The id of the current workflow run.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ----- | ----------------------------------- |
|
||||
| `str` | The id of the current workflow run. |
|
||||
|
||||
#### `retry_count`
|
||||
|
||||
The retry count of the current task run, which corresponds to the number of times the task has been retried.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ----- | ---------------------------------------- |
|
||||
| `int` | The retry count of the current task run. |
|
||||
|
||||
#### `attempt_number`
|
||||
|
||||
The attempt number of the current task run, which corresponds to the number of times the task has been attempted, including the initial attempt. This is one more than the retry count.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ----- | ------------------------------------------- |
|
||||
| `int` | The attempt number of the current task run. |
|
||||
|
||||
#### `additional_metadata`
|
||||
|
||||
The additional metadata sent with the current task run.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| --------------------------------- | --------------------------------------------------------------------------------------------------- |
|
||||
| `JSONSerializableMapping \| None` | The additional metadata sent with the current task run, or None if no additional metadata was sent. |
|
||||
|
||||
#### `parent_workflow_run_id`
|
||||
|
||||
The parent workflow run id of the current task run, if it exists. This is useful for knowing which workflow run spawned this run as a child.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------------- | --------------------------------------------------------------------------------- |
|
||||
| `str \| None` | The parent workflow run id of the current task run, or None if it does not exist. |
|
||||
|
||||
#### `priority`
|
||||
|
||||
The priority that the current task was run with.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------------- | --------------------------------------------------------------------- |
|
||||
| `int \| None` | The priority of the current task run, or None if no priority was set. |
|
||||
|
||||
#### `workflow_id`
|
||||
|
||||
The id of the workflow that this task belongs to.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------------- | ------------------------------------------------- |
|
||||
| `str \| None` | The id of the workflow that this task belongs to. |
|
||||
|
||||
#### `workflow_version_id`
|
||||
|
||||
The id of the workflow version that this task belongs to.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------------- | --------------------------------------------------------- |
|
||||
| `str \| None` | The id of the workflow version that this task belongs to. |
|
||||
|
||||
#### `task_run_errors`
|
||||
|
||||
A helper intended to be used in an on-failure step to retrieve the errors that occurred in upstream task runs.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ---------------- | -------------------------------------------------------- |
|
||||
| `dict[str, str]` | A dictionary mapping task names to their error messages. |
|
||||
|
||||
### Functions
|
||||
|
||||
#### `was_skipped`
|
||||
|
||||
Check if a given task was skipped. You can read about skipping in [the docs](../../home/conditional-workflows#skip_if).
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------ | ------------------------- | ------------------------------------------------- | ---------- |
|
||||
| `task` | `Task[TWorkflowInput, R]` | The task to check the status of (skipped or not). | _required_ |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------ | ---------------------------------------------- |
|
||||
| `bool` | True if the task was skipped, False otherwise. |
|
||||
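For example, a downstream task can branch on whether its parent was skipped. A minimal sketch (the workflow and task names are illustrative, and the parent would normally declare a `skip_if` condition):

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()
wf = hatchet.workflow(name="SkipExample")


@wf.task()
def maybe_skipped(input: EmptyModel, ctx: Context) -> dict[str, int]:
    # In practice this task would declare a `skip_if` condition.
    return {"value": 1}


@wf.task(parents=[maybe_skipped])
def downstream(input: EmptyModel, ctx: Context) -> dict[str, bool]:
    if ctx.was_skipped(maybe_skipped):
        return {"used_upstream": False}

    return {"used_upstream": True}
```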
|
||||
#### `task_output`
|
||||
|
||||
Get the output of a parent task in a DAG.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------ | ------------------------- | ------------------------------------------- | ---------- |
|
||||
| `task` | `Task[TWorkflowInput, R]` | The task whose output you want to retrieve. | _required_ |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ---- | ----------------------------------------------------------------------- |
|
||||
| `R` | The output of the parent task, validated against the task's validators. |
|
||||
|
||||
Raises:
|
||||
|
||||
| Type | Description |
|
||||
| ------------ | ------------------------------------------------------------------------ |
|
||||
| `ValueError` | If the task was skipped or if the step output for the task is not found. |
|
||||
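In a DAG, a child task typically reads its parent's output like this (a sketch; the workflow and task names are assumptions):

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()
wf = hatchet.workflow(name="DagExample")


@wf.task()
def parent(input: EmptyModel, ctx: Context) -> dict[str, int]:
    return {"count": 42}


@wf.task(parents=[parent])
def child(input: EmptyModel, ctx: Context) -> dict[str, int]:
    # Typed against `parent`'s return value; raises ValueError if the parent
    # was skipped or its output is missing.
    parent_result = ctx.task_output(parent)
    return {"doubled": parent_result["count"] * 2}
```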
|
||||
#### `cancel`
|
||||
|
||||
Cancel the current task run. This will call the Hatchet API to cancel the step run and set the exit flag to True.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------ | ----------- |
|
||||
| `None` | None |
|
||||
|
||||
#### `aio_cancel`
|
||||
|
||||
Cancel the current task run. This will call the Hatchet API to cancel the step run and set the exit flag to True.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------ | ----------- |
|
||||
| `None` | None |
|
||||
|
||||
#### `done`
|
||||
|
||||
Check if the current task run has been cancelled.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------ | --------------------------------------------------------- |
|
||||
| `bool` | True if the task run has been cancelled, False otherwise. |
|
||||
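A long-running task can poll `done` to exit cooperatively once the run has been cancelled. A minimal sketch (the polling loop and task name are illustrative):

```python
import asyncio

from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()


@hatchet.task()
async def poll_until_cancelled(input: EmptyModel, ctx: Context) -> None:
    while not ctx.done():
        # Do a small unit of work, then yield back to the event loop.
        await asyncio.sleep(1)
```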
|
||||
#### `log`
|
||||
|
||||
Log a line to the Hatchet API. This will send the log line to the Hatchet API and return immediately.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ---------------- | -------------------------------- | --------------------------------------------------------------------- | ---------- |
|
||||
| `line` | `str \| JSONSerializableMapping` | The line to log. Can be a string or a JSON serializable mapping. | _required_ |
|
||||
| `raise_on_error` | `bool` | If True, will raise an exception if the log fails. Defaults to False. | `False` |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------ | ----------- |
|
||||
| `None` | None |
|
||||
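For example (a sketch; the task name and payloads are illustrative), logging both a plain string and a JSON-serializable mapping:

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()


@hatchet.task()
def noisy_task(input: EmptyModel, ctx: Context) -> None:
    ctx.log("starting work")

    # Mappings are accepted too; pass raise_on_error=True if a failed log
    # should raise instead of being ignored.
    ctx.log({"stage": "halfway", "progress": 0.5}, raise_on_error=True)
```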
|
||||
#### `release_slot`
|
||||
|
||||
Manually release the slot for the current step run to free up a slot on the worker. Note that this is an advanced feature and should be used with caution.
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------ | ----------- |
|
||||
| `None` | None |
|
||||
|
||||
#### `put_stream`
|
||||
|
||||
Put a stream event to the Hatchet API. This will send the data to the Hatchet API and return immediately. You can then subscribe to the stream from a separate consumer.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------ | -------------- | -------------------------------------------------------------- | ---------- |
|
||||
| `data` | `str \| bytes` | The data to send to the Hatchet API. Can be a string or bytes. | _required_ |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------ | ----------- |
|
||||
| `None` | None |
|
||||
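A producer task streams chunks with `put_stream`, and a separate consumer subscribes to them via the run reference. A sketch modeled on the streaming example elsewhere in this change (names are illustrative):

```python
import asyncio

from hatchet_sdk import Context, EmptyModel, Hatchet
from hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType

hatchet = Hatchet()


@hatchet.task()
async def stream_chunks(input: EmptyModel, ctx: Context) -> None:
    for chunk in ["Happy ", "families ", "are ", "all ", "alike."]:
        ctx.put_stream(chunk)
        await asyncio.sleep(0.05)


async def consume() -> None:
    # Subscribe from a separate consumer, e.g. an API handler.
    ref = await stream_chunks.aio_run_no_wait()

    async for event in ref.stream():
        if event.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:
            print(event.payload, flush=True, end="")
```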
|
||||
#### `refresh_timeout`
|
||||
|
||||
Refresh the timeout for the current task run. You can read about refreshing timeouts in [the docs](../../home/timeouts#refreshing-timeouts).
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| -------------- | ------------------ | -------------------------------------------------------------------------------------------------- | ---------- |
|
||||
| `increment_by` | `str \| timedelta` | The amount of time to increment the timeout by. Can be a string (e.g. "5m") or a timedelta object. | _required_ |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------ | ----------- |
|
||||
| `None` | None |
|
||||
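For example, a task that knows it needs more time can extend its own execution timeout mid-run. A sketch (the task name and durations are illustrative):

```python
import time
from datetime import timedelta

from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()


@hatchet.task(execution_timeout=timedelta(minutes=1))
def slow_task(input: EmptyModel, ctx: Context) -> None:
    # Give this run five more minutes; a string like "5m" also works.
    ctx.refresh_timeout(timedelta(minutes=5))

    time.sleep(120)  # now safely within the extended timeout
```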
|
||||
#### `fetch_task_run_error`
|
||||
|
||||
A helper intended to be used in an on-failure step to retrieve the error that occurred in a specific upstream task run.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------ | ------------------------- | ------------------------------------------ | ---------- |
|
||||
| `task` | `Task[TWorkflowInput, R]` | The task whose error you want to retrieve. | _required_ |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------------- | ---------------------------------------------------------------- |
|
||||
| `str \| None` | The error message of the task run, or None if no error occurred. |
|
||||
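A typical use is an on-failure task that inspects which upstream task failed and why. A sketch (the workflow, task names, and the no-argument `on_failure_task` decorator usage are assumptions):

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()
wf = hatchet.workflow(name="FailureExample")


@wf.task(retries=1)
def flaky_task(input: EmptyModel, ctx: Context) -> None:
    raise RuntimeError("something went wrong")


@wf.on_failure_task()
def handle_failure(input: EmptyModel, ctx: Context) -> None:
    error = ctx.fetch_task_run_error(flaky_task)

    if error is not None:
        ctx.log({"failed_task": flaky_task.name, "error": error})
```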
|
||||
## DurableContext
|
||||
|
||||
Bases: `Context`
|
||||
|
||||
### Methods
|
||||
|
||||
| Name | Description |
|
||||
| --------------- | -------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `aio_wait_for` | Durably wait for either a sleep or an event. |
|
||||
| `aio_sleep_for` | Lightweight wrapper for durable sleep. Allows for shorthand usage of `ctx.aio_wait_for` when specifying a sleep condition. |
|
||||
|
||||
### Functions
|
||||
|
||||
#### `aio_wait_for`
|
||||
|
||||
Durably wait for either a sleep or an event.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------------- | -------------------------------------- | -------------------------------------------------------------------------------------------- | ---------- |
|
||||
| `signal_key` | `str` | The key to use for the durable event. This is used to identify the event in the Hatchet API. | _required_ |
|
||||
| `*conditions` | `SleepCondition \| UserEventCondition` | The conditions to wait for. Can be a SleepCondition or UserEventCondition. | `()` |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ---------------- | ------------------------------------------------ |
|
||||
| `dict[str, Any]` | A dictionary containing the results of the wait. |
|
||||
|
||||
Raises:
|
||||
|
||||
| Type | Description |
|
||||
| ------------ | ----------------------------------------------- |
|
||||
| `ValueError` | If the durable event listener is not available. |
|
||||
|
||||
#### `aio_sleep_for`
|
||||
|
||||
Lightweight wrapper for durable sleep. Allows for shorthand usage of `ctx.aio_wait_for` when specifying a sleep condition.
|
||||
|
||||
For more complicated conditions, use `ctx.aio_wait_for` directly.
|
||||
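Putting both together, a durable task might sleep durably and then wait for an external event with a sleep fallback. This is only a sketch: the import locations and constructor arguments for `SleepCondition` and `UserEventCondition`, the event key, and the durations are all assumptions.

```python
from datetime import timedelta

from hatchet_sdk import (
    DurableContext,
    EmptyModel,
    Hatchet,
    SleepCondition,
    UserEventCondition,
)

hatchet = Hatchet()


@hatchet.durable_task()
async def wait_for_approval(input: EmptyModel, ctx: DurableContext) -> dict[str, bool]:
    # Durable sleep: the wait is checkpointed, so it survives worker restarts.
    await ctx.aio_sleep_for(timedelta(minutes=10))

    # Durable wait: resumes when the event arrives or the fallback sleep elapses.
    await ctx.aio_wait_for(
        "approval",
        UserEventCondition(event_key="order:approved"),
        SleepCondition(timedelta(hours=24)),
    )

    return {"resumed": True}
```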
@@ -27,12 +27,12 @@ Create a new filter.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------------- | ------------------------- | ---------------------------------------------------- | ---------- |
|
||||
| `workflow_id` | `str` | The ID of the workflow to associate with the filter. | _required_ |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping` | The payload to send with the filter. | `{}` |
|
||||
| Name | Type | Description | Default |
|
||||
| ------------- | --------------------------------- | ---------------------------------------------------- | ---------- |
|
||||
| `workflow_id` | `str` | The ID of the workflow to associate with the filter. | _required_ |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping \| None` | The payload to send with the filter. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -113,12 +113,12 @@ Create a new filter.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------------- | ------------------------- | ---------------------------------------------------- | ---------- |
|
||||
| `workflow_id` | `str` | The ID of the workflow to associate with the filter. | _required_ |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping` | The payload to send with the filter. | `{}` |
|
||||
| Name | Type | Description | Default |
|
||||
| ------------- | --------------------------------- | ---------------------------------------------------- | ---------- |
|
||||
| `workflow_id` | `str` | The ID of the workflow to associate with the filter. | _required_ |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping \| None` | The payload to send with the filter. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
|
||||
@@ -154,12 +154,12 @@ IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workfl
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ------------------------- | ----------------------------------------------------- | ---------- |
|
||||
| `workflow_name` | `str` | The name of the workflow to trigger. | _required_ |
|
||||
| `input` | `JSONSerializableMapping` | The input data for the workflow run. | _required_ |
|
||||
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata associated with the workflow run. | `{}` |
|
||||
| `priority` | `int \| None` | The priority of the workflow run. | `None` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | --------------------------------- | ----------------------------------------------------- | ---------- |
|
||||
| `workflow_name` | `str` | The name of the workflow to trigger. | _required_ |
|
||||
| `input` | `JSONSerializableMapping` | The input data for the workflow run. | _required_ |
|
||||
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata associated with the workflow run. | `None` |
|
||||
| `priority` | `int \| None` | The priority of the workflow run. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -175,12 +175,12 @@ IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workfl
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ------------------------- | ----------------------------------------------------- | ---------- |
|
||||
| `workflow_name` | `str` | The name of the workflow to trigger. | _required_ |
|
||||
| `input` | `JSONSerializableMapping` | The input data for the workflow run. | _required_ |
|
||||
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata associated with the workflow run. | `{}` |
|
||||
| `priority` | `int \| None` | The priority of the workflow run. | `None` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | --------------------------------- | ----------------------------------------------------- | ---------- |
|
||||
| `workflow_name` | `str` | The name of the workflow to trigger. | _required_ |
|
||||
| `input` | `JSONSerializableMapping` | The input data for the workflow run. | _required_ |
|
||||
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata associated with the workflow run. | `None` |
|
||||
| `priority` | `int \| None` | The priority of the workflow run. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
|
||||
@@ -78,16 +78,16 @@ Retrieves a list of scheduled workflows based on provided filters.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------------------------ | ------------------------------------------ | ---------------------------------------------------- | ------- |
|
||||
| `offset` | `int \| None` | The offset to use in pagination. | `None` |
|
||||
| `limit` | `int \| None` | The maximum number of scheduled workflows to return. | `None` |
|
||||
| `workflow_id` | `str \| None` | The ID of the workflow to filter by. | `None` |
|
||||
| `parent_workflow_run_id` | `str \| None` | The ID of the parent workflow run to filter by. | `None` |
|
||||
| `statuses` | `list[ScheduledRunStatus] \| None` | A list of statuses to filter by. | `None` |
|
||||
| `additional_metadata` | `Optional[JSONSerializableMapping]` | Additional metadata to filter by. | `None` |
|
||||
| `order_by_field` | `Optional[ScheduledWorkflowsOrderByField]` | The field to order the results by. | `None` |
|
||||
| `order_by_direction` | `Optional[WorkflowRunOrderByDirection]` | The direction to order the results by. | `None` |
|
||||
| Name | Type | Description | Default |
|
||||
| ------------------------ | ---------------------------------------- | ---------------------------------------------------- | ------- |
|
||||
| `offset` | `int \| None` | The offset to use in pagination. | `None` |
|
||||
| `limit` | `int \| None` | The maximum number of scheduled workflows to return. | `None` |
|
||||
| `workflow_id` | `str \| None` | The ID of the workflow to filter by. | `None` |
|
||||
| `parent_workflow_run_id` | `str \| None` | The ID of the parent workflow run to filter by. | `None` |
|
||||
| `statuses` | `list[ScheduledRunStatus] \| None` | A list of statuses to filter by. | `None` |
|
||||
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata to filter by. | `None` |
|
||||
| `order_by_field` | `ScheduledWorkflowsOrderByField \| None` | The field to order the results by. | `None` |
|
||||
| `order_by_direction` | `WorkflowRunOrderByDirection \| None` | The direction to order the results by. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -154,16 +154,16 @@ Retrieves a list of scheduled workflows based on provided filters.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------------------------ | ------------------------------------------ | ---------------------------------------------------- | ------- |
|
||||
| `offset` | `int \| None` | The offset to use in pagination. | `None` |
|
||||
| `limit` | `int \| None` | The maximum number of scheduled workflows to return. | `None` |
|
||||
| `workflow_id` | `str \| None` | The ID of the workflow to filter by. | `None` |
|
||||
| `parent_workflow_run_id` | `str \| None` | The ID of the parent workflow run to filter by. | `None` |
|
||||
| `statuses` | `list[ScheduledRunStatus] \| None` | A list of statuses to filter by. | `None` |
|
||||
| `additional_metadata` | `Optional[JSONSerializableMapping]` | Additional metadata to filter by. | `None` |
|
||||
| `order_by_field` | `Optional[ScheduledWorkflowsOrderByField]` | The field to order the results by. | `None` |
|
||||
| `order_by_direction` | `Optional[WorkflowRunOrderByDirection]` | The direction to order the results by. | `None` |
|
||||
| Name | Type | Description | Default |
|
||||
| ------------------------ | ---------------------------------------- | ---------------------------------------------------- | ------- |
|
||||
| `offset` | `int \| None` | The offset to use in pagination. | `None` |
|
||||
| `limit` | `int \| None` | The maximum number of scheduled workflows to return. | `None` |
|
||||
| `workflow_id` | `str \| None` | The ID of the workflow to filter by. | `None` |
|
||||
| `parent_workflow_run_id` | `str \| None` | The ID of the parent workflow run to filter by. | `None` |
|
||||
| `statuses` | `list[ScheduledRunStatus] \| None` | A list of statuses to filter by. | `None` |
|
||||
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata to filter by. | `None` |
|
||||
| `order_by_field` | `ScheduledWorkflowsOrderByField \| None` | The field to order the results by. | `None` |
|
||||
| `order_by_direction` | `WorkflowRunOrderByDirection \| None` | The direction to order the results by. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
|
||||
@@ -44,29 +44,29 @@ Tasks within workflows can be defined with `@workflow.task()` or `@workflow.dura
|
||||
|
||||
Methods:
|
||||
|
||||
| Name | Description |
|
||||
| ---------------------- | -------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `task` | A decorator to transform a function into a Hatchet task that runs as part of a workflow. |
|
||||
| `durable_task` | A decorator to transform a function into a durable Hatchet task that runs as part of a workflow. |
|
||||
| `on_failure_task` | A decorator to transform a function into a Hatchet on-failure task that runs as the last step in a workflow with failures. |
|
||||
| `on_success_task` | A decorator to transform a function into a Hatchet on-success task that runs as the last step in a successful workflow. |
|
||||
| `run` | Run the workflow synchronously and wait for it to complete. |
|
||||
| `aio_run` | Run the workflow asynchronously and wait for it to complete. |
|
||||
| `run_no_wait` | Synchronously trigger a workflow run without waiting for it to complete. |
|
||||
| `aio_run_no_wait` | Asynchronously trigger a workflow run without waiting for it to complete. |
|
||||
| `run_many` | Run a workflow in bulk and wait for all runs to complete. |
|
||||
| `aio_run_many` | Run a workflow in bulk and wait for all runs to complete. |
|
||||
| `run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete. |
|
||||
| `aio_run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete. |
|
||||
| `schedule` | Schedule a workflow to run at a specific time. |
|
||||
| `aio_schedule` | Schedule a workflow to run at a specific time. |
|
||||
| `create_cron` | Create a cron job for the workflow. |
|
||||
| `aio_create_cron` | Create a cron job for the workflow. |
|
||||
| `create_bulk_run_item` | Create a bulk run item for the workflow. Intended for use with `run_many` methods. |
|
||||
| `list_runs` | List runs of the workflow. |
|
||||
| `aio_list_runs` | List runs of the workflow. |
|
||||
| `create_filter` | Create a new filter. |
|
||||
| `aio_create_filter` | Create a new filter. |
|
||||
| Name | Description |
|
||||
| ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| `task` | A decorator to transform a function into a Hatchet task that runs as part of a workflow. |
|
||||
| `durable_task` | A decorator to transform a function into a durable Hatchet task that runs as part of a workflow. |
|
||||
| `on_failure_task` | A decorator to transform a function into a Hatchet on-failure task that runs as the last step in a workflow that had at least one task fail. |
|
||||
| `on_success_task` | A decorator to transform a function into a Hatchet on-success task that runs as the last step in a workflow that had all upstream tasks succeed. |
|
||||
| `run` | Run the workflow synchronously and wait for it to complete. |
|
||||
| `aio_run` | Run the workflow asynchronously and wait for it to complete. |
|
||||
| `run_no_wait` | Synchronously trigger a workflow run without waiting for it to complete. |
|
||||
| `aio_run_no_wait` | Asynchronously trigger a workflow run without waiting for it to complete. |
|
||||
| `run_many` | Run a workflow in bulk and wait for all runs to complete. |
|
||||
| `aio_run_many` | Run a workflow in bulk and wait for all runs to complete. |
|
||||
| `run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete. |
|
||||
| `aio_run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete. |
|
||||
| `schedule` | Schedule a workflow to run at a specific time. |
|
||||
| `aio_schedule` | Schedule a workflow to run at a specific time. |
|
||||
| `create_cron` | Create a cron job for the workflow. |
|
||||
| `aio_create_cron` | Create a cron job for the workflow. |
|
||||
| `create_bulk_run_item` | Create a bulk run item for the workflow. This is intended to be used in conjunction with the various `run_many` methods. |
|
||||
| `list_runs` | List runs of the workflow. |
|
||||
| `aio_list_runs` | List runs of the workflow. |
|
||||
| `create_filter` | Create a new filter. |
|
||||
| `aio_create_filter` | Create a new filter. |
|
||||
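As a quick illustration of the run methods (a sketch; `MyWorkflow`, `MyInput`, and `first_task` are assumptions, not part of the SDK):

```python
from pydantic import BaseModel

from hatchet_sdk import Context, Hatchet

hatchet = Hatchet()


class MyInput(BaseModel):
    user_id: str


wf = hatchet.workflow(name="MyWorkflow", input_validator=MyInput)


@wf.task()
def first_task(input: MyInput, ctx: Context) -> dict[str, str]:
    return {"user_id": input.user_id}


async def trigger() -> None:
    # Wait for the run to complete and return its result.
    result = await wf.aio_run(MyInput(user_id="123"))
    print(result)

    # Or fire-and-forget, then subscribe to the result later via the reference.
    ref = await wf.aio_run_no_wait(MyInput(user_id="123"))
    print(await ref.aio_result())
```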
|
||||
### Attributes
|
||||
|
||||
@@ -100,21 +100,21 @@ A decorator to transform a function into a Hatchet task that runs as part of a w
|
||||
|
||||
Parameters:

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `name` | `str \| None` | The name of the task. Defaults to the name of the function. | `None` |
| `schedule_timeout` | `Duration` | The maximum time to wait for the task to be scheduled. | `timedelta(minutes=5)` |
| `execution_timeout` | `Duration` | The maximum time to wait for the task to complete. | `timedelta(seconds=60)` |
| `parents` | `list[Task[TWorkflowInput, Any]]` | A list of tasks that are parents of the task. | `[]` |
| `retries` | `int` | The number of times to retry the task before failing. | `0` |
| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` |
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary determining worker assignment. | `{}` |
| `backoff_factor` | `float \| None` | The backoff factor for exponential backoff in retries. | `None` |
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds for retries with exponential backoff. | `None` |
| `concurrency` | `list[ConcurrencyExpression]` | A list of concurrency expressions for the task. | `[]` |
| `wait_for` | `list[Condition \| OrGroup]` | A list of conditions that must be met before the task can run. | `[]` |
| `skip_if` | `list[Condition \| OrGroup]` | A list of conditions that, if met, will cause the task to be skipped. | `[]` |
| `cancel_if` | `list[Condition \| OrGroup]` | A list of conditions that, if met, will cause the task to be canceled. | `[]` |

| Name | Type | Description | Default |
| ---- | ---- | ----------- | ------- |
| `name` | `str \| None` | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator. | `None` |
| `schedule_timeout` | `Duration` | The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time. | `timedelta(minutes=5)` |
| `execution_timeout` | `Duration` | The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time. | `timedelta(seconds=60)` |
| `parents` | `list[Task[TWorkflowInput, Any]] \| None` | A list of tasks that are parents of the task. Note: Parents must be defined before their children. | `None` |
| `retries` | `int` | The number of times to retry the task before failing. | `0` |
| `rate_limits` | `list[RateLimit] \| None` | A list of rate limit configurations for the task. | `None` |
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel] \| None` | A dictionary of desired worker labels that determine to which worker the task should be assigned. See documentation and examples on affinity and worker labels for more details. | `None` |
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |
| `concurrency` | `list[ConcurrencyExpression] \| None` | A list of concurrency expressions for the task. | `None` |
| `wait_for` | `list[Condition \| OrGroup] \| None` | A list of conditions that must be met before the task can run. | `None` |
| `skip_if` | `list[Condition \| OrGroup] \| None` | A list of conditions that, if met, will cause the task to be skipped. | `None` |
| `cancel_if` | `list[Condition \| OrGroup] \| None` | A list of conditions that, if met, will cause the task to be canceled. | `None` |
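As a hedged sketch of how several of these parameters combine on one task (the workflow, input model, and values are illustrative, and the parent-output accessor `ctx.task_output` is used as commonly documented; verify against the `Context` reference):

```python
from datetime import timedelta

from pydantic import BaseModel

from hatchet_sdk import Context, Hatchet

hatchet = Hatchet()


class EtlInput(BaseModel):
    source: str


etl = hatchet.workflow(name="etl", input_validator=EtlInput)


@etl.task(execution_timeout=timedelta(minutes=2))
def extract(input: EtlInput, ctx: Context) -> dict[str, list[int]]:
    return {"rows": [1, 2, 3]}


@etl.task(
    parents=[extract],  # parents must already be defined above
    retries=3,  # retry up to three times before failing
    backoff_factor=2.0,  # exponential backoff between retries
    backoff_max_seconds=60,
    schedule_timeout=timedelta(minutes=10),
)
def load(input: EtlInput, ctx: Context) -> dict[str, int]:
    rows = ctx.task_output(extract)["rows"]  # read the parent task's output
    return {"loaded": len(rows)}
```
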
Returns:
|
||||
|
||||
@@ -126,25 +126,27 @@ Returns:
|
||||
|
||||
A decorator to transform a function into a durable Hatchet task that runs as part of a workflow.
|
||||
|
||||
**IMPORTANT:** This decorator creates a _durable_ task, which works using Hatchet's durable execution capabilities.
|
||||
**IMPORTANT:** This decorator creates a _durable_ task, which works using Hatchet's durable execution capabilities. This is an advanced feature of Hatchet.
|
||||
|
||||
See the Hatchet docs for more information on durable execution to decide if this is right for you.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ----------------------- | --------------------------------- | ---------------------------------------------------------------------- | ----------------------- |
|
||||
| `name` | `str \| None` | The name of the task. Defaults to the name of the function. | `None` |
|
||||
| `schedule_timeout` | `Duration` | The maximum time to wait for the task to be scheduled. | `timedelta(minutes=5)` |
|
||||
| `execution_timeout` | `Duration` | The maximum time to wait for the task to complete. | `timedelta(seconds=60)` |
|
||||
| `parents` | `list[Task[TWorkflowInput, Any]]` | A list of tasks that are parents of the task. | `[]` |
|
||||
| `retries` | `int` | The number of times to retry the task before failing. | `0` |
|
||||
| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` |
|
||||
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary determining worker assignment. | `{}` |
|
||||
| `backoff_factor` | `float \| None` | The backoff factor for exponential backoff in retries. | `None` |
|
||||
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds for retries with exponential backoff. | `None` |
|
||||
| `concurrency` | `list[ConcurrencyExpression]` | A list of concurrency expressions for the task. | `[]` |
|
||||
| `wait_for` | `list[Condition \| OrGroup]` | A list of conditions that must be met before the task can run. | `[]` |
|
||||
| `skip_if` | `list[Condition \| OrGroup]` | A list of conditions that, if met, will cause the task to be skipped. | `[]` |
|
||||
| `cancel_if` | `list[Condition \| OrGroup]` | A list of conditions that, if met, will cause the task to be canceled. | `[]` |
|
||||
| Name | Type | Description | Default |
|
||||
| ----------------------- | ----------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------- |
|
||||
| `name` | `str \| None` | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator. | `None` |
|
||||
| `schedule_timeout` | `Duration` | The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time. | `timedelta(minutes=5)` |
|
||||
| `execution_timeout` | `Duration` | The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time. | `timedelta(seconds=60)` |
|
||||
| `parents` | `list[Task[TWorkflowInput, Any]] \| None` | A list of tasks that are parents of the task. Note: Parents must be defined before their children. | `None` |
|
||||
| `retries` | `int` | The number of times to retry the task before failing. | `0` |
|
||||
| `rate_limits` | `list[RateLimit] \| None` | A list of rate limit configurations for the task. | `None` |
|
||||
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel] \| None` | A dictionary of desired worker labels that determine to which worker the task should be assigned. See documentation and examples on affinity and worker labels for more details. | `None` |
|
||||
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
|
||||
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |
|
||||
| `concurrency` | `list[ConcurrencyExpression] \| None` | A list of concurrency expressions for the task. | `None` |
|
||||
| `wait_for` | `list[Condition \| OrGroup] \| None` | A list of conditions that must be met before the task can run. | `None` |
|
||||
| `skip_if` | `list[Condition \| OrGroup] \| None` | A list of conditions that, if met, will cause the task to be skipped. | `None` |
|
||||
| `cancel_if` | `list[Condition \| OrGroup] \| None` | A list of conditions that, if met, will cause the task to be canceled. | `None` |
|
||||
|
||||
Returns:
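As a usage sketch only for the `durable_task` decorator described above (durable execution is an advanced feature; `DurableContext` and the `aio_sleep_for` call are assumptions about the durable API here, so verify against the durable-execution docs):

```python
from datetime import timedelta

from hatchet_sdk import DurableContext, EmptyModel, Hatchet

hatchet = Hatchet()

wf = hatchet.workflow(name="durable-example")


@wf.durable_task(execution_timeout=timedelta(hours=1))
async def wait_then_continue(input: EmptyModel, ctx: DurableContext) -> dict[str, str]:
    # Durable sleep: the wait is checkpointed, so it survives worker restarts.
    await ctx.aio_sleep_for(duration=timedelta(minutes=5))
    return {"status": "resumed after durable sleep"}
```
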
@@ -158,16 +160,16 @@ A decorator to transform a function into a Hatchet on-failure task that runs as
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ----------------------------- | ------------------------------------------------------------------------------------ | ----------------------- |
|
||||
| `name` | `str \| None` | The name of the on-failure task. Defaults to the name of the function. | `None` |
|
||||
| `schedule_timeout` | `Duration` | The maximum time to wait for the task to be scheduled. | `timedelta(minutes=5)` |
|
||||
| `execution_timeout` | `Duration` | The maximum time to wait for the task to complete. | `timedelta(seconds=60)` |
|
||||
| `retries` | `int` | The number of times to retry the on-failure task before failing. | `0` |
|
||||
| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the on-failure task. | `[]` |
|
||||
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
|
||||
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |
|
||||
| `concurrency` | `list[ConcurrencyExpression]` | A list of concurrency expressions for the on-failure task. | `[]` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- | ----------------------- |
|
||||
| `name` | `str \| None` | The name of the on-failure task. If not specified, defaults to the name of the function being wrapped by the `on_failure_task` decorator. | `None` |
|
||||
| `schedule_timeout` | `Duration` | The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time. | `timedelta(minutes=5)` |
|
||||
| `execution_timeout` | `Duration` | The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time. | `timedelta(seconds=60)` |
|
||||
| `retries` | `int` | The number of times to retry the on-failure task before failing. | `0` |
|
||||
| `rate_limits` | `list[RateLimit] \| None` | A list of rate limit configurations for the on-failure task. | `None` |
|
||||
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
|
||||
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |
|
||||
| `concurrency` | `list[ConcurrencyExpression] \| None` | A list of concurrency expressions for the on-failure task. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -181,16 +183,16 @@ A decorator to transform a function into a Hatchet on-success task that runs as
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ----------------------------- | ------------------------------------------------------------------------------------ | ----------------------- |
|
||||
| `name` | `str \| None` | The name of the on-success task. Defaults to the name of the function. | `None` |
|
||||
| `schedule_timeout` | `Duration` | The maximum time to wait for the task to be scheduled. | `timedelta(minutes=5)` |
|
||||
| `execution_timeout` | `Duration` | The maximum time to wait for the task to complete. | `timedelta(seconds=60)` |
|
||||
| `retries` | `int` | The number of times to retry the on-success task before failing. | `0` |
|
||||
| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the on-success task. | `[]` |
|
||||
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
|
||||
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |
|
||||
| `concurrency` | `list[ConcurrencyExpression]` | A list of concurrency expressions for the on-success task. | `[]` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------- | ----------------------- |
|
||||
| `name` | `str \| None` | The name of the on-success task. If not specified, defaults to the name of the function being wrapped by the `on_success_task` decorator. | `None` |
|
||||
| `schedule_timeout` | `Duration` | The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time. | `timedelta(minutes=5)` |
|
||||
| `execution_timeout` | `Duration` | The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time. | `timedelta(seconds=60)` |
|
||||
| `retries` | `int` | The number of times to retry the on-success task before failing. | `0` |
|
||||
| `rate_limits` | `list[RateLimit] \| None` | A list of rate limit configurations for the on-success task. | `None` |
|
||||
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
|
||||
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |
|
||||
| `concurrency` | `list[ConcurrencyExpression] \| None` | A list of concurrency expressions for the on-success task. | `None` |
|
||||
|
||||
Returns:
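To illustrate how the failure and success hooks attach to a workflow (a hedged sketch; the workflow and the notification bodies are stand-ins):

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()

pipeline = hatchet.workflow(name="pipeline")


@pipeline.task()
def step_one(input: EmptyModel, ctx: Context) -> dict[str, bool]:
    return {"ok": True}


@pipeline.on_failure_task(retries=1)
def notify_on_failure(input: EmptyModel, ctx: Context) -> None:
    # Runs once, after the workflow finishes, only if at least one task failed.
    print("pipeline failed, alerting on-call")


@pipeline.on_success_task()
def record_success(input: EmptyModel, ctx: Context) -> None:
    # Runs only if every upstream task succeeded.
    print("pipeline succeeded")
```
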
@@ -221,7 +223,7 @@ Returns:
|
||||
|
||||
Run the workflow asynchronously and wait for it to complete.
|
||||
|
||||
This method triggers a workflow run, blocks until completion, and returns the final result.
|
||||
This method triggers a workflow run, awaits until completion, and returns the final result.
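For example (a hedged sketch, reusing the `greeter` workflow and `GreetInput` model from the earlier illustration):

```python
import asyncio


async def main() -> None:
    # Awaits the run to completion and returns the workflow's final result.
    result = await greeter.aio_run(GreetInput(name="async world"))
    print(result)


asyncio.run(main())
```
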
Parameters:
|
||||
|
||||
@@ -304,7 +306,9 @@ Returns:
|
||||
|
||||
#### `run_many_no_wait`
|
||||
|
||||
Run a workflow in bulk without waiting for all runs to complete. This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.
|
||||
Run a workflow in bulk without waiting for all runs to complete.
|
||||
|
||||
This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.
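A minimal sketch of the non-blocking bulk pattern, reusing the illustrative `greeter` workflow from above (the blocking `.result()` accessor on the returned references is an assumption; confirm against the run-reference docs):

```python
# Build trigger configs for each run, then fire them all without blocking.
items = [
    greeter.create_bulk_run_item(input=GreetInput(name=f"user-{i}"))
    for i in range(10)
]

refs = greeter.run_many_no_wait(items)

# Do other work here; collect outputs later.
outputs = [ref.result() for ref in refs]
```
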
Parameters:
|
||||
|
||||
@@ -320,7 +324,9 @@ Returns:
|
||||
|
||||
#### `aio_run_many_no_wait`
|
||||
|
||||
Run a workflow in bulk without waiting for all runs to complete. This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.
|
||||
Run a workflow in bulk without waiting for all runs to complete.
|
||||
|
||||
This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.
|
||||
|
||||
Parameters:
|
||||
|
||||
@@ -376,13 +382,13 @@ Create a cron job for the workflow.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ------------------------- | ----------------------------------------------------------------- | ------------------------------------ |
|
||||
| `cron_name` | `str` | The name of the cron job. | _required_ |
|
||||
| `expression` | `str` | The cron expression that defines the schedule for the cron job. | _required_ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata for the cron job. | `{}` |
|
||||
| `priority` | `int \| None` | The priority of the cron job. Must be between 1 and 3, inclusive. | `None` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | --------------------------------- | ----------------------------------------------------------------- | ------------------------------------ |
|
||||
| `cron_name` | `str` | The name of the cron job. | _required_ |
|
||||
| `expression` | `str` | The cron expression that defines the schedule for the cron job. | _required_ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata for the cron job. | `None` |
|
||||
| `priority` | `int \| None` | The priority of the cron job. Must be between 1 and 3, inclusive. | `None` |
|
||||
|
||||
Returns:
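As a usage sketch for `create_cron` (parameter names follow the table above; the cron name, expression, and metadata are illustrative):

```python
# Schedule the workflow to run every day at 02:00.
cron = greeter.create_cron(
    cron_name="nightly-greeting",
    expression="0 2 * * *",
    input=GreetInput(name="night owl"),
    additional_metadata={"team": "platform"},  # illustrative metadata
)
```
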
@@ -396,13 +402,13 @@ Create a cron job for the workflow.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ------------------------- | ----------------------------------------------------------------- | ------------------------------------ |
|
||||
| `cron_name` | `str` | The name of the cron job. | _required_ |
|
||||
| `expression` | `str` | The cron expression that defines the schedule for the cron job. | _required_ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata for the cron job. | `{}` |
|
||||
| `priority` | `int \| None` | The priority of the cron job. Must be between 1 and 3, inclusive. | `None` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | --------------------------------- | ----------------------------------------------------------------- | ------------------------------------ |
|
||||
| `cron_name` | `str` | The name of the cron job. | _required_ |
|
||||
| `expression` | `str` | The cron expression that defines the schedule for the cron job. | _required_ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata for the cron job. | `None` |
|
||||
| `priority` | `int \| None` | The priority of the cron job. Must be between 1 and 3, inclusive. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -416,17 +422,17 @@ Create a bulk run item for the workflow. This is intended to be used in conjunct
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------- | ------------------------ | ---------------------------------------- | ------------------------------------ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `key` | `str \| None` | The key for the workflow run. | `None` |
|
||||
| `options` | `TriggerWorkflowOptions` | Additional options for the workflow run. | `TriggerWorkflowOptions()` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------- | ------------------------ | ----------------------------------------------------------------------------------------------------------- | ------------------------------------ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `key` | `str \| None` | The key for the workflow run. This is used to identify the run in the bulk operation and for deduplication. | `None` |
|
||||
| `options` | `TriggerWorkflowOptions` | Additional options for the workflow run. | `TriggerWorkflowOptions()` |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| -------------------------- | -------------------------------------------------------------------------------------------- |
|
||||
| `WorkflowRunTriggerConfig` | A `WorkflowRunTriggerConfig` object to trigger the workflow run, used in `run_many` methods. |
|
||||
| Type | Description |
|
||||
| -------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `WorkflowRunTriggerConfig` | A `WorkflowRunTriggerConfig` object that can be used to trigger the workflow run, which you then pass into the `run_many` methods. |
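A hedged sketch of combining `create_bulk_run_item` with the blocking `run_many` (the `key` values are illustrative and assumed to deduplicate runs within the bulk operation, as the table above describes):

```python
items = [
    greeter.create_bulk_run_item(
        input=GreetInput(name=name),
        key=f"greet-{name}",  # identifies the run in the bulk operation
    )
    for name in ["ada", "grace", "alan"]
]

# Blocks until every run in the batch has finished.
results = greeter.run_many(items)
```
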
#### `list_runs`
|
||||
|
||||
@@ -484,11 +490,11 @@ Create a new filter.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------------ | ------------------------- | ------------------------------------------ | ---------- |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping` | The payload to send with the filter. | `{}` |
|
||||
| Name | Type | Description | Default |
|
||||
| ------------ | --------------------------------- | ------------------------------------------ | ---------- |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping \| None` | The payload to send with the filter. | `None` |
|
||||
|
||||
Returns:
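As a usage sketch for `create_filter` (the expression syntax and the idea that it gates event-triggered runs are assumptions here; see the filters documentation for the exact semantics):

```python
# Only allow runs whose payload matches this expression within the given scope.
greeter.create_filter(
    expression="input.customer_id == 'customer-123'",
    scope="customer-123",
    payload={"customer_id": "customer-123"},
)
```
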
@@ -502,11 +508,11 @@ Create a new filter.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------------ | ------------------------- | ------------------------------------------ | ---------- |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping` | The payload to send with the filter. | `{}` |
|
||||
| Name | Type | Description | Default |
|
||||
| ------------ | --------------------------------- | ------------------------------------------ | ---------- |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping \| None` | The payload to send with the filter. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -520,31 +526,33 @@ Bases: `BaseWorkflow[TWorkflowInput]`, `Generic[TWorkflowInput, R]`
|
||||
|
||||
Methods:
|
||||
|
||||
| Name | Description |
|
||||
| ---------------------- | ---------------------------------------------------------------------------------- |
|
||||
| `run` | Synchronously trigger a workflow run without waiting for it to complete. |
|
||||
| `aio_run` | Run the workflow asynchronously and wait for it to complete. |
|
||||
| `run_no_wait` | Run the workflow synchronously and wait for it to complete. |
|
||||
| `aio_run_no_wait` | Asynchronously trigger a workflow run without waiting for it to complete. |
|
||||
| `run_many` | Run a workflow in bulk and wait for all runs to complete. |
|
||||
| `aio_run_many` | Run a workflow in bulk and wait for all runs to complete. |
|
||||
| `run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete. |
|
||||
| `aio_run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete. |
|
||||
| `schedule` | Schedule a workflow to run at a specific time. |
|
||||
| `aio_schedule` | Schedule a workflow to run at a specific time. |
|
||||
| `create_cron` | Create a cron job for the workflow. |
|
||||
| `aio_create_cron` | Create a cron job for the workflow. |
|
||||
| `create_bulk_run_item` | Create a bulk run item for the workflow. Intended for use with `run_many` methods. |
|
||||
| `list_runs` | List runs of the workflow. |
|
||||
| `aio_list_runs` | List runs of the workflow. |
|
||||
| `create_filter` | Create a new filter. |
|
||||
| `aio_create_filter` | Create a new filter. |
|
||||
| Name | Description |
|
||||
| ---------------------- | ------------------------------------------------------------------------------------------------------------------------ |
|
||||
| `run` | Run the workflow synchronously and wait for it to complete. |
|
||||
| `aio_run` | Run the workflow asynchronously and wait for it to complete. |
|
||||
| `run_no_wait` | Trigger a workflow run without waiting for it to complete. |
|
||||
| `aio_run_no_wait` | Asynchronously trigger a workflow run without waiting for it to complete. |
|
||||
| `run_many` | Run a workflow in bulk and wait for all runs to complete. |
|
||||
| `aio_run_many` | Run a workflow in bulk and wait for all runs to complete. |
|
||||
| `run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete. |
|
||||
| `aio_run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete. |
|
||||
| `schedule` | Schedule a workflow to run at a specific time. |
|
||||
| `aio_schedule` | Schedule a workflow to run at a specific time. |
|
||||
| `create_cron` | Create a cron job for the workflow. |
|
||||
| `aio_create_cron` | Create a cron job for the workflow. |
|
||||
| `create_bulk_run_item` | Create a bulk run item for the workflow. This is intended to be used in conjunction with the various `run_many` methods. |
|
||||
| `list_runs` | List runs of the workflow. |
|
||||
| `aio_list_runs` | List runs of the workflow. |
|
||||
| `create_filter` | Create a new filter. |
|
||||
| `aio_create_filter` | Create a new filter. |
|
||||
|
||||
### Functions
|
||||
|
||||
#### `run`
|
||||
|
||||
Synchronously trigger a workflow run without waiting for it to complete. This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs.
|
||||
Run the workflow synchronously and wait for it to complete.
|
||||
|
||||
This method triggers a workflow run, blocks until completion, and returns the extracted result.
|
||||
|
||||
Parameters:
|
||||
|
||||
@@ -555,15 +563,15 @@ Parameters:
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ---- | ------------------------------------------------------------------------- |
|
||||
| `R` | A `WorkflowRunRef` object representing the reference to the workflow run. |
|
||||
| Type | Description |
|
||||
| ---- | ----------------------------------------------- |
|
||||
| `R` | The extracted result of the workflow execution. |
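The `Standalone` class wraps a single task declared directly on the client. A rough, non-authoritative sketch of declaring one and calling `run` (the task name, input model, and body are invented for illustration):

```python
from pydantic import BaseModel

from hatchet_sdk import Context, Hatchet

hatchet = Hatchet()


class SumInput(BaseModel):
    a: int
    b: int


# A standalone task: effectively a single-task workflow created on the client.
@hatchet.task(name="add", input_validator=SumInput)
def add(input: SumInput, ctx: Context) -> dict[str, int]:
    return {"total": input.a + input.b}


# Blocks until the task finishes and returns its extracted result.
result = add.run(SumInput(a=2, b=3))
```
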
#### `aio_run`
|
||||
|
||||
Run the workflow asynchronously and wait for it to complete.
|
||||
|
||||
This method triggers a workflow run, blocks until completion, and returns the final result.
|
||||
This method triggers a workflow run, awaits until completion, and returns the extracted result.
|
||||
|
||||
Parameters:
|
||||
|
||||
@@ -574,15 +582,15 @@ Parameters:
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ---- | ----------------------------------------------------- |
|
||||
| `R` | The result of the workflow execution as a dictionary. |
|
||||
| Type | Description |
|
||||
| ---- | ----------------------------------------------- |
|
||||
| `R` | The extracted result of the workflow execution. |
|
||||
|
||||
#### `run_no_wait`
|
||||
|
||||
Run the workflow synchronously and wait for it to complete.
|
||||
Trigger a workflow run without waiting for it to complete.
|
||||
|
||||
This method triggers a workflow run, blocks until completion, and returns the final result.
|
||||
This method triggers a workflow run and immediately returns a reference to the run without blocking while the workflow runs.
|
||||
|
||||
Parameters:
|
||||
|
||||
@@ -593,9 +601,9 @@ Parameters:
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------------------------------- | ----------------------------------------------------- |
|
||||
| `TaskRunRef[TWorkflowInput, R]` | The result of the workflow execution as a dictionary. |
|
||||
| Type | Description |
|
||||
| ------------------------------- | --------------------------------------------------------------------- |
|
||||
| `TaskRunRef[TWorkflowInput, R]` | A `TaskRunRef` object representing the reference to the workflow run. |
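Continuing the standalone sketch above, the no-wait variant returns a reference immediately (retrieving the output later via a blocking `.result()` accessor is an assumption; verify against the run-reference docs):

```python
# Trigger without blocking; a reference to the run is returned right away.
ref = add.run_no_wait(SumInput(a=40, b=2))

# ...do other work...

# Collect the output later from the reference.
total = ref.result()
```
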
#### `aio_run_no_wait`
|
||||
|
||||
@@ -610,9 +618,9 @@ Parameters:
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------------------------------- | ------------------------------------------------------------------------- |
|
||||
| `TaskRunRef[TWorkflowInput, R]` | A `WorkflowRunRef` object representing the reference to the workflow run. |
|
||||
| Type | Description |
|
||||
| ------------------------------- | --------------------------------------------------------------------- |
|
||||
| `TaskRunRef[TWorkflowInput, R]` | A `TaskRunRef` object representing the reference to the workflow run. |
|
||||
|
||||
#### `run_many`
|
||||
|
||||
@@ -648,7 +656,9 @@ Returns:
|
||||
|
||||
#### `run_many_no_wait`
|
||||
|
||||
Run a workflow in bulk without waiting for all runs to complete. This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.
|
||||
Run a workflow in bulk without waiting for all runs to complete.
|
||||
|
||||
This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.
|
||||
|
||||
Parameters:
|
||||
|
||||
@@ -664,7 +674,9 @@ Returns:
|
||||
|
||||
#### `aio_run_many_no_wait`
|
||||
|
||||
Run a workflow in bulk without waiting for all runs to complete. This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.
|
||||
Run a workflow in bulk without waiting for all runs to complete.
|
||||
|
||||
This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.
|
||||
|
||||
Parameters:
|
||||
|
||||
@@ -720,13 +732,13 @@ Create a cron job for the workflow.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ------------------------- | ----------------------------------------------------------------- | ------------------------------------ |
|
||||
| `cron_name` | `str` | The name of the cron job. | _required_ |
|
||||
| `expression` | `str` | The cron expression that defines the schedule for the cron job. | _required_ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata for the cron job. | `{}` |
|
||||
| `priority` | `int \| None` | The priority of the cron job. Must be between 1 and 3, inclusive. | `None` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | --------------------------------- | ----------------------------------------------------------------- | ------------------------------------ |
|
||||
| `cron_name` | `str` | The name of the cron job. | _required_ |
|
||||
| `expression` | `str` | The cron expression that defines the schedule for the cron job. | _required_ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata for the cron job. | `None` |
|
||||
| `priority` | `int \| None` | The priority of the cron job. Must be between 1 and 3, inclusive. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -740,13 +752,13 @@ Create a cron job for the workflow.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | ------------------------- | ----------------------------------------------------------------- | ------------------------------------ |
|
||||
| `cron_name` | `str` | The name of the cron job. | _required_ |
|
||||
| `expression` | `str` | The cron expression that defines the schedule for the cron job. | _required_ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata for the cron job. | `{}` |
|
||||
| `priority` | `int \| None` | The priority of the cron job. Must be between 1 and 3, inclusive. | `None` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------------------- | --------------------------------- | ----------------------------------------------------------------- | ------------------------------------ |
|
||||
| `cron_name` | `str` | The name of the cron job. | _required_ |
|
||||
| `expression` | `str` | The cron expression that defines the schedule for the cron job. | _required_ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata for the cron job. | `None` |
|
||||
| `priority` | `int \| None` | The priority of the cron job. Must be between 1 and 3, inclusive. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -760,17 +772,17 @@ Create a bulk run item for the workflow. This is intended to be used in conjunct
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| --------- | ------------------------ | ---------------------------------------- | ------------------------------------ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `key` | `str \| None` | The key for the workflow run. | `None` |
|
||||
| `options` | `TriggerWorkflowOptions` | Additional options for the workflow run. | `TriggerWorkflowOptions()` |
|
||||
| Name | Type | Description | Default |
|
||||
| --------- | ------------------------ | ----------------------------------------------------------------------------------------------------------- | ------------------------------------ |
|
||||
| `input` | `TWorkflowInput` | The input data for the workflow. | `cast(TWorkflowInput, EmptyModel())` |
|
||||
| `key` | `str \| None` | The key for the workflow run. This is used to identify the run in the bulk operation and for deduplication. | `None` |
|
||||
| `options` | `TriggerWorkflowOptions` | Additional options for the workflow run. | `TriggerWorkflowOptions()` |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| -------------------------- | -------------------------------------------------------------------------------------------- |
|
||||
| `WorkflowRunTriggerConfig` | A `WorkflowRunTriggerConfig` object to trigger the workflow run, used in `run_many` methods. |
|
||||
| Type | Description |
|
||||
| -------------------------- | ---------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `WorkflowRunTriggerConfig` | A `WorkflowRunTriggerConfig` object that can be used to trigger the workflow run, which you then pass into the `run_many` methods. |
|
||||
|
||||
#### `list_runs`
|
||||
|
||||
@@ -828,11 +840,11 @@ Create a new filter.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------------ | ------------------------- | ------------------------------------------ | ---------- |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping` | The payload to send with the filter. | `{}` |
|
||||
| Name | Type | Description | Default |
|
||||
| ------------ | --------------------------------- | ------------------------------------------ | ---------- |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping \| None` | The payload to send with the filter. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
@@ -846,11 +858,11 @@ Create a new filter.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| ------------ | ------------------------- | ------------------------------------------ | ---------- |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping` | The payload to send with the filter. | `{}` |
|
||||
| Name | Type | Description | Default |
|
||||
| ------------ | --------------------------------- | ------------------------------------------ | ---------- |
|
||||
| `expression` | `str` | The expression to evaluate for the filter. | _required_ |
|
||||
| `scope` | `str` | The scope for the filter. | _required_ |
|
||||
| `payload` | `JSONSerializableMapping \| None` | The payload to send with the filter. | `None` |
|
||||
|
||||
Returns:
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"github.com/hatchet-dev/hatchet/internal/telemetry"
|
||||
"github.com/hatchet-dev/hatchet/pkg/repository/postgres/dbsqlc"
|
||||
"github.com/hatchet-dev/hatchet/pkg/repository/postgres/sqlchelpers"
|
||||
"github.com/hatchet-dev/hatchet/pkg/repository/v1/sqlcv1"
|
||||
)
|
||||
|
||||
func (o *OLAPControllerImpl) runTenantTaskStatusUpdates(ctx context.Context) func() {
|
||||
@@ -45,6 +46,10 @@ func (o *OLAPControllerImpl) updateTaskStatuses(ctx context.Context, tenantId st
|
||||
payloads := make([]tasktypes.NotifyFinalizedPayload, 0, len(rows))
|
||||
|
||||
for _, row := range rows {
|
||||
if row.ReadableStatus != sqlcv1.V1ReadableStatusOlapCOMPLETED && row.ReadableStatus != sqlcv1.V1ReadableStatusOlapCANCELLED && row.ReadableStatus != sqlcv1.V1ReadableStatusOlapFAILED {
|
||||
continue
|
||||
}
|
||||
|
||||
payloads = append(payloads, tasktypes.NotifyFinalizedPayload{
|
||||
ExternalId: sqlchelpers.UUIDToStr(row.ExternalId),
|
||||
Status: row.ReadableStatus,
|
||||
|
||||
@@ -1599,6 +1599,7 @@ type WorkflowEvent struct {
|
||||
StepRetries *int32 `protobuf:"varint,8,opt,name=stepRetries,proto3,oneof" json:"stepRetries,omitempty"`
|
||||
// (optional) the retry count of this step
|
||||
RetryCount *int32 `protobuf:"varint,9,opt,name=retryCount,proto3,oneof" json:"retryCount,omitempty"`
|
||||
EventIndex *int64 `protobuf:"varint,10,opt,name=eventIndex,proto3,oneof" json:"eventIndex,omitempty"`
|
||||
}
|
||||
|
||||
func (x *WorkflowEvent) Reset() {
|
||||
@@ -1696,6 +1697,13 @@ func (x *WorkflowEvent) GetRetryCount() int32 {
|
||||
return 0
|
||||
}
|
||||
|
||||
func (x *WorkflowEvent) GetEventIndex() int64 {
|
||||
if x != nil && x.EventIndex != nil {
|
||||
return *x.EventIndex
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
type WorkflowRunEvent struct {
|
||||
state protoimpl.MessageState
|
||||
sizeCache protoimpl.SizeCache
|
||||
@@ -2464,7 +2472,7 @@ var file_dispatcher_proto_rawDesc = []byte{
|
||||
0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x71,
|
||||
0x75, 0x65, 0x73, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
|
||||
0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72,
|
||||
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x22, 0xa5, 0x03, 0x0a, 0x0d, 0x57,
|
||||
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x22, 0xd9, 0x03, 0x0a, 0x0d, 0x57,
|
||||
0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x24, 0x0a, 0x0d,
|
||||
0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20,
|
||||
0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e,
|
||||
@@ -2489,186 +2497,189 @@ var file_dispatcher_proto_rawDesc = []byte{
|
||||
0x73, 0x74, 0x65, 0x70, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x88, 0x01, 0x01, 0x12, 0x23,
|
||||
0x0a, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x09, 0x20, 0x01,
|
||||
0x28, 0x05, 0x48, 0x01, 0x52, 0x0a, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74,
|
||||
0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x73, 0x74, 0x65, 0x70, 0x52, 0x65, 0x74, 0x72,
|
||||
0x69, 0x65, 0x73, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x72, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75,
|
||||
0x6e, 0x74, 0x22, 0xdb, 0x01, 0x0a, 0x10, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52,
|
||||
0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66,
|
||||
0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d,
|
||||
0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x33, 0x0a,
|
||||
0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e,
|
||||
0x32, 0x15, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76,
|
||||
0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79,
|
||||
0x70, 0x65, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73,
|
||||
0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f,
|
||||
0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d,
|
||||
0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d,
|
||||
0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x28, 0x0a, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74,
|
||||
0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x52, 0x75,
|
||||
0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x73,
|
||||
0x22, 0xbe, 0x01, 0x0a, 0x0d, 0x53, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x73, 0x75,
|
||||
0x6c, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18,
|
||||
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64,
|
||||
0x12, 0x26, 0x0a, 0x0e, 0x73, 0x74, 0x65, 0x70, 0x52, 0x65, 0x61, 0x64, 0x61, 0x62, 0x6c, 0x65,
|
||||
0x49, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x74, 0x65, 0x70, 0x52, 0x65,
|
||||
0x61, 0x64, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x6a, 0x6f, 0x62, 0x52,
|
||||
0x75, 0x6e, 0x49, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6a, 0x6f, 0x62, 0x52,
|
||||
0x75, 0x6e, 0x49, 0x64, 0x12, 0x19, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x04, 0x20,
|
||||
0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x12,
|
||||
0x1b, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48,
|
||||
0x01, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06,
|
||||
0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x6f, 0x75, 0x74, 0x70, 0x75,
|
||||
0x74, 0x22, 0x7f, 0x0a, 0x0d, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61,
|
||||
0x74, 0x61, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18,
|
||||
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64,
|
||||
0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
|
||||
0x70, 0x61, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20,
|
||||
0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x63, 0x61,
|
||||
0x6c, 0x6c, 0x65, 0x72, 0x46, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01,
|
||||
0x28, 0x09, 0x52, 0x0e, 0x63, 0x61, 0x6c, 0x6c, 0x65, 0x72, 0x46, 0x69, 0x6c, 0x65, 0x6e, 0x61,
|
||||
0x6d, 0x65, 0x22, 0x17, 0x0a, 0x15, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44,
|
||||
0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x6c, 0x0a, 0x10, 0x48,
|
||||
0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
|
||||
0x1a, 0x0a, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28,
|
||||
0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12, 0x3c, 0x0a, 0x0b, 0x68,
|
||||
0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x41, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b,
|
||||
0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62,
|
||||
0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0b, 0x68, 0x65,
|
||||
0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x41, 0x74, 0x22, 0x13, 0x0a, 0x11, 0x48, 0x65, 0x61,
|
||||
0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x65,
|
||||
0x0a, 0x15, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74,
|
||||
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52,
|
||||
0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70,
|
||||
0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x2e, 0x0a, 0x12, 0x69, 0x6e, 0x63, 0x72, 0x65, 0x6d, 0x65,
|
||||
0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x42, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28,
|
||||
0x09, 0x52, 0x12, 0x69, 0x6e, 0x63, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65,
|
||||
0x6f, 0x75, 0x74, 0x42, 0x79, 0x22, 0x52, 0x0a, 0x16, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68,
|
||||
0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12,
|
||||
0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x41, 0x74, 0x18, 0x01, 0x20, 0x01,
|
||||
0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74,
|
||||
0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09,
|
||||
0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x41, 0x74, 0x22, 0x32, 0x0a, 0x12, 0x52, 0x65, 0x6c,
|
||||
0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
|
||||
0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01,
|
||||
0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x22, 0x15, 0x0a,
|
||||
0x13, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70,
|
||||
0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x37, 0x0a, 0x04, 0x53, 0x44, 0x4b, 0x53, 0x12, 0x0b, 0x0a, 0x07,
|
||||
0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x06, 0x0a, 0x02, 0x47, 0x4f, 0x10,
|
||||
0x01, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x59, 0x54, 0x48, 0x4f, 0x4e, 0x10, 0x02, 0x12, 0x0e, 0x0a,
|
||||
0x0a, 0x54, 0x59, 0x50, 0x45, 0x53, 0x43, 0x52, 0x49, 0x50, 0x54, 0x10, 0x03, 0x2a, 0x4e, 0x0a,
|
||||
0x0a, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x12, 0x0a, 0x0e, 0x53,
|
||||
0x54, 0x41, 0x52, 0x54, 0x5f, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x00, 0x12,
|
||||
0x13, 0x0a, 0x0f, 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x5f, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x52,
|
||||
0x55, 0x4e, 0x10, 0x01, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x54, 0x41, 0x52, 0x54, 0x5f, 0x47, 0x45,
|
||||
0x54, 0x5f, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x10, 0x02, 0x2a, 0xa2, 0x01,
|
||||
0x0a, 0x17, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e,
|
||||
0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a, 0x1c, 0x47, 0x52, 0x4f,
|
||||
0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50,
|
||||
0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x20, 0x0a, 0x1c, 0x47,
|
||||
0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54,
|
||||
0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x22, 0x0a,
|
||||
0x1e, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54,
|
||||
0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x44, 0x10,
|
||||
0x02, 0x12, 0x1f, 0x0a, 0x1b, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45,
|
||||
0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44,
|
||||
0x10, 0x03, 0x2a, 0xac, 0x01, 0x0a, 0x13, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63, 0x74, 0x69, 0x6f,
|
||||
0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x54,
|
||||
0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e,
|
||||
0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1b, 0x0a, 0x17, 0x53, 0x54, 0x45, 0x50, 0x5f,
|
||||
0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54,
|
||||
0x45, 0x44, 0x10, 0x01, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45,
|
||||
0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45,
|
||||
0x44, 0x10, 0x02, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e,
|
||||
0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x12,
|
||||
0x20, 0x0a, 0x1c, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59,
|
||||
0x50, 0x45, 0x5f, 0x41, 0x43, 0x4b, 0x4e, 0x4f, 0x57, 0x4c, 0x45, 0x44, 0x47, 0x45, 0x44, 0x10,
|
||||
0x04, 0x2a, 0x65, 0x0a, 0x0c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x79, 0x70,
|
||||
0x65, 0x12, 0x19, 0x0a, 0x15, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59,
|
||||
0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1a, 0x0a, 0x16,
|
||||
0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54,
|
||||
0x45, 0x50, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x01, 0x12, 0x1e, 0x0a, 0x1a, 0x52, 0x45, 0x53, 0x4f,
|
||||
0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x57, 0x4f, 0x52, 0x4b, 0x46, 0x4c,
|
||||
0x4f, 0x57, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x02, 0x2a, 0xfe, 0x01, 0x0a, 0x11, 0x52, 0x65, 0x73,
|
||||
0x6f, 0x75, 0x72, 0x63, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1f,
|
||||
0x0a, 0x1b, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54,
|
||||
0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12,
|
||||
0x1f, 0x0a, 0x1b, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e,
|
||||
0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x01,
|
||||
0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45,
|
||||
0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45,
|
||||
0x44, 0x10, 0x02, 0x12, 0x1e, 0x0a, 0x1a, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f,
|
||||
0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45,
|
||||
0x44, 0x10, 0x03, 0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f,
|
||||
0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x41, 0x4e, 0x43, 0x45,
|
||||
0x4c, 0x4c, 0x45, 0x44, 0x10, 0x04, 0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52,
|
||||
0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x54, 0x49,
|
||||
0x4d, 0x45, 0x44, 0x5f, 0x4f, 0x55, 0x54, 0x10, 0x05, 0x12, 0x1e, 0x0a, 0x1a, 0x52, 0x45, 0x53,
|
||||
0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45,
|
||||
0x5f, 0x53, 0x54, 0x52, 0x45, 0x41, 0x4d, 0x10, 0x06, 0x2a, 0x3c, 0x0a, 0x14, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70,
|
||||
0x65, 0x12, 0x24, 0x0a, 0x20, 0x57, 0x4f, 0x52, 0x4b, 0x46, 0x4c, 0x4f, 0x57, 0x5f, 0x52, 0x55,
|
||||
0x4e, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x49, 0x4e,
|
||||
0x49, 0x53, 0x48, 0x45, 0x44, 0x10, 0x00, 0x32, 0xf8, 0x06, 0x0a, 0x0a, 0x44, 0x69, 0x73, 0x70,
|
||||
0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x08, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74,
|
||||
0x65, 0x72, 0x12, 0x16, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73,
|
||||
0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f,
|
||||
0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x33, 0x0a, 0x06, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x12,
|
||||
0x14, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x52, 0x65,
|
||||
0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64,
|
||||
0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x00, 0x30, 0x01, 0x12, 0x35, 0x0a, 0x08, 0x4c, 0x69,
|
||||
0x73, 0x74, 0x65, 0x6e, 0x56, 0x32, 0x12, 0x14, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c,
|
||||
0x69, 0x73, 0x74, 0x65, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x41,
|
||||
0x73, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x00, 0x30,
|
||||
0x01, 0x12, 0x34, 0x0a, 0x09, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x12, 0x11,
|
||||
0x2e, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
|
||||
0x74, 0x1a, 0x12, 0x2e, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x73,
|
||||
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x52, 0x0a, 0x19, 0x53, 0x75, 0x62, 0x73, 0x63,
|
||||
0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76,
|
||||
0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x2e, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65,
|
||||
0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73,
|
||||
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0e, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
|
||||
0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12, 0x53, 0x0a, 0x17, 0x53,
|
||||
0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
|
||||
0x6f, 0x77, 0x52, 0x75, 0x6e, 0x73, 0x12, 0x1f, 0x2e, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69,
|
||||
0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x73,
|
||||
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x11, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
|
||||
0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x28, 0x01, 0x30, 0x01,
|
||||
0x12, 0x3f, 0x0a, 0x13, 0x53, 0x65, 0x6e, 0x64, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63, 0x74, 0x69,
|
||||
0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x10, 0x2e, 0x53, 0x74, 0x65, 0x70, 0x41, 0x63,
|
||||
0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x1a, 0x14, 0x2e, 0x41, 0x63, 0x74, 0x69,
|
||||
0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
|
||||
0x00, 0x12, 0x47, 0x0a, 0x17, 0x53, 0x65, 0x6e, 0x64, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65,
|
||||
0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x14, 0x2e, 0x47,
|
||||
0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65,
|
||||
0x6e, 0x74, 0x1a, 0x14, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74,
|
||||
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3c, 0x0a, 0x10, 0x50, 0x75,
|
||||
0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x12, 0x0e,
|
||||
0x2e, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x16,
|
||||
0x2e, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65,
|
||||
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x46, 0x0a, 0x0b, 0x55, 0x6e, 0x73, 0x75,
|
||||
0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x12, 0x19, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72,
|
||||
0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65,
|
||||
0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x55, 0x6e, 0x73, 0x75, 0x62,
|
||||
0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00,
|
||||
0x12, 0x43, 0x0a, 0x0e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f,
|
||||
0x75, 0x74, 0x12, 0x16, 0x2e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65,
|
||||
0x6f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x52, 0x65, 0x66,
|
||||
0x88, 0x01, 0x01, 0x12, 0x23, 0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x64, 0x65,
|
||||
0x78, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x03, 0x48, 0x02, 0x52, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74,
|
||||
0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x73, 0x74, 0x65,
|
||||
0x70, 0x52, 0x65, 0x74, 0x72, 0x69, 0x65, 0x73, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x72, 0x65, 0x74,
|
||||
0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x0d, 0x0a, 0x0b, 0x5f, 0x65, 0x76, 0x65, 0x6e,
|
||||
0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0xdb, 0x01, 0x0a, 0x10, 0x57, 0x6f, 0x72, 0x6b, 0x66,
|
||||
0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x24, 0x0a, 0x0d, 0x77,
|
||||
0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01,
|
||||
0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x49,
|
||||
0x64, 0x12, 0x33, 0x0a, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x18, 0x02,
|
||||
0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52,
|
||||
0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x52, 0x09, 0x65, 0x76, 0x65,
|
||||
0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54,
|
||||
0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a,
|
||||
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
|
||||
0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e,
|
||||
0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x28, 0x0a, 0x07, 0x72, 0x65,
|
||||
0x73, 0x75, 0x6c, 0x74, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x53, 0x74,
|
||||
0x65, 0x70, 0x52, 0x75, 0x6e, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x52, 0x07, 0x72, 0x65, 0x73,
|
||||
0x75, 0x6c, 0x74, 0x73, 0x22, 0xbe, 0x01, 0x0a, 0x0d, 0x53, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e,
|
||||
0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75,
|
||||
0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52,
|
||||
0x75, 0x6e, 0x49, 0x64, 0x12, 0x26, 0x0a, 0x0e, 0x73, 0x74, 0x65, 0x70, 0x52, 0x65, 0x61, 0x64,
|
||||
0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x73, 0x74,
|
||||
0x65, 0x70, 0x52, 0x65, 0x61, 0x64, 0x61, 0x62, 0x6c, 0x65, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08,
|
||||
0x6a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
|
||||
0x6a, 0x6f, 0x62, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x19, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f,
|
||||
0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72,
|
||||
0x88, 0x01, 0x01, 0x12, 0x1b, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20,
|
||||
0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x88, 0x01, 0x01,
|
||||
0x42, 0x08, 0x0a, 0x06, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x42, 0x09, 0x0a, 0x07, 0x5f, 0x6f,
|
||||
0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0x7f, 0x0a, 0x0d, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64,
|
||||
0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75,
|
||||
0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52,
|
||||
0x75, 0x6e, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01,
|
||||
0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75,
|
||||
0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x26,
|
||||
0x0a, 0x0e, 0x63, 0x61, 0x6c, 0x6c, 0x65, 0x72, 0x46, 0x69, 0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65,
|
||||
0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x63, 0x61, 0x6c, 0x6c, 0x65, 0x72, 0x46, 0x69,
|
||||
0x6c, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x22, 0x17, 0x0a, 0x15, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69,
|
||||
0x64, 0x65, 0x73, 0x44, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
|
||||
0x6c, 0x0a, 0x10, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x71, 0x75,
|
||||
0x65, 0x73, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x18,
|
||||
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x77, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x49, 0x64, 0x12,
|
||||
0x3c, 0x0a, 0x0b, 0x68, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x41, 0x74, 0x18, 0x02,
|
||||
0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72,
|
||||
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70,
|
||||
0x52, 0x0b, 0x68, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x41, 0x74, 0x22, 0x13, 0x0a,
|
||||
0x11, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
|
||||
0x73, 0x65, 0x22, 0x65, 0x0a, 0x15, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d,
|
||||
0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73,
|
||||
0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
|
||||
0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x2e, 0x0a, 0x12, 0x69, 0x6e, 0x63,
|
||||
0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x42, 0x79, 0x18,
|
||||
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x69, 0x6e, 0x63, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74,
|
||||
0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x42, 0x79, 0x22, 0x52, 0x0a, 0x16, 0x52, 0x65, 0x66,
|
||||
0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f,
|
||||
0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3a, 0x0a, 0x0b, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65,
|
||||
0x53, 0x6c, 0x6f, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c,
|
||||
0x6f, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x52, 0x65, 0x6c, 0x65,
|
||||
0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
|
||||
0x00, 0x12, 0x4f, 0x0a, 0x12, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65,
|
||||
0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x1a, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74,
|
||||
0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, 0x71, 0x75,
|
||||
0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b,
|
||||
0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65,
|
||||
0x22, 0x00, 0x42, 0x47, 0x5a, 0x45, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d,
|
||||
0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74,
|
||||
0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65,
|
||||
0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65,
|
||||
0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f,
|
||||
0x74, 0x6f, 0x33,
|
||||
0x6e, 0x73, 0x65, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x41, 0x74,
|
||||
0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e,
|
||||
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61,
|
||||
0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x41, 0x74, 0x22, 0x32, 0x0a,
|
||||
0x12, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x71, 0x75,
|
||||
0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64,
|
||||
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49,
|
||||
0x64, 0x22, 0x15, 0x0a, 0x13, 0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74,
|
||||
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2a, 0x37, 0x0a, 0x04, 0x53, 0x44, 0x4b, 0x53,
|
||||
0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x06, 0x0a,
|
||||
0x02, 0x47, 0x4f, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x59, 0x54, 0x48, 0x4f, 0x4e, 0x10,
|
||||
0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x54, 0x59, 0x50, 0x45, 0x53, 0x43, 0x52, 0x49, 0x50, 0x54, 0x10,
|
||||
0x03, 0x2a, 0x4e, 0x0a, 0x0a, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12,
|
||||
0x12, 0x0a, 0x0e, 0x53, 0x54, 0x41, 0x52, 0x54, 0x5f, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x52, 0x55,
|
||||
0x4e, 0x10, 0x00, 0x12, 0x13, 0x0a, 0x0f, 0x43, 0x41, 0x4e, 0x43, 0x45, 0x4c, 0x5f, 0x53, 0x54,
|
||||
0x45, 0x50, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x01, 0x12, 0x17, 0x0a, 0x13, 0x53, 0x54, 0x41, 0x52,
|
||||
0x54, 0x5f, 0x47, 0x45, 0x54, 0x5f, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x10,
|
||||
0x02, 0x2a, 0xa2, 0x01, 0x0a, 0x17, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63,
|
||||
0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x20, 0x0a,
|
||||
0x1c, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54,
|
||||
0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12,
|
||||
0x20, 0x0a, 0x1c, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45, 0x56, 0x45,
|
||||
0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10,
|
||||
0x01, 0x12, 0x22, 0x0a, 0x1e, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x45,
|
||||
0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45,
|
||||
0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x1f, 0x0a, 0x1b, 0x47, 0x52, 0x4f, 0x55, 0x50, 0x5f, 0x4b,
|
||||
0x45, 0x59, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x41,
|
||||
0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x2a, 0xac, 0x01, 0x0a, 0x13, 0x53, 0x74, 0x65, 0x70, 0x41,
|
||||
0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b,
|
||||
0x0a, 0x17, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50,
|
||||
0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x1b, 0x0a, 0x17, 0x53,
|
||||
0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53,
|
||||
0x54, 0x41, 0x52, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x1d, 0x0a, 0x19, 0x53, 0x54, 0x45, 0x50,
|
||||
0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50,
|
||||
0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x1a, 0x0a, 0x16, 0x53, 0x54, 0x45, 0x50, 0x5f,
|
||||
0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46, 0x41, 0x49, 0x4c, 0x45,
|
||||
0x44, 0x10, 0x03, 0x12, 0x20, 0x0a, 0x1c, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x45, 0x56, 0x45, 0x4e,
|
||||
0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x41, 0x43, 0x4b, 0x4e, 0x4f, 0x57, 0x4c, 0x45, 0x44,
|
||||
0x47, 0x45, 0x44, 0x10, 0x04, 0x2a, 0x65, 0x0a, 0x0c, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63,
|
||||
0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x19, 0x0a, 0x15, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43,
|
||||
0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00,
|
||||
0x12, 0x1a, 0x0a, 0x16, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50,
|
||||
0x45, 0x5f, 0x53, 0x54, 0x45, 0x50, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x01, 0x12, 0x1e, 0x0a, 0x1a,
|
||||
0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x57, 0x4f,
|
||||
0x52, 0x4b, 0x46, 0x4c, 0x4f, 0x57, 0x5f, 0x52, 0x55, 0x4e, 0x10, 0x02, 0x2a, 0xfe, 0x01, 0x0a,
|
||||
0x11, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x79,
|
||||
0x70, 0x65, 0x12, 0x1f, 0x0a, 0x1b, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45,
|
||||
0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57,
|
||||
0x4e, 0x10, 0x00, 0x12, 0x1f, 0x0a, 0x1b, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f,
|
||||
0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x41, 0x52, 0x54,
|
||||
0x45, 0x44, 0x10, 0x01, 0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45,
|
||||
0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43, 0x4f, 0x4d, 0x50,
|
||||
0x4c, 0x45, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x1e, 0x0a, 0x1a, 0x52, 0x45, 0x53, 0x4f, 0x55,
|
||||
0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x46,
|
||||
0x41, 0x49, 0x4c, 0x45, 0x44, 0x10, 0x03, 0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45, 0x53, 0x4f, 0x55,
|
||||
0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x43,
|
||||
0x41, 0x4e, 0x43, 0x45, 0x4c, 0x4c, 0x45, 0x44, 0x10, 0x04, 0x12, 0x21, 0x0a, 0x1d, 0x52, 0x45,
|
||||
0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50,
|
||||
0x45, 0x5f, 0x54, 0x49, 0x4d, 0x45, 0x44, 0x5f, 0x4f, 0x55, 0x54, 0x10, 0x05, 0x12, 0x1e, 0x0a,
|
||||
0x1a, 0x52, 0x45, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f,
|
||||
0x54, 0x59, 0x50, 0x45, 0x5f, 0x53, 0x54, 0x52, 0x45, 0x41, 0x4d, 0x10, 0x06, 0x2a, 0x3c, 0x0a,
|
||||
0x14, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e,
|
||||
0x74, 0x54, 0x79, 0x70, 0x65, 0x12, 0x24, 0x0a, 0x20, 0x57, 0x4f, 0x52, 0x4b, 0x46, 0x4c, 0x4f,
|
||||
0x57, 0x5f, 0x52, 0x55, 0x4e, 0x5f, 0x45, 0x56, 0x45, 0x4e, 0x54, 0x5f, 0x54, 0x59, 0x50, 0x45,
|
||||
0x5f, 0x46, 0x49, 0x4e, 0x49, 0x53, 0x48, 0x45, 0x44, 0x10, 0x00, 0x32, 0xf8, 0x06, 0x0a, 0x0a,
|
||||
0x44, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x12, 0x3d, 0x0a, 0x08, 0x52, 0x65,
|
||||
0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x12, 0x16, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52,
|
||||
0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17,
|
||||
0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x52, 0x65, 0x67, 0x69, 0x73, 0x74, 0x65, 0x72, 0x52,
|
||||
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x33, 0x0a, 0x06, 0x4c, 0x69, 0x73,
|
||||
0x74, 0x65, 0x6e, 0x12, 0x14, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74,
|
||||
0x65, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x41, 0x73, 0x73, 0x69,
|
||||
0x67, 0x6e, 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x00, 0x30, 0x01, 0x12, 0x35,
|
||||
0x0a, 0x08, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x56, 0x32, 0x12, 0x14, 0x2e, 0x57, 0x6f, 0x72,
|
||||
0x6b, 0x65, 0x72, 0x4c, 0x69, 0x73, 0x74, 0x65, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
|
||||
0x1a, 0x0f, 0x2e, 0x41, 0x73, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x41, 0x63, 0x74, 0x69, 0x6f,
|
||||
0x6e, 0x22, 0x00, 0x30, 0x01, 0x12, 0x34, 0x0a, 0x09, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65,
|
||||
0x61, 0x74, 0x12, 0x11, 0x2e, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61, 0x74, 0x52, 0x65,
|
||||
0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x12, 0x2e, 0x48, 0x65, 0x61, 0x72, 0x74, 0x62, 0x65, 0x61,
|
||||
0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x52, 0x0a, 0x19, 0x53,
|
||||
0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c,
|
||||
0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x21, 0x2e, 0x53, 0x75, 0x62, 0x73, 0x63,
|
||||
0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76,
|
||||
0x65, 0x6e, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0e, 0x2e, 0x57, 0x6f,
|
||||
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x30, 0x01, 0x12,
|
||||
0x53, 0x0a, 0x17, 0x53, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f,
|
||||
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x73, 0x12, 0x1f, 0x2e, 0x53, 0x75, 0x62,
|
||||
0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x54, 0x6f, 0x57, 0x6f, 0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77,
|
||||
0x52, 0x75, 0x6e, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x11, 0x2e, 0x57, 0x6f,
|
||||
0x72, 0x6b, 0x66, 0x6c, 0x6f, 0x77, 0x52, 0x75, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00,
|
||||
0x28, 0x01, 0x30, 0x01, 0x12, 0x3f, 0x0a, 0x13, 0x53, 0x65, 0x6e, 0x64, 0x53, 0x74, 0x65, 0x70,
|
||||
0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x10, 0x2e, 0x53, 0x74,
|
||||
0x65, 0x70, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x1a, 0x14, 0x2e,
|
||||
0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f,
|
||||
0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x47, 0x0a, 0x17, 0x53, 0x65, 0x6e, 0x64, 0x47, 0x72, 0x6f,
|
||||
0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74,
|
||||
0x12, 0x14, 0x2e, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x4b, 0x65, 0x79, 0x41, 0x63, 0x74, 0x69, 0x6f,
|
||||
0x6e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x1a, 0x14, 0x2e, 0x41, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x45,
|
||||
0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3c,
|
||||
0x0a, 0x10, 0x50, 0x75, 0x74, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61,
|
||||
0x74, 0x61, 0x12, 0x0e, 0x2e, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61,
|
||||
0x74, 0x61, 0x1a, 0x16, 0x2e, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x73, 0x44, 0x61,
|
||||
0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x46, 0x0a, 0x0b,
|
||||
0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x12, 0x19, 0x2e, 0x57, 0x6f,
|
||||
0x72, 0x6b, 0x65, 0x72, 0x55, 0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52,
|
||||
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1a, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x55,
|
||||
0x6e, 0x73, 0x75, 0x62, 0x73, 0x63, 0x72, 0x69, 0x62, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
|
||||
0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x0e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54,
|
||||
0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x12, 0x16, 0x2e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68,
|
||||
0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17,
|
||||
0x2e, 0x52, 0x65, 0x66, 0x72, 0x65, 0x73, 0x68, 0x54, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x52,
|
||||
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x3a, 0x0a, 0x0b, 0x52, 0x65, 0x6c,
|
||||
0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, 0x6c, 0x65, 0x61,
|
||||
0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x14, 0x2e,
|
||||
0x52, 0x65, 0x6c, 0x65, 0x61, 0x73, 0x65, 0x53, 0x6c, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f,
|
||||
0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x4f, 0x0a, 0x12, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74, 0x57,
|
||||
0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x1a, 0x2e, 0x55, 0x70,
|
||||
0x73, 0x65, 0x72, 0x74, 0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73,
|
||||
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x55, 0x70, 0x73, 0x65, 0x72, 0x74,
|
||||
0x57, 0x6f, 0x72, 0x6b, 0x65, 0x72, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x52, 0x65, 0x73, 0x70,
|
||||
0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x47, 0x5a, 0x45, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62,
|
||||
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76,
|
||||
0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61,
|
||||
0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x64, 0x69, 0x73, 0x70, 0x61,
|
||||
0x74, 0x63, 0x68, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x62,
|
||||
0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||
}
|
||||
|
||||
var (
|
||||
|
||||
@@ -27,6 +27,287 @@ import (
|
||||
tasktypes "github.com/hatchet-dev/hatchet/internal/services/shared/tasktypes/v1"
|
||||
)
|
||||
|
||||
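// timeoutEvent pairs a snapshot of buffered events with the time at which
// they should be force-flushed if they are still unsent.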
type timeoutEvent struct {
|
||||
events []*contracts.WorkflowEvent
|
||||
timeoutAt time.Time
|
||||
}
|
||||
|
||||
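// StreamEventBuffer reorders stream events per step run. Indexed events are
// released in eventIndex order, out-of-order events are buffered until the
// missing index arrives or a timeout elapses, and buffered events are flushed
// when a terminal event for the step run is seen.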
type StreamEventBuffer struct {
|
||||
stepRunIdToWorkflowEvents map[string][]*contracts.WorkflowEvent
|
||||
stepRunIdToExpectedIndex map[string]int64
|
||||
stepRunIdToLastSeenTime map[string]time.Time
|
||||
stepRunIdToCompletionTime map[string]time.Time
|
||||
mu sync.Mutex
|
||||
timeoutDuration time.Duration
|
||||
gracePeriod time.Duration
|
||||
eventsChan chan *contracts.WorkflowEvent
|
||||
timedOutEventProducer chan timeoutEvent
|
||||
ctx context.Context
|
||||
cancel context.CancelFunc
|
||||
}
|
||||
|
||||
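// NewStreamEventBuffer constructs a buffer with the given flush timeout and
// starts the background goroutines that handle timeouts and state cleanup.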
func NewStreamEventBuffer(timeout time.Duration) *StreamEventBuffer {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
|
||||
buffer := &StreamEventBuffer{
|
||||
stepRunIdToWorkflowEvents: make(map[string][]*contracts.WorkflowEvent),
|
||||
stepRunIdToExpectedIndex: make(map[string]int64),
|
||||
stepRunIdToLastSeenTime: make(map[string]time.Time),
|
||||
stepRunIdToCompletionTime: make(map[string]time.Time),
|
||||
timeoutDuration: timeout,
|
||||
gracePeriod: 2 * time.Second, // Wait 2 seconds after completion for late events
|
||||
eventsChan: make(chan *contracts.WorkflowEvent, 100),
|
||||
timedOutEventProducer: make(chan timeoutEvent, 100),
|
||||
ctx: ctx,
|
||||
cancel: cancel,
|
||||
}
|
||||
|
||||
go buffer.processTimeoutEvents()
|
||||
go buffer.periodicCleanup()
|
||||
|
||||
return buffer
|
||||
}
|
||||
|
||||
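// isTerminalEvent reports whether the event marks a step run as completed,
// failed, or cancelled.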
func isTerminalEvent(event *contracts.WorkflowEvent) bool {
|
||||
if event == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return event.ResourceType == contracts.ResourceType_RESOURCE_TYPE_STEP_RUN &&
|
||||
(event.EventType == contracts.ResourceEventType_RESOURCE_EVENT_TYPE_COMPLETED ||
|
||||
event.EventType == contracts.ResourceEventType_RESOURCE_EVENT_TYPE_FAILED ||
|
||||
event.EventType == contracts.ResourceEventType_RESOURCE_EVENT_TYPE_CANCELLED)
|
||||
}
|
||||
|
||||
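// sortByEventIndex orders events by their optional eventIndex, falling back to
// the event timestamp when neither event carries an index.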
func sortByEventIndex(a, b *contracts.WorkflowEvent) int {
|
||||
if a.EventIndex == nil && b.EventIndex == nil {
|
||||
if a.EventTimestamp.AsTime().Before(b.EventTimestamp.AsTime()) {
|
||||
return -1
|
||||
}
|
||||
|
||||
if a.EventTimestamp.AsTime().After(b.EventTimestamp.AsTime()) {
|
||||
return 1
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
if *a.EventIndex < *b.EventIndex {
|
||||
return -1
|
||||
}
|
||||
|
||||
if *a.EventIndex > *b.EventIndex {
|
||||
return 1
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
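// processTimeoutEvents waits for each scheduled timeout and then flushes any
// events still buffered for the affected step runs, resetting their expected
// index to -1 so the next indexed event starts a fresh sequence.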
func (b *StreamEventBuffer) processTimeoutEvents() {
|
||||
for {
|
||||
select {
|
||||
case <-b.ctx.Done():
|
||||
return
|
||||
case timeoutEvent := <-b.timedOutEventProducer:
|
||||
timer := time.NewTimer(time.Until(timeoutEvent.timeoutAt))
|
||||
|
||||
select {
|
||||
case <-b.ctx.Done():
|
||||
timer.Stop()
|
||||
return
|
||||
case <-timer.C:
|
||||
b.mu.Lock()
|
||||
for _, event := range timeoutEvent.events {
|
||||
stepRunId := event.ResourceId
|
||||
|
||||
if bufferedEvents, exists := b.stepRunIdToWorkflowEvents[stepRunId]; exists {
|
||||
for _, e := range bufferedEvents {
|
||||
select {
|
||||
case b.eventsChan <- e:
|
||||
case <-b.ctx.Done():
|
||||
b.mu.Unlock()
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
delete(b.stepRunIdToWorkflowEvents, stepRunId)
|
||||
delete(b.stepRunIdToLastSeenTime, stepRunId)
|
||||
b.stepRunIdToExpectedIndex[stepRunId] = -1
|
||||
}
|
||||
}
|
||||
b.mu.Unlock()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (b *StreamEventBuffer) Events() <-chan *contracts.WorkflowEvent {
|
||||
return b.eventsChan
|
||||
}
|
||||
|
||||
func (b *StreamEventBuffer) Close() {
|
||||
b.cancel()
|
||||
close(b.eventsChan)
|
||||
close(b.timedOutEventProducer)
|
||||
}
|
||||
|
||||
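// periodicCleanup drops all per-step-run state once the post-completion grace
// period has elapsed.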
func (b *StreamEventBuffer) periodicCleanup() {
|
||||
ticker := time.NewTicker(time.Second)
|
||||
defer ticker.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-b.ctx.Done():
|
||||
return
|
||||
case <-ticker.C:
|
||||
b.mu.Lock()
|
||||
now := time.Now()
|
||||
|
||||
for stepRunId, completionTime := range b.stepRunIdToCompletionTime {
|
||||
if now.Sub(completionTime) > b.gracePeriod {
|
||||
delete(b.stepRunIdToWorkflowEvents, stepRunId)
|
||||
delete(b.stepRunIdToExpectedIndex, stepRunId)
|
||||
delete(b.stepRunIdToLastSeenTime, stepRunId)
|
||||
delete(b.stepRunIdToCompletionTime, stepRunId)
|
||||
}
|
||||
}
|
||||
|
||||
b.mu.Unlock()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
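// AddEvent routes an incoming workflow event. Non-stream events are forwarded
// immediately (terminal events first flush anything buffered for the step
// run), in-order stream events are released right away, and out-of-order
// stream events are buffered until the gap is filled or a timeout fires.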
func (b *StreamEventBuffer) AddEvent(event *contracts.WorkflowEvent) {
|
||||
b.mu.Lock()
|
||||
defer b.mu.Unlock()
|
||||
|
||||
stepRunId := event.ResourceId
|
||||
now := time.Now()
|
||||
|
||||
if event.ResourceType != contracts.ResourceType_RESOURCE_TYPE_STEP_RUN ||
|
||||
event.EventType != contracts.ResourceEventType_RESOURCE_EVENT_TYPE_STREAM {
|
||||
|
||||
if isTerminalEvent(event) {
|
||||
if events, exists := b.stepRunIdToWorkflowEvents[stepRunId]; exists && len(events) > 0 {
|
||||
slices.SortFunc(events, sortByEventIndex)
|
||||
|
||||
for _, e := range events {
|
||||
select {
|
||||
case b.eventsChan <- e:
|
||||
case <-b.ctx.Done():
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
delete(b.stepRunIdToWorkflowEvents, stepRunId)
|
||||
delete(b.stepRunIdToExpectedIndex, stepRunId)
|
||||
delete(b.stepRunIdToLastSeenTime, stepRunId)
|
||||
}
|
||||
|
||||
b.stepRunIdToCompletionTime[stepRunId] = now
|
||||
}
|
||||
|
||||
select {
|
||||
case b.eventsChan <- event:
|
||||
case <-b.ctx.Done():
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
b.stepRunIdToLastSeenTime[stepRunId] = now
|
||||
|
||||
if _, exists := b.stepRunIdToExpectedIndex[stepRunId]; !exists {
|
||||
// IMPORTANT: Events are zero-indexed
|
||||
b.stepRunIdToExpectedIndex[stepRunId] = 0
|
||||
}
|
||||
|
||||
// If EventIndex is nil, don't buffer - just release the event immediately
|
||||
if event.EventIndex == nil {
|
||||
select {
|
||||
case b.eventsChan <- event:
|
||||
case <-b.ctx.Done():
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
expectedIndex := b.stepRunIdToExpectedIndex[stepRunId]
|
||||
|
||||
// IMPORTANT: if expected index is -1, it means we're starting fresh after a timeout
|
||||
if expectedIndex == -1 && event.EventIndex != nil {
|
||||
b.stepRunIdToExpectedIndex[stepRunId] = *event.EventIndex
|
||||
expectedIndex = *event.EventIndex
|
||||
}
|
||||
|
||||
// For stream events: if this event is the next expected one, send it immediately
|
||||
// Only buffer if it's out of order
|
||||
if *event.EventIndex == expectedIndex {
|
||||
if bufferedEvents, exists := b.stepRunIdToWorkflowEvents[stepRunId]; exists && len(bufferedEvents) > 0 {
|
||||
b.stepRunIdToWorkflowEvents[stepRunId] = append(bufferedEvents, event)
|
||||
slices.SortFunc(b.stepRunIdToWorkflowEvents[stepRunId], sortByEventIndex)
|
||||
|
||||
b.sendReadyEvents(stepRunId)
|
||||
} else {
|
||||
b.stepRunIdToExpectedIndex[stepRunId] = expectedIndex + 1
|
||||
select {
|
||||
case b.eventsChan <- event:
|
||||
case <-b.ctx.Done():
|
||||
return
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
if _, exists := b.stepRunIdToWorkflowEvents[stepRunId]; !exists {
|
||||
b.stepRunIdToWorkflowEvents[stepRunId] = make([]*contracts.WorkflowEvent, 0)
|
||||
}
|
||||
|
||||
b.stepRunIdToWorkflowEvents[stepRunId] = append(b.stepRunIdToWorkflowEvents[stepRunId], event)
|
||||
slices.SortFunc(b.stepRunIdToWorkflowEvents[stepRunId], sortByEventIndex)
|
||||
|
||||
b.sendReadyEvents(stepRunId)
|
||||
|
||||
b.scheduleTimeoutIfNeeded(stepRunId, now)
|
||||
}
|
||||
|
||||
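// scheduleTimeoutIfNeeded schedules a forced flush of the currently buffered
// events once the buffer's timeout duration has passed.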
func (b *StreamEventBuffer) scheduleTimeoutIfNeeded(stepRunId string, eventTime time.Time) {
|
||||
if events, exists := b.stepRunIdToWorkflowEvents[stepRunId]; exists && len(events) > 0 {
|
||||
timeoutAt := eventTime.Add(b.timeoutDuration)
|
||||
|
||||
timeoutEvent := timeoutEvent{
|
||||
events: append([]*contracts.WorkflowEvent{}, events...),
|
||||
timeoutAt: timeoutAt,
|
||||
}
|
||||
|
||||
select {
|
||||
case b.timedOutEventProducer <- timeoutEvent:
|
||||
case <-b.ctx.Done():
|
||||
return
|
||||
default:
|
||||
// If the channel is full, we skip this timeout scheduling
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
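// sendReadyEvents emits buffered events whose indices are contiguous with the
// expected index, advancing the expected index as each event is sent.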
func (b *StreamEventBuffer) sendReadyEvents(stepRunId string) {
|
||||
events := b.stepRunIdToWorkflowEvents[stepRunId]
|
||||
expectedIdx := b.stepRunIdToExpectedIndex[stepRunId]
|
||||
|
||||
for len(events) > 0 && events[0].EventIndex != nil && *events[0].EventIndex == expectedIdx {
|
||||
select {
|
||||
case b.eventsChan <- events[0]:
|
||||
case <-b.ctx.Done():
|
||||
return
|
||||
}
|
||||
events = events[1:]
|
||||
expectedIdx++
|
||||
}
|
||||
|
||||
b.stepRunIdToWorkflowEvents[stepRunId] = events
|
||||
b.stepRunIdToExpectedIndex[stepRunId] = expectedIdx
|
||||
}
|
||||
|
||||
// SubscribeToWorkflowEvents registers workflow events with the dispatcher
|
||||
func (s *DispatcherImpl) subscribeToWorkflowRunsV1(server contracts.Dispatcher_SubscribeToWorkflowRunsServer) error {
|
||||
tenant := server.Context().Value("tenant").(*dbsqlc.Tenant)
|
||||
@@ -558,6 +839,38 @@ func (s *DispatcherImpl) subscribeToWorkflowEventsByWorkflowRunIdV1(workflowRunI
|
||||
var mu sync.Mutex // Mutex to protect activeRunIds
|
||||
var sendMu sync.Mutex // Mutex to protect sending messages
|
||||
|
||||
streamBuffer := NewStreamEventBuffer(5 * time.Second)
|
||||
defer streamBuffer.Close()
|
||||
|
||||
// Handle events from the stream buffer
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
case event, ok := <-streamBuffer.Events():
|
||||
if !ok {
|
||||
return
|
||||
}
|
||||
|
||||
sendMu.Lock()
|
||||
err := stream.Send(event)
|
||||
sendMu.Unlock()
|
||||
|
||||
if err != nil {
|
||||
s.l.Error().Err(err).Msgf("could not send workflow event to client")
|
||||
cancel()
|
||||
return
|
||||
}
|
||||
|
||||
if event.Hangup {
|
||||
cancel()
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
f := func(tenantId, msgId string, payloads [][]byte) error {
|
||||
wg.Add(1)
|
||||
defer wg.Done()
|
||||
@@ -631,19 +944,7 @@ func (s *DispatcherImpl) subscribeToWorkflowEventsByWorkflowRunIdV1(workflowRunI
|
||||
|
||||
// send the task to the client
|
||||
for _, e := range events {
|
||||
sendMu.Lock()
|
||||
err = stream.Send(e)
|
||||
sendMu.Unlock()
|
||||
|
||||
if err != nil {
|
||||
cancel()
|
||||
s.l.Error().Err(err).Msgf("could not send workflow event to client")
|
||||
return nil
|
||||
}
|
||||
|
||||
if e.Hangup {
|
||||
cancel()
|
||||
}
|
||||
streamBuffer.AddEvent(e)
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -924,6 +1225,7 @@ func (s *DispatcherImpl) msgsToWorkflowEvent(msgId string, payloads [][]byte, fi
|
||||
EventType: contracts.ResourceEventType_RESOURCE_EVENT_TYPE_STREAM,
|
||||
EventTimestamp: timestamppb.New(payload.CreatedAt),
|
||||
EventPayload: string(payload.Payload),
|
||||
EventIndex: payload.EventIndex,
|
||||
})
|
||||
}
|
||||
case "workflow-run-finished":
|
||||
@@ -972,15 +1274,7 @@ func (s *DispatcherImpl) msgsToWorkflowEvent(msgId string, payloads [][]byte, fi
|
||||
return -1
|
||||
}
|
||||
|
||||
if a.EventTimestamp.AsTime().Before(b.EventTimestamp.AsTime()) {
|
||||
return -1
|
||||
}
|
||||
|
||||
if a.EventTimestamp.AsTime().After(b.EventTimestamp.AsTime()) {
|
||||
return 1
|
||||
}
|
||||
|
||||
return 0
|
||||
return sortByEventIndex(a, b)
|
||||
})
|
||||
|
||||
return matches, nil
|
||||
|
||||
internal/services/dispatcher/stream_event_buffer_test.go (Normal file, 301 lines)
@@ -0,0 +1,301 @@
|
||||
//go:build !e2e && !load && !rampup && !integration
|
||||
|
||||
package dispatcher
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"google.golang.org/protobuf/types/known/timestamppb"
|
||||
)
|
||||
|
||||
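// genEvent builds a stream WorkflowEvent for a fixed test step run, optionally
// carrying an event index and a hangup flag.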
func genEvent(payload string, hangup bool, eventIndex *int64) *contracts.WorkflowEvent {
|
||||
return &contracts.WorkflowEvent{
|
||||
WorkflowRunId: "test-run-id",
|
||||
ResourceId: "test-step-run-id",
|
||||
ResourceType: contracts.ResourceType_RESOURCE_TYPE_STEP_RUN,
|
||||
EventType: contracts.ResourceEventType_RESOURCE_EVENT_TYPE_STREAM,
|
||||
EventTimestamp: timestamppb.Now(),
|
||||
EventPayload: payload,
|
||||
Hangup: hangup,
|
||||
EventIndex: eventIndex,
|
||||
}
|
||||
}
|
||||
|
||||
func TestStreamBuffer_BasicEventRelease(t *testing.T) {
|
||||
buffer := NewStreamEventBuffer(5 * time.Second)
|
||||
defer buffer.Close()
|
||||
|
||||
ix := int64(0)
|
||||
|
||||
event := genEvent("test_payload", false, &ix)
|
||||
|
||||
buffer.AddEvent(event)
|
||||
|
||||
select {
|
||||
case receivedEvent := <-buffer.Events():
|
||||
assert.Equal(t, event, receivedEvent)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatal("Expected event was not received")
|
||||
}
|
||||
}
|
||||
|
||||
func TestStreamBuffer_OutOfOrderRelease(t *testing.T) {
|
||||
buffer := NewStreamEventBuffer(5 * time.Second)
|
||||
defer buffer.Close()
|
||||
|
||||
ix0 := int64(0)
|
||||
ix1 := int64(1)
|
||||
ix2 := int64(2)
|
||||
|
||||
event2 := genEvent("test_payload", false, &ix1)
|
||||
|
||||
buffer.AddEvent(event2)
|
||||
|
||||
select {
|
||||
case <-buffer.Events():
|
||||
t.Fatal("Should not receive out-of-order event")
|
||||
case <-time.After(100 * time.Millisecond):
|
||||
// Expected - no event should be received
|
||||
}
|
||||
|
||||
event3 := genEvent("test_payload", false, &ix2)
|
||||
buffer.AddEvent(event3)
|
||||
|
||||
select {
|
||||
case <-buffer.Events():
|
||||
t.Fatal("Should not receive out-of-order event")
|
||||
case <-time.After(100 * time.Millisecond):
|
||||
// Expected - no event should be received
|
||||
}
|
||||
|
||||
event1 := genEvent("test_payload", false, &ix0)
|
||||
buffer.AddEvent(event1)
|
||||
|
||||
receivedEvents := make([]*contracts.WorkflowEvent, 0, 3)
|
||||
for i := 0; i < 3; i++ {
|
||||
select {
|
||||
case event := <-buffer.Events():
|
||||
receivedEvents = append(receivedEvents, event)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatalf("Expected to receive event %d", i)
|
||||
}
|
||||
}
|
||||
|
||||
assert.Equal(t, 3, len(receivedEvents))
|
||||
assert.Equal(t, event1, receivedEvents[0])
|
||||
assert.Equal(t, event2, receivedEvents[1])
|
||||
assert.Equal(t, event3, receivedEvents[2])
|
||||
}
|
||||
|
||||
func TestStreamBuffer_Timeout(t *testing.T) {
|
||||
buffer := NewStreamEventBuffer(1 * time.Second)
|
||||
defer buffer.Close()
|
||||
|
||||
ix1 := int64(1)
|
||||
ix2 := int64(2)
|
||||
ix0 := int64(0)
|
||||
|
||||
event2 := genEvent("test_payload", false, &ix1)
|
||||
buffer.AddEvent(event2)
|
||||
|
||||
select {
|
||||
case <-buffer.Events():
|
||||
t.Fatal("Should not receive out-of-order event")
|
||||
case <-time.After(100 * time.Millisecond):
|
||||
// Expected - no event should be received
|
||||
}
|
||||
|
||||
event3 := genEvent("test_payload", false, &ix2)
|
||||
buffer.AddEvent(event3)
|
||||
|
||||
select {
|
||||
case <-buffer.Events():
|
||||
t.Fatal("Should not receive out-of-order event")
|
||||
case <-time.After(100 * time.Millisecond):
|
||||
// Expected - no event should be received
|
||||
}
|
||||
|
||||
time.Sleep(2 * time.Second)
|
||||
|
||||
receivedEvents := make([]*contracts.WorkflowEvent, 0, 2)
|
||||
for i := 0; i < 2; i++ {
|
||||
select {
|
||||
case event := <-buffer.Events():
|
||||
receivedEvents = append(receivedEvents, event)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatalf("Expected to receive timed out event %d", i)
|
||||
}
|
||||
}
|
||||
|
||||
assert.Equal(t, 2, len(receivedEvents))
|
||||
assert.Equal(t, event2, receivedEvents[0])
|
||||
assert.Equal(t, event3, receivedEvents[1])
|
||||
|
||||
event1 := genEvent("test_payload", false, &ix0)
|
||||
buffer.AddEvent(event1)
|
||||
|
||||
// This should be released immediately (fresh sequence after timeout)
|
||||
select {
|
||||
case receivedEvent := <-buffer.Events():
|
||||
assert.Equal(t, event1, receivedEvent)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatal("Expected event was not received")
|
||||
}
|
||||
}
|
||||
|
||||
func TestStreamBuffer_TimeoutWithSubsequentOrdering(t *testing.T) {
|
||||
buffer := NewStreamEventBuffer(500 * time.Millisecond)
|
||||
defer buffer.Close()
|
||||
|
||||
ix1 := int64(1)
|
||||
ix2 := int64(2)
|
||||
ix5 := int64(5)
|
||||
ix6 := int64(6)
|
||||
|
||||
event1 := genEvent("payload1", false, &ix1)
|
||||
buffer.AddEvent(event1)
|
||||
|
||||
select {
|
||||
case <-buffer.Events():
|
||||
t.Fatal("Should not receive out-of-order event")
|
||||
case <-time.After(100 * time.Millisecond):
|
||||
// Expected - no event should be received
|
||||
}
|
||||
|
||||
event2 := genEvent("payload2", false, &ix2)
|
||||
buffer.AddEvent(event2)
|
||||
|
||||
select {
|
||||
case <-buffer.Events():
|
||||
t.Fatal("Should not receive out-of-order event")
|
||||
case <-time.After(100 * time.Millisecond):
|
||||
// Expected - no event should be received
|
||||
}
|
||||
|
||||
time.Sleep(1 * time.Second)
|
||||
|
||||
receivedEvents := make([]*contracts.WorkflowEvent, 0, 2)
|
||||
for i := 0; i < 2; i++ {
|
||||
select {
|
||||
case event := <-buffer.Events():
|
||||
receivedEvents = append(receivedEvents, event)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatalf("Expected to receive timed out event %d", i)
|
||||
}
|
||||
}
|
||||
|
||||
assert.Equal(t, 2, len(receivedEvents))
|
||||
assert.Equal(t, event1, receivedEvents[0])
|
||||
assert.Equal(t, event2, receivedEvents[1])
|
||||
|
||||
// Now start a new sequence - event 5 should start a fresh sequence
|
||||
event5 := genEvent("payload5", false, &ix5)
|
||||
buffer.AddEvent(event5)
|
||||
|
||||
select {
|
||||
case receivedEvent := <-buffer.Events():
|
||||
assert.Equal(t, event5, receivedEvent)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatal("Expected event was not received")
|
||||
}
|
||||
|
||||
// Event 6 should be released immediately as it's the next in sequence
|
||||
event6 := genEvent("payload6", false, &ix6)
|
||||
buffer.AddEvent(event6)
|
||||
|
||||
select {
|
||||
case receivedEvent := <-buffer.Events():
|
||||
assert.Equal(t, event6, receivedEvent)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatal("Expected event was not received")
|
||||
}
|
||||
}
|
||||
|
||||
func TestStreamBuffer_HangupHandling(t *testing.T) {
|
||||
buffer := NewStreamEventBuffer(500 * time.Millisecond)
|
||||
defer buffer.Close()
|
||||
|
||||
ix0 := int64(0)
|
||||
ix1 := int64(1)
|
||||
ix2 := int64(2)
|
||||
ix3 := int64(3)
|
||||
|
||||
event2 := genEvent("first-event", false, &ix1)
|
||||
event3 := genEvent("second-event", false, &ix2)
|
||||
|
||||
buffer.AddEvent(event2)
|
||||
|
||||
select {
|
||||
case <-buffer.Events():
|
||||
t.Fatal("Should not receive out-of-order event")
|
||||
case <-time.After(100 * time.Millisecond):
|
||||
// Expected - no event should be received
|
||||
}
|
||||
|
||||
buffer.AddEvent(event3)
|
||||
|
||||
select {
|
||||
case <-buffer.Events():
|
||||
t.Fatal("Should not receive out-of-order event")
|
||||
case <-time.After(100 * time.Millisecond):
|
||||
// Expected - no event should be received
|
||||
}
|
||||
|
||||
eventHangup := genEvent("hangup-event", true, &ix3)
|
||||
buffer.AddEvent(eventHangup)
|
||||
|
||||
select {
|
||||
case <-buffer.Events():
|
||||
t.Fatal("Should not receive out-of-order event")
|
||||
case <-time.After(100 * time.Millisecond):
|
||||
// Expected - no event should be received
|
||||
}
|
||||
|
||||
event0 := genEvent("first-event", false, &ix0)
|
||||
buffer.AddEvent(event0)
|
||||
|
||||
receivedEvents := make([]*contracts.WorkflowEvent, 0, 4)
|
||||
for i := 0; i < 4; i++ {
|
||||
select {
|
||||
case event := <-buffer.Events():
|
||||
receivedEvents = append(receivedEvents, event)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatalf("Expected to receive event %d", i)
|
||||
}
|
||||
}
|
||||
|
||||
assert.Equal(t, 4, len(receivedEvents))
|
||||
assert.Equal(t, event0, receivedEvents[0])
|
||||
assert.Equal(t, event2, receivedEvents[1])
|
||||
assert.Equal(t, event3, receivedEvents[2])
|
||||
assert.Equal(t, eventHangup, receivedEvents[3])
|
||||
}
|
||||
|
||||
func TestStreamBuffer_NoIndexSent(t *testing.T) {
|
||||
buffer := NewStreamEventBuffer(500 * time.Millisecond)
|
||||
defer buffer.Close()
|
||||
|
||||
event1 := genEvent("first-event", false, nil)
|
||||
event2 := genEvent("second-event", false, nil)
|
||||
|
||||
buffer.AddEvent(event2)
|
||||
|
||||
select {
|
||||
case receivedEvent := <-buffer.Events():
|
||||
assert.Equal(t, event2, receivedEvent)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatal("Expected event was not received")
|
||||
}
|
||||
|
||||
buffer.AddEvent(event1)
|
||||
|
||||
select {
|
||||
case receivedEvent := <-buffer.Events():
|
||||
assert.Equal(t, event1, receivedEvent)
|
||||
case <-time.After(1 * time.Second):
|
||||
t.Fatal("Expected event was not received")
|
||||
}
|
||||
}
|
||||
@@ -313,7 +313,8 @@ type PutStreamEventRequest struct {
    // the stream event message
    Message []byte `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"`
    // associated stream event metadata
    Metadata string `protobuf:"bytes,5,opt,name=metadata,proto3" json:"metadata,omitempty"`
    Metadata   string `protobuf:"bytes,5,opt,name=metadata,proto3" json:"metadata,omitempty"`
    EventIndex *int64 `protobuf:"varint,6,opt,name=eventIndex,proto3,oneof" json:"eventIndex,omitempty"`
}

func (x *PutStreamEventRequest) Reset() {
@@ -376,6 +377,13 @@ func (x *PutStreamEventRequest) GetMetadata() string {
    return ""
}

func (x *PutStreamEventRequest) GetEventIndex() int64 {
    if x != nil && x.EventIndex != nil {
        return *x.EventIndex
    }
    return 0
}

type PutStreamEventResponse struct {
    state         protoimpl.MessageState
    sizeCache     protoimpl.SizeCache
@@ -645,7 +653,7 @@ var file_events_proto_rawDesc = []byte{
|
||||
0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x42, 0x11, 0x0a, 0x0f,
|
||||
0x5f, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22,
|
||||
0x10, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
|
||||
0x65, 0x22, 0xa5, 0x01, 0x0a, 0x15, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45,
|
||||
0x65, 0x22, 0xd9, 0x01, 0x0a, 0x15, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45,
|
||||
0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73,
|
||||
0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
|
||||
0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x63, 0x72, 0x65,
|
||||
@@ -655,56 +663,59 @@ var file_events_proto_rawDesc = []byte{
|
||||
0x64, 0x41, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03,
|
||||
0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1a, 0x0a,
|
||||
0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52,
|
||||
0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x18, 0x0a, 0x16, 0x50, 0x75, 0x74,
|
||||
0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f,
|
||||
0x6e, 0x73, 0x65, 0x22, 0x41, 0x0a, 0x14, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x45,
|
||||
0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x29, 0x0a, 0x06, 0x65,
|
||||
0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x50, 0x75,
|
||||
0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x06,
|
||||
0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xa1, 0x02, 0x0a, 0x10, 0x50, 0x75, 0x73, 0x68, 0x45,
|
||||
0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b,
|
||||
0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a,
|
||||
0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07,
|
||||
0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74,
|
||||
0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32,
|
||||
0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75,
|
||||
0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65,
|
||||
0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x33, 0x0a, 0x12, 0x61,
|
||||
0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x23, 0x0a, 0x0a, 0x65, 0x76, 0x65,
|
||||
0x6e, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x48, 0x00, 0x52,
|
||||
0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x42, 0x0d,
|
||||
0x0a, 0x0b, 0x5f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x22, 0x18, 0x0a,
|
||||
0x16, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52,
|
||||
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x41, 0x0a, 0x14, 0x42, 0x75, 0x6c, 0x6b, 0x50,
|
||||
0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
|
||||
0x29, 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32,
|
||||
0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65,
|
||||
0x73, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xa1, 0x02, 0x0a, 0x10, 0x50,
|
||||
0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
|
||||
0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65,
|
||||
0x79, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x02, 0x20, 0x01,
|
||||
0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65,
|
||||
0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20,
|
||||
0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f,
|
||||
0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52,
|
||||
0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12,
|
||||
0x33, 0x0a, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74,
|
||||
0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x12, 0x61,
|
||||
0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74,
|
||||
0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74,
|
||||
0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01,
|
||||
0x12, 0x1f, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x05, 0x20, 0x01,
|
||||
0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x88, 0x01,
|
||||
0x01, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09,
|
||||
0x48, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x88, 0x01, 0x01, 0x42, 0x15, 0x0a, 0x13,
|
||||
0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64,
|
||||
0x61, 0x74, 0x61, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79,
|
||||
0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x22, 0x2e, 0x0a, 0x12, 0x52, 0x65,
|
||||
0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74,
|
||||
0x12, 0x18, 0x0a, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28,
|
||||
0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x32, 0x88, 0x02, 0x0a, 0x0d, 0x45,
|
||||
0x76, 0x65, 0x6e, 0x74, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x23, 0x0a, 0x04,
|
||||
0x50, 0x75, 0x73, 0x68, 0x12, 0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74,
|
||||
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22,
|
||||
0x00, 0x12, 0x2c, 0x0a, 0x08, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x12, 0x15, 0x2e,
|
||||
0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71,
|
||||
0x75, 0x65, 0x73, 0x74, 0x1a, 0x07, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x00, 0x12,
|
||||
0x32, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x45,
|
||||
0x76, 0x65, 0x6e, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65,
|
||||
0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e,
|
||||
0x74, 0x22, 0x00, 0x12, 0x2b, 0x0a, 0x06, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x12, 0x0e, 0x2e,
|
||||
0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e,
|
||||
0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00,
|
||||
0x12, 0x43, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65,
|
||||
0x6e, 0x74, 0x12, 0x16, 0x2e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76,
|
||||
0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x50, 0x75, 0x74,
|
||||
0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f,
|
||||
0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x45, 0x5a, 0x43, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e,
|
||||
0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f,
|
||||
0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c,
|
||||
0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x69, 0x6e, 0x67, 0x65, 0x73, 0x74,
|
||||
0x6f, 0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x62, 0x06, 0x70, 0x72,
|
||||
0x6f, 0x74, 0x6f, 0x33,
|
||||
0x61, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79,
|
||||
0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69,
|
||||
0x74, 0x79, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x06,
|
||||
0x20, 0x01, 0x28, 0x09, 0x48, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x88, 0x01, 0x01,
|
||||
0x42, 0x15, 0x0a, 0x13, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d,
|
||||
0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x72, 0x69, 0x6f,
|
||||
0x72, 0x69, 0x74, 0x79, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x22, 0x2e,
|
||||
0x0a, 0x12, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71,
|
||||
0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x18,
|
||||
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x32, 0x88,
|
||||
0x02, 0x0a, 0x0d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65,
|
||||
0x12, 0x23, 0x0a, 0x04, 0x50, 0x75, 0x73, 0x68, 0x12, 0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45,
|
||||
0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76,
|
||||
0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2c, 0x0a, 0x08, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73,
|
||||
0x68, 0x12, 0x15, 0x2e, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e,
|
||||
0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x07, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74,
|
||||
0x73, 0x22, 0x00, 0x12, 0x32, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x53, 0x69, 0x6e,
|
||||
0x67, 0x6c, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x61,
|
||||
0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e,
|
||||
0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2b, 0x0a, 0x06, 0x50, 0x75, 0x74, 0x4c, 0x6f,
|
||||
0x67, 0x12, 0x0e, 0x2e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
|
||||
0x74, 0x1a, 0x0f, 0x2e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
|
||||
0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61,
|
||||
0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x16, 0x2e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65,
|
||||
0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17,
|
||||
0x2e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52,
|
||||
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x45, 0x5a, 0x43, 0x67, 0x69, 0x74,
|
||||
0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d,
|
||||
0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65,
|
||||
0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x69, 0x6e,
|
||||
0x67, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73,
|
||||
0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
|
||||
}
|
||||
|
||||
var (
|
||||
@@ -873,6 +884,7 @@ func file_events_proto_init() {
|
||||
}
|
||||
file_events_proto_msgTypes[0].OneofWrappers = []interface{}{}
|
||||
file_events_proto_msgTypes[2].OneofWrappers = []interface{}{}
|
||||
file_events_proto_msgTypes[4].OneofWrappers = []interface{}{}
|
||||
file_events_proto_msgTypes[7].OneofWrappers = []interface{}{}
|
||||
type x struct{}
|
||||
out := protoimpl.TypeBuilder{
|
||||
|
||||
@@ -37,6 +37,7 @@ func (i *IngestorImpl) putStreamEventV1(ctx context.Context, tenant *dbsqlc.Tena
|
||||
StepRunId: req.StepRunId,
|
||||
CreatedAt: req.CreatedAt.AsTime(),
|
||||
Payload: req.Message,
|
||||
EventIndex: req.EventIndex,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -32,4 +32,5 @@ type StreamEventPayload struct {
|
||||
CreatedAt time.Time `json:"created_at" validate:"required"`
|
||||
Payload []byte `json:"payload"`
|
||||
RetryCount *int32 `json:"retry_count,omitempty"`
|
||||
EventIndex *int64 `json:"event_index"`
|
||||
}
|
||||
|
||||
@@ -5,7 +5,26 @@ All notable changes to Hatchet's Python SDK will be documented in this changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [1.12.2] - 2025-06-25
## [1.13.0] - 2025-06-25

### Added

- Documentation for the `Context` classes
- Allows a worker to be terminated after a certain number of tasks by providing the `terminate_worker_after_num_tasks` config option (see the sketch below)
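
A minimal sketch of the new worker cap. Where exactly the SDK surfaces `terminate_worker_after_num_tasks` is an assumption here (it is shown on `ClientConfig`) and should be checked against the release:

```python
from hatchet_sdk import ClientConfig, Hatchet

# Assumption: the option is passed through ClientConfig; adjust if the SDK
# exposes it elsewhere (e.g. directly on the worker).
hatchet = Hatchet(config=ClientConfig(terminate_worker_after_num_tasks=100))

worker = hatchet.worker("capped-worker")

# After roughly 100 completed tasks the worker finishes in-flight work and exits gracefully.
worker.start()
```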
|
||||
### Changed
|
||||
|
||||
- Adds a number of helpful Ruff linting rules
|
||||
- `DedupeViolationErr` is now `DedupeViolationError`
|
||||
- Fixed events documentation to correctly have a skipped run example.
|
||||
- Changed default arguments to many methods from mutable defaults like `[]` to None
|
||||
- Changes `JSONSerializableMapping` from `Mapping` to `dict`
|
||||
- Handles some potential bugs related to `asyncio` tasks being garbage collected.
|
||||
- Improves exception printing with an `ExceptionGroup` implementation
|
||||
- Fixes a bug with namespacing of user event conditions where the namespace was not respected so the task waiting for it would hang
|
||||
- Fixes a memory leak in streaming and logging, and fixes some issues with log capture.
|
||||
|
||||
## [1.12.3] - 2025-06-25
|
||||
|
||||
### Changed
|
||||
|
||||
|
||||
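As a rough illustration of the worker-termination option called out in the Added section above, the sketch below shows one way it could be wired up. It assumes the option is exposed on the SDK's `ClientConfig`; the exact location, worker name, and default value are assumptions, so check the SDK for the authoritative API.

```python
from hatchet_sdk import ClientConfig, Hatchet

# Assumption: the option lives on ClientConfig; consult the SDK release notes
# for the authoritative name and location.
hatchet = Hatchet(
    config=ClientConfig(
        terminate_worker_after_num_tasks=500,
    )
)


def main() -> None:
    # Once the worker has run roughly 500 tasks it exits gracefully, so an
    # external supervisor (systemd, Kubernetes, ...) can restart it with a
    # fresh process.
    worker = hatchet.worker("recycling-worker")
    worker.start()


if __name__ == "__main__":
    main()
```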
@@ -1,7 +1,7 @@
import re
from collections.abc import Callable
from copy import deepcopy
from pathlib import Path
from typing import Callable


def prepend_import(content: str, import_statement: str) -> str:

@@ -1,5 +1,6 @@
from collections.abc import AsyncGenerator, Generator
from subprocess import Popen
from typing import AsyncGenerator, Generator, cast
from typing import cast

import pytest
import pytest_asyncio

47
sdks/python/docs/context.md
Normal file
@@ -0,0 +1,47 @@
# Context

The Hatchet Context class provides helper methods and useful data to tasks at runtime. It is passed as the second argument to all tasks and durable tasks.

There are two types of context classes you'll encounter:

* `Context` - The standard context for regular tasks with methods for logging, task output retrieval, cancellation, and more
* `DurableContext` - An extended context for durable tasks that includes additional methods for durable execution like `aio_wait_for` and `aio_sleep_for`

## Context

::: context.context.Context
    options:
      inherited_members: false
      members:
        - was_skipped
        - task_output
        - was_triggered_by_event
        - workflow_input
        - lifespan
        - workflow_run_id
        - cancel
        - aio_cancel
        - done
        - log
        - release_slot
        - put_stream
        - refresh_timeout
        - retry_count
        - attempt_number
        - additional_metadata
        - parent_workflow_run_id
        - priority
        - workflow_id
        - workflow_version_id
        - task_run_errors
        - fetch_task_run_error

## DurableContext

::: context.context.DurableContext
    options:
      inherited_members: true
      members:
        - aio_wait_for
        - aio_sleep_for

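To make the member list above concrete, here is a minimal sketch of a regular task that exercises a few `Context` members documented in this file (`log`, `workflow_run_id`, `retry_count`). The workflow, task, and worker names are made up for the example, and the durable-only methods (`aio_wait_for`, `aio_sleep_for`) are not shown.

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()

# Hypothetical workflow, used only to show the Context API documented above.
context_demo = hatchet.workflow(name="context-demo")


@context_demo.task()
def show_context(input: EmptyModel, ctx: Context) -> dict[str, str]:
    # Attach a log line to this task run.
    ctx.log(f"run id: {ctx.workflow_run_id}")

    # retry_count reports which attempt of this task is executing.
    if ctx.retry_count > 0:
        ctx.log("this is a retry")

    return {"status": "success"}


def main() -> None:
    worker = hatchet.worker("context-demo-worker", workflows=[context_demo])
    worker.start()


if __name__ == "__main__":
    main()
```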
@@ -34,7 +34,6 @@ def step1(input: WorkflowInput, ctx: Context) -> None:
print("starting step1")
time.sleep(2)
print("finished step1")
pass


def main() -> None:

@@ -3,7 +3,7 @@ from datetime import timedelta
from typing import Any

from hatchet_sdk import Context, EmptyModel, Hatchet, TriggerWorkflowOptions
from hatchet_sdk.clients.admin import DedupeViolationErr
from hatchet_sdk.exceptions import DedupeViolationError

hatchet = Hatchet(debug=True)

@@ -20,15 +20,13 @@ async def spawn(input: EmptyModel, ctx: Context) -> dict[str, list[Any]]:
for i in range(2):
try:
results.append(
(
dedupe_child_wf.aio_run(
options=TriggerWorkflowOptions(
additional_metadata={"dedupe": "test"}, key=f"child{i}"
),
)
dedupe_child_wf.aio_run(
options=TriggerWorkflowOptions(
additional_metadata={"dedupe": "test"}, key=f"child{i}"
),
)
)
except DedupeViolationErr as e:
except DedupeViolationError as e:
print(f"dedupe violation {e}")
continue

@@ -1,5 +1,4 @@
import asyncio
import os

import pytest

@@ -7,10 +6,6 @@ from examples.durable.worker import EVENT_KEY, SLEEP_TIME, durable_workflow
from hatchet_sdk import Hatchet


@pytest.mark.skipif(
os.getenv("CI", "false").lower() == "true",
reason="Skipped in CI because of unreliability",
)
@pytest.mark.asyncio(loop_scope="session")
async def test_durable(hatchet: Hatchet) -> None:
ref = durable_workflow.run_no_wait()
@@ -28,6 +23,12 @@ async def test_durable(hatchet: Hatchet) -> None:
active_workers = [w for w in workers.rows if w.status == "ACTIVE"]

assert len(active_workers) == 2
assert any(w.name == "e2e-test-worker" for w in active_workers)
assert any(w.name.endswith("e2e-test-worker_durable") for w in active_workers)
assert any(
w.name == hatchet.config.apply_namespace("e2e-test-worker")
for w in active_workers
)
assert any(
w.name == hatchet.config.apply_namespace("e2e-test-worker_durable")
for w in active_workers
)
assert result["durable_task"]["status"] == "success"

@@ -32,7 +32,7 @@ hatchet.event.push(
hatchet.event.push(
event_key=EVENT_KEY,
payload={
"should_skip": True,
"should_skip": False,
},
options=PushEventOptions(
scope="foobarbaz",

@@ -1,8 +1,9 @@
import asyncio
import json
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from datetime import datetime, timedelta, timezone
from typing import AsyncGenerator, cast
from typing import cast
from uuid import uuid4

import pytest
@@ -255,7 +256,9 @@ async def test_async_event_bulk_push(hatchet: Hatchet) -> None:
namespace = "bulk-test"

# Check that the returned events match the original events
for original_event, returned_event in zip(sorted_events, sorted_returned_events):
for original_event, returned_event in zip(
sorted_events, sorted_returned_events, strict=False
):
assert returned_event.key == namespace + original_event.key

@@ -46,7 +46,7 @@ event_workflow_with_filter = hatchet.workflow(
def task(input: EventWorkflowInput, ctx: Context) -> dict[str, str]:
print("event received")

return dict(ctx.filter_payload)
return ctx.filter_payload


# > Accessing the filter payload

@@ -1,6 +1,7 @@
# > Lifespan

from typing import AsyncGenerator, cast
from collections.abc import AsyncGenerator
from typing import cast

from pydantic import BaseModel

@@ -1,4 +1,5 @@
from typing import AsyncGenerator, cast
from collections.abc import AsyncGenerator
from typing import cast
from uuid import UUID

from psycopg_pool import ConnectionPool

@@ -16,7 +16,7 @@ logging_workflow = hatchet.workflow(
@logging_workflow.task()
def root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
for i in range(12):
logger.info("executed step1 - {}".format(i))
logger.info(f"executed step1 - {i}")
logger.info({"step1": "step1"})

time.sleep(0.1)
@@ -32,7 +32,7 @@ def root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
@logging_workflow.task()
def context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
for i in range(12):
ctx.log("executed step1 - {}".format(i))
ctx.log(f"executed step1 - {i}")
ctx.log({"step1": "step1"})

time.sleep(0.1)

@@ -1,5 +1,6 @@
from collections.abc import Mapping
from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Mapping
from typing import Any

import requests
from pydantic import BaseModel
@@ -10,13 +11,13 @@ from hatchet_sdk.context.context import Context
from .hatchet_client import hatchet


async def process_image(image_url: str, filters: List[str]) -> Dict[str, Any]:
async def process_image(image_url: str, filters: list[str]) -> dict[str, Any]:
# Do some image processing
return {"url": image_url, "size": 100, "format": "png"}


# > Before (Mergent)
async def process_image_task(request: Any) -> Dict[str, Any]:
async def process_image_task(request: Any) -> dict[str, Any]:
image_url = request.json["image_url"]
filters = request.json["filters"]
try:
@@ -33,12 +34,12 @@ async def process_image_task(request: Any) -> Dict[str, Any]:
# > After (Hatchet)
class ImageProcessInput(BaseModel):
image_url: str
filters: List[str]
filters: list[str]


class ImageProcessOutput(BaseModel):
processed_url: str
metadata: Dict[str, Any]
metadata: dict[str, Any]


@hatchet.task(

@@ -1,3 +1,5 @@
import asyncio

import pytest

from examples.non_retryable.worker import (
@@ -9,6 +11,7 @@ from examples.non_retryable.worker import (
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType
from hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails
from hatchet_sdk.exceptions import FailedTaskRunExceptionGroup


def find_id(runs: V1WorkflowRunDetails, match: str) -> str:
@@ -19,9 +22,28 @@ def find_id(runs: V1WorkflowRunDetails, match: str) -> str:
async def test_no_retry(hatchet: Hatchet) -> None:
ref = await non_retryable_workflow.aio_run_no_wait()

with pytest.raises(Exception, match="retry"):
with pytest.raises(FailedTaskRunExceptionGroup) as exc_info:
await ref.aio_result()

exception_group = exc_info.value

assert len(exception_group.exceptions) == 2

exc_text = [e.exc for e in exception_group.exceptions]

non_retries = [
e
for e in exc_text
if "This task should retry because it's not a NonRetryableException" in e
]

other_errors = [e for e in exc_text if "This task should not retry" in e]

assert len(non_retries) == 1
assert len(other_errors) == 1

await asyncio.sleep(3)

runs = await hatchet.runs.aio_get(ref.workflow_run_id)
task_to_id = {
task: find_id(runs, task.name)
@@ -40,9 +62,7 @@ async def test_no_retry(hatchet: Hatchet) -> None:
assert len(retrying_events) == 1

"""The task id of the retrying events should match the tasks that are retried"""
assert {e.task_id for e in retrying_events} == {
task_to_id[should_retry_wrong_exception_type],
}
assert retrying_events[0].task_id == task_to_id[should_retry_wrong_exception_type]

"""Three failed events should emit, one each for the two failing initial runs and one for the retry."""
assert (

Some files were not shown because too many files have changed in this diff.