Feat: Misc. Python improvements + Streaming Improvements (#1846)

* fix: contextvars explicit copy
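
  A minimal sketch of the idea behind the explicit contextvars copy (names here are illustrative, not the SDK's internals): copy the caller's context at submission time so context variables propagate into work that runs on another thread.

      import contextvars
      from concurrent.futures import ThreadPoolExecutor

      request_id = contextvars.ContextVar("request_id", default="unset")

      def handler() -> str:
          # Runs inside the copied context, so it sees the caller's value.
          return request_id.get()

      def dispatch() -> str:
          request_id.set("abc-123")
          ctx = contextvars.copy_context()  # the explicit copy
          with ThreadPoolExecutor(max_workers=1) as pool:
              # ctx.run(handler) executes handler with the copied variables.
              return pool.submit(ctx.run, handler).result()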

* feat: fix a ton of ruff errors

* fix: couple more ruff rules

* fix: ignore unhelpful rule

* fix: exception group in newer Python versions for improved handling
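
  On Python 3.11+, multiple task failures can be surfaced together as an ExceptionGroup; a hedged sketch of that pattern follows (the SDK's actual FailedTaskRunExceptionGroup, used in the test diff further down, is its own class).

      import sys

      class TaskRunError(Exception):
          """Stand-in for a per-task failure; illustrative only."""

      errors = [TaskRunError("task a failed"), TaskRunError("task b failed")]

      if sys.version_info >= (3, 11):
          # Newer interpreters can report every failure at once.
          try:
              raise ExceptionGroup("workflow run failed", errors)
          except ExceptionGroup as group:
              for exc in group.exceptions:
                  print(f"failed: {exc}")
      else:
          # Older interpreters fall back to a single aggregate message.
          print("; ".join(str(e) for e in errors))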

* fix: workflow docs

* feat: context docs

* feat: simple task counter

* feat: config for setting max tasks

* feat: graceful exit once worker exceeds max tasks
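
  The max-tasks lifecycle in the three commits above, as a hypothetical sketch (the real config and worker internals differ): count completed tasks, stop accepting work once the configured limit is hit, and exit once in-flight work drains.

      import asyncio

      class WorkerLifecycle:
          def __init__(self, max_tasks: int | None = None) -> None:
              self.max_tasks = max_tasks  # None means no limit (hypothetical config)
              self.completed = 0
              self.running: set[asyncio.Task[None]] = set()
              self.draining = False

          def accepting(self) -> bool:
              return not self.draining

          def on_task_done(self, task: asyncio.Task[None]) -> None:
              self.running.discard(task)
              self.completed += 1
              if self.max_tasks is not None and self.completed >= self.max_tasks:
                  self.draining = True  # stop taking work; finish what's running

          async def wait_for_graceful_exit(self) -> None:
              while not (self.draining and not self.running):
                  await asyncio.sleep(0.1)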

* fix: optional

* fix: docs

* fix: events docs + gen

* chore: gen

* fix: one more dangling task

* feat: add xdist in ci

* fix: CI

* fix: xdist fails me once again

* fix: fix + extend some tests

* fix: test cleanup

* fix: exception group

* fix: ugh

* feat: changelog

* Add Ruff linter callout to post

* refactor: clean up runner error handling

* feat: improved errors

* fix: lint

* feat: hacky serde impl

* fix: improve serde + formatting

* fix: logging

* fix: lint

* fix: unexpected errors

* fix: naming, ruff

* fix: rm cruft

* Fix: Attempt to fix namespacing issue in event waits (#1885)

* feat: add xdist in ci

* fix: attempt to fix namespacing issue in event waits

* fix: namespaced worker names

* fix: applied namespace to the wrong thing

* fix: rm hack

* drive by: namespacing improvement
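
  The namespacing fixes above boil down to prefixing names with the configured namespace exactly once; a sketch consistent with the hatchet.config.apply_namespace calls in the test diff below (this standalone helper is illustrative):

      def apply_namespace(name: str, namespace: str | None) -> str:
          # Prefix once; leave already-namespaced names alone.
          if not namespace or name.startswith(namespace):
              return name
          return f"{namespace}{name}"

      assert apply_namespace("e2e-test-worker", "ns_") == "ns_e2e-test-worker"
      assert apply_namespace("ns_e2e-test-worker", "ns_") == "ns_e2e-test-worker"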

* fix: delay

* fix: changelog

* fix: initial log work

* fix: more logging work

* fix: rm print cruft

* feat: use a queue to send logs

* fix: sentinel value to stop the loop
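
  The log-sender commits describe a classic pattern: producers put log lines on a queue, a single consumer ships them, and a sentinel value terminates the loop. A minimal sketch under those assumptions (the SDK's internal names differ):

      import queue
      import threading

      STOP = object()  # sentinel value that stops the loop
      log_queue: queue.Queue[object] = queue.Queue()

      def log_sender() -> None:
          while True:
              item = log_queue.get()
              if item is STOP:
                  break
              print(f"ship log: {item}")  # stand-in for the real send call

      sender = threading.Thread(target=log_sender, daemon=True)
      sender.start()
      log_queue.put("executed step1 - 0")
      log_queue.put(STOP)
      sender.join()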

* fix: use the log sender everywhere

* fix: make streaming blocking, remove more thread pools

* feat: changelog

* fix: linting issues

* fix: broken test

* chore: bunch more generated stuff

* fix: changelog

* fix: one more

* fix: mypy

* chore: gen

* Feat: Streaming Improvements (#1886)

* Fix: Filter list improvements (#1899)

* fix: uuid validation
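
  "uuid validation" here means checking filter values before they reach a query; a generic sketch of that guard (illustrative only; the actual fix lives in the filter-list API):

      from uuid import UUID

      def valid_uuids(values: list[str]) -> list[str]:
          # Keep only values that parse as UUIDs instead of erroring in the query.
          out: list[str] = []
          for value in values:
              try:
                  UUID(value)
              except ValueError:
                  continue
              out.append(value)
          return out

      assert valid_uuids(["not-a-uuid", "7f6cbd39-0ca5-4a4a-9b1a-91e1fdbbf8d2"]) == [
          "7f6cbd39-0ca5-4a4a-9b1a-91e1fdbbf8d2"
      ]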

* fix: improve filter filtering

* fix: inner join

* fix: bug in workflow cached prop

* chore: bump

* fix: lint

* chore: changelog

* fix: separate filter queries

* feat: improve filter filtering

* fix: queries and the like

* feat: add xdist in ci

* feat: streaming test + gen

* feat: add index to stream event

* fix: rm langfuse dep

* fix: lf

* chore: gen

* feat: impl index for stream on context

* feat: tweak protos

* feat: extend test

* feat: send event index through queue

* feat: first pass + debug logging

* debug: fixes

* debug: more possible issues

* feat: generate new stream event protos

* feat: first pass at using an alternate exchange for replaying incoming stream events

* fix: exchange create timing

* fix: rm unused protos

* chore: gen

* feat: python cleanup

* fix: revert rabbit changes

* fix: unwind a bunch of cruft

* fix: optional index

* chore: gen python

* fix: event index nil handling

* feat: improve test

* fix: stream impl in sdk

* fix: make test faster

* chore: gen a ton more stuff

* fix: test

* fix: sorting helper

* fix: bug

* fix: one more ordering bug

* feat: add some tests for buffering logic

* feat: hangup test

* feat: test no buffering if no index sent
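
  The buffering tests above pin down the intended semantics: chunks carrying an index are replayed in index order, buffering out-of-order arrivals, while chunks without an index pass through unbuffered. A sketch of that logic (illustrative, not the engine's implementation):

      def replay_in_order(events: list[tuple[int | None, str]]) -> list[str]:
          buffered: dict[int, str] = {}
          next_ix = 0
          out: list[str] = []
          for ix, payload in events:
              if ix is None:
                  out.append(payload)  # no index sent: no buffering
                  continue
              buffered[ix] = payload
              while next_ix in buffered:  # flush any now-contiguous run
                  out.append(buffered.pop(next_ix))
                  next_ix += 1
          return out

      assert replay_in_order([(1, "b"), (0, "a"), (2, "c")]) == ["a", "b", "c"]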

* fix: regular mutex

* fix: pr feedback

* fix: conflicts
Matt Kaye authored 2025-06-25 10:11:01 -04:00, committed by GitHub
parent eb08481483 · commit 2f33dd4dbd
157 changed files with 3243 additions and 1369 deletions

View File

@@ -33,7 +33,6 @@ def step1(input: WorkflowInput, ctx: Context) -> None:
     print("starting step1")
     time.sleep(2)
     print("finished step1")
-    pass
 def main() -> None:

View File

@@ -3,7 +3,7 @@ from datetime import timedelta
 from typing import Any
 from hatchet_sdk import Context, EmptyModel, Hatchet, TriggerWorkflowOptions
-from hatchet_sdk.clients.admin import DedupeViolationErr
+from hatchet_sdk.exceptions import DedupeViolationError
 hatchet = Hatchet(debug=True)
@@ -20,15 +20,13 @@ async def spawn(input: EmptyModel, ctx: Context) -> dict[str, list[Any]]:
     for i in range(2):
         try:
             results.append(
-                (
-                    dedupe_child_wf.aio_run(
-                        options=TriggerWorkflowOptions(
-                            additional_metadata={"dedupe": "test"}, key=f"child{i}"
-                        ),
-                    )
+                dedupe_child_wf.aio_run(
+                    options=TriggerWorkflowOptions(
+                        additional_metadata={"dedupe": "test"}, key=f"child{i}"
+                    ),
+                )
             )
-        except DedupeViolationErr as e:
+        except DedupeViolationError as e:
             print(f"dedupe violation {e}")
             continue

View File

@@ -1,5 +1,4 @@
 import asyncio
-import os
 import pytest
@@ -7,10 +6,6 @@ from examples.durable.worker import EVENT_KEY, SLEEP_TIME, durable_workflow
 from hatchet_sdk import Hatchet
-@pytest.mark.skipif(
-    os.getenv("CI", "false").lower() == "true",
-    reason="Skipped in CI because of unreliability",
-)
 @pytest.mark.asyncio(loop_scope="session")
 async def test_durable(hatchet: Hatchet) -> None:
     ref = durable_workflow.run_no_wait()
@@ -28,6 +23,12 @@ async def test_durable(hatchet: Hatchet) -> None:
     active_workers = [w for w in workers.rows if w.status == "ACTIVE"]
     assert len(active_workers) == 2
-    assert any(w.name == "e2e-test-worker" for w in active_workers)
-    assert any(w.name.endswith("e2e-test-worker_durable") for w in active_workers)
+    assert any(
+        w.name == hatchet.config.apply_namespace("e2e-test-worker")
+        for w in active_workers
+    )
+    assert any(
+        w.name == hatchet.config.apply_namespace("e2e-test-worker_durable")
+        for w in active_workers
+    )
     assert result["durable_task"]["status"] == "success"

View File

@@ -30,7 +30,7 @@ hatchet.event.push(
 hatchet.event.push(
     event_key=EVENT_KEY,
     payload={
-        "should_skip": True,
+        "should_skip": False,
     },
     options=PushEventOptions(
         scope="foobarbaz",

View File

@@ -1,8 +1,9 @@
 import asyncio
 import json
+from collections.abc import AsyncGenerator
 from contextlib import asynccontextmanager
 from datetime import datetime, timedelta, timezone
-from typing import AsyncGenerator, cast
+from typing import cast
 from uuid import uuid4
 import pytest
@@ -255,7 +256,9 @@ async def test_async_event_bulk_push(hatchet: Hatchet) -> None:
     namespace = "bulk-test"
     # Check that the returned events match the original events
-    for original_event, returned_event in zip(sorted_events, sorted_returned_events):
+    for original_event, returned_event in zip(
+        sorted_events, sorted_returned_events, strict=False
+    ):
         assert returned_event.key == namespace + original_event.key

View File

@@ -44,7 +44,7 @@ event_workflow_with_filter = hatchet.workflow(
 def task(input: EventWorkflowInput, ctx: Context) -> dict[str, str]:
     print("event received")
-    return dict(ctx.filter_payload)
+    return ctx.filter_payload
 # > Accessing the filter payload

View File

@@ -1,6 +1,7 @@
 # > Lifespan
-from typing import AsyncGenerator, cast
+from collections.abc import AsyncGenerator
+from typing import cast
 from pydantic import BaseModel

View File

@@ -1,4 +1,5 @@
-from typing import AsyncGenerator, cast
+from collections.abc import AsyncGenerator
+from typing import cast
 from uuid import UUID
 from psycopg_pool import ConnectionPool

View File

@@ -16,7 +16,7 @@ logging_workflow = hatchet.workflow(
 @logging_workflow.task()
 def root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
     for i in range(12):
-        logger.info("executed step1 - {}".format(i))
+        logger.info(f"executed step1 - {i}")
         logger.info({"step1": "step1"})
         time.sleep(0.1)
@@ -31,7 +31,7 @@ def root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
 @logging_workflow.task()
 def context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
     for i in range(12):
-        ctx.log("executed step1 - {}".format(i))
+        ctx.log(f"executed step1 - {i}")
         ctx.log({"step1": "step1"})
         time.sleep(0.1)

View File

@@ -1,5 +1,6 @@
+from collections.abc import Mapping
 from datetime import datetime, timedelta, timezone
-from typing import Any, Dict, List, Mapping
+from typing import Any
 import requests
 from pydantic import BaseModel
@@ -10,13 +11,13 @@ from hatchet_sdk.context.context import Context
 from .hatchet_client import hatchet
-async def process_image(image_url: str, filters: List[str]) -> Dict[str, Any]:
+async def process_image(image_url: str, filters: list[str]) -> dict[str, Any]:
     # Do some image processing
     return {"url": image_url, "size": 100, "format": "png"}
 # > Before (Mergent)
-async def process_image_task(request: Any) -> Dict[str, Any]:
+async def process_image_task(request: Any) -> dict[str, Any]:
     image_url = request.json["image_url"]
     filters = request.json["filters"]
     try:
@@ -32,12 +33,12 @@ async def process_image_task(request: Any) -> Dict[str, Any]:
 # > After (Hatchet)
 class ImageProcessInput(BaseModel):
     image_url: str
-    filters: List[str]
+    filters: list[str]
 class ImageProcessOutput(BaseModel):
     processed_url: str
-    metadata: Dict[str, Any]
+    metadata: dict[str, Any]
 @hatchet.task(

View File

@@ -1,3 +1,5 @@
+import asyncio
 import pytest
 from examples.non_retryable.worker import (
@@ -9,6 +11,7 @@ from examples.non_retryable.worker import (
 from hatchet_sdk import Hatchet
 from hatchet_sdk.clients.rest.models.v1_task_event_type import V1TaskEventType
 from hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails
+from hatchet_sdk.exceptions import FailedTaskRunExceptionGroup
 def find_id(runs: V1WorkflowRunDetails, match: str) -> str:
@@ -19,9 +22,28 @@ def find_id(runs: V1WorkflowRunDetails, match: str) -> str:
 async def test_no_retry(hatchet: Hatchet) -> None:
     ref = await non_retryable_workflow.aio_run_no_wait()
-    with pytest.raises(Exception, match="retry"):
+    with pytest.raises(FailedTaskRunExceptionGroup) as exc_info:
         await ref.aio_result()
+    exception_group = exc_info.value
+    assert len(exception_group.exceptions) == 2
+    exc_text = [e.exc for e in exception_group.exceptions]
+    non_retries = [
+        e
+        for e in exc_text
+        if "This task should retry because it's not a NonRetryableException" in e
+    ]
+    other_errors = [e for e in exc_text if "This task should not retry" in e]
+    assert len(non_retries) == 1
+    assert len(other_errors) == 1
+    await asyncio.sleep(3)
     runs = await hatchet.runs.aio_get(ref.workflow_run_id)
     task_to_id = {
         task: find_id(runs, task.name)
@@ -40,9 +62,7 @@ async def test_no_retry(hatchet: Hatchet) -> None:
     assert len(retrying_events) == 1
     """The task id of the retrying events should match the tasks that are retried"""
-    assert {e.task_id for e in retrying_events} == {
-        task_to_id[should_retry_wrong_exception_type],
-    }
+    assert retrying_events[0].task_id == task_to_id[should_retry_wrong_exception_type]
     """Three failed events should emit, one each for the two failing initial runs and one for the retry."""
     assert (

View File

@@ -1,8 +1,8 @@
 import base64
 import os
-from langfuse import Langfuse  # type: ignore[import-untyped]
-from langfuse.openai import AsyncOpenAI  # type: ignore[import-untyped]
+from langfuse import Langfuse  # type: ignore
+from langfuse.openai import AsyncOpenAI  # type: ignore
 # > Configure Langfuse
 LANGFUSE_AUTH = base64.b64encode(

View File

@@ -1,6 +1,6 @@
 import asyncio
-from langfuse import get_client  # type: ignore[import-untyped]
+from langfuse import get_client  # type: ignore
 from opentelemetry.trace import StatusCode
 from examples.opentelemetry_instrumentation.langfuse.worker import langfuse_task

View File

@@ -1,8 +1,9 @@
 import asyncio
+from collections.abc import AsyncGenerator
 from datetime import datetime, timedelta, timezone
 from random import choice
 from subprocess import Popen
-from typing import Any, AsyncGenerator, Literal
+from typing import Any, Literal
 from uuid import uuid4
 import pytest
@@ -58,7 +59,7 @@ async def dummy_runs() -> None:
     await asyncio.sleep(3)
-    return None
+    return
 @pytest.mark.parametrize(

View File

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand.
 [[package]]
 name = "aiohappyeyeballs"
@@ -114,7 +114,7 @@ propcache = ">=0.2.0"
 yarl = ">=1.17.0,<2.0"
 [package.extras]
-speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
+speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
 [[package]]
 name = "aiohttp-retry"
@@ -199,12 +199,12 @@ files = [
 ]
 [package.extras]
-benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
 docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
-tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
+tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
 [[package]]
 name = "cel-python"
@@ -460,14 +460,14 @@ setuptools = "*"
 [[package]]
 name = "hatchet-sdk"
-version = "1.12.2"
+version = "1.0.0a1"
 description = ""
 optional = false
 python-versions = "<4.0,>=3.10"
 groups = ["main"]
 files = [
-    {file = "hatchet_sdk-1.12.2-py3-none-any.whl", hash = "sha256:a2701fc9fe277935346bc1f974bce075afa61d6aae1e43ef01d3e9a06abd30ce"},
-    {file = "hatchet_sdk-1.12.2.tar.gz", hash = "sha256:77a91539640d732523bff6135593d56171f5cb0185b3e86bbc561e27b18e9a32"},
+    {file = "hatchet_sdk-1.0.0a1-py3-none-any.whl", hash = "sha256:bfc84358c8842cecd0d95b30645109733b7292dff0db1a776ca862785ee93d7f"},
+    {file = "hatchet_sdk-1.0.0a1.tar.gz", hash = "sha256:f0272bbaac6faed75ff727826e9f7b1ac42ae597f9b590e14d392aada9c9692f"},
 ]
 [package.dependencies]
@@ -483,11 +483,13 @@ grpcio-tools = [
     {version = ">=1.64.1,<1.68.dev0 || >=1.69.dev0", markers = "python_version < \"3.13\""},
     {version = ">=1.69.0", markers = "python_version >= \"3.13\""},
 ]
+nest-asyncio = ">=1.6.0,<2.0.0"
 prometheus-client = ">=0.21.1,<0.22.0"
-protobuf = ">=5.29.5,<6.0.0"
+protobuf = ">=5.29.1,<6.0.0"
 pydantic = ">=2.6.3,<3.0.0"
 pydantic-settings = ">=2.7.1,<3.0.0"
 python-dateutil = ">=2.9.0.post0,<3.0.0"
 pyyaml = ">=6.0.1,<7.0.0"
 tenacity = ">=8.4.1"
 urllib3 = ">=1.26.20"
@@ -643,6 +645,18 @@ files = [
 [package.dependencies]
 typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}
+[[package]]
+name = "nest-asyncio"
+version = "1.6.0"
+description = "Patch asyncio to allow nested event loops"
+optional = false
+python-versions = ">=3.5"
+groups = ["main"]
+files = [
+    {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
+    {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
+]
 [[package]]
 name = "prometheus-client"
 version = "0.21.1"
@@ -768,23 +782,23 @@ files = [
 [[package]]
 name = "protobuf"
-version = "5.29.5"
+version = "5.29.4"
 description = ""
 optional = false
 python-versions = ">=3.8"
 groups = ["main"]
 files = [
-    {file = "protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079"},
-    {file = "protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc"},
-    {file = "protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671"},
-    {file = "protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015"},
-    {file = "protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61"},
-    {file = "protobuf-5.29.5-cp38-cp38-win32.whl", hash = "sha256:ef91363ad4faba7b25d844ef1ada59ff1604184c0bcd8b39b8a6bef15e1af238"},
-    {file = "protobuf-5.29.5-cp38-cp38-win_amd64.whl", hash = "sha256:7318608d56b6402d2ea7704ff1e1e4597bee46d760e7e4dd42a3d45e24b87f2e"},
-    {file = "protobuf-5.29.5-cp39-cp39-win32.whl", hash = "sha256:6f642dc9a61782fa72b90878af134c5afe1917c89a568cd3476d758d3c3a0736"},
-    {file = "protobuf-5.29.5-cp39-cp39-win_amd64.whl", hash = "sha256:470f3af547ef17847a28e1f47200a1cbf0ba3ff57b7de50d22776607cd2ea353"},
-    {file = "protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5"},
-    {file = "protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84"},
+    {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"},
+    {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"},
+    {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"},
+    {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"},
+    {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"},
+    {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"},
+    {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"},
+    {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"},
+    {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"},
+    {file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"},
+    {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"},
 ]
 [[package]]
@@ -806,7 +820,7 @@ typing-extensions = ">=4.12.2"
 [package.extras]
 email = ["email-validator (>=2.0.0)"]
-timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""]
+timezone = ["tzdata"]
 [[package]]
 name = "pydantic-core"
@@ -1048,13 +1062,13 @@ files = [
 ]
 [package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
-core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]
+core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
 cover = ["pytest-cov"]
 doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
 enabler = ["pytest-enabler (>=2.2)"]
-test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
-type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]
 [[package]]
 name = "six"
@@ -1133,7 +1147,7 @@ files = [
 ]
 [package.extras]
-brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
 h2 = ["h2 (>=4,<5)"]
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]
@@ -1238,4 +1252,4 @@ propcache = ">=0.2.0"
 [metadata]
 lock-version = "2.1"
 python-versions = "^3.10"
-content-hash = "fe1fd90bab8fe4470ec4afbd5c1331962a821f6751a8f02bea567f9b9f44b815"
+content-hash = "74c12e499aa797ca5c8559af579f1212b0e4e3a77f068f9385db39d70ba304e0"

View File

@@ -1,19 +1,16 @@
 import asyncio
-from examples.streaming.worker import streaming_workflow
+from examples.streaming.worker import stream_task
+from hatchet_sdk.clients.listeners.run_event_listener import StepRunEventType
 async def main() -> None:
-    ref = await streaming_workflow.aio_run_no_wait()
-    await asyncio.sleep(1)
+    ref = await stream_task.aio_run_no_wait()
-    stream = ref.stream()
-    async for chunk in stream:
-        print(chunk)
+    async for chunk in ref.stream():
+        if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:
+            print(chunk.payload, flush=True, end="")
 if __name__ == "__main__":
-    import asyncio
     asyncio.run(main())

View File

@@ -1,10 +1,10 @@
 import time
-from examples.streaming.worker import streaming_workflow
+from examples.streaming.worker import stream_task
 def main() -> None:
-    ref = streaming_workflow.run_no_wait()
+    ref = stream_task.run_no_wait()
     time.sleep(1)
     stream = ref.stream()
View File

@@ -0,0 +1,47 @@
+import asyncio
+from datetime import datetime, timedelta, timezone
+from subprocess import Popen
+from typing import Any
+import pytest
+from examples.streaming.worker import chunks, stream_task
+from hatchet_sdk import Hatchet
+from hatchet_sdk.clients.listeners.run_event_listener import (
+    StepRunEvent,
+    StepRunEventType,
+)
+@pytest.mark.parametrize(
+    "on_demand_worker",
+    [
+        (
+            ["poetry", "run", "python", "examples/streaming/worker.py", "--slots", "1"],
+            8008,
+        )
+    ],
+    indirect=True,
+)
+@pytest.mark.parametrize("execution_number", range(1))
+@pytest.mark.asyncio(loop_scope="session")
+async def test_streaming_ordering_and_completeness(
+    execution_number: int,
+    hatchet: Hatchet,
+    on_demand_worker: Popen[Any],
+) -> None:
+    ref = await stream_task.aio_run_no_wait()
+    ix = 0
+    anna_karenina = ""
+    async for chunk in ref.stream():
+        if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:
+            assert chunks[ix] == chunk.payload
+            ix += 1
+            anna_karenina += chunk.payload
+    assert ix == len(chunks)
+    assert anna_karenina == "".join(chunks)
+    await ref.aio_result()

View File

@@ -1,23 +1,39 @@
 import asyncio
-from datetime import datetime, timedelta, timezone
+from typing import Generator
 from hatchet_sdk import Context, EmptyModel, Hatchet
-hatchet = Hatchet(debug=True)
+hatchet = Hatchet(debug=False)
 # > Streaming
-streaming_workflow = hatchet.workflow(name="StreamingWorkflow")
+content = """
+Happy families are all alike; every unhappy family is unhappy in its own way.
+Everything was in confusion in the Oblonskys' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him. This position of affairs had now lasted three days, and not only the husband and wife themselves, but all the members of their family and household, were painfully conscious of it. Every person in the house felt that there was so sense in their living together, and that the stray people brought together by chance in any inn had more in common with one another than they, the members of the family and household of the Oblonskys. The wife did not leave her own room, the husband had not been at home for three days. The children ran wild all over the house; the English governess quarreled with the housekeeper, and wrote to a friend asking her to look out for a new situation for her; the man-cook had walked off the day before just at dinner time; the kitchen-maid, and the coachman had given warning.
+"""
-@streaming_workflow.task()
-async def step1(input: EmptyModel, ctx: Context) -> None:
-    for i in range(10):
-        await asyncio.sleep(1)
-        ctx.put_stream(f"Processing {i}")
+def create_chunks(content: str, n: int) -> Generator[str, None, None]:
+    for i in range(0, len(content), n):
+        yield content[i : i + n]
+chunks = list(create_chunks(content, 10))
+@hatchet.task()
+async def stream_task(input: EmptyModel, ctx: Context) -> None:
+    await asyncio.sleep(2)
+    for chunk in chunks:
+        ctx.put_stream(chunk)
+        await asyncio.sleep(0.05)
 def main() -> None:
-    worker = hatchet.worker("test-worker", workflows=[streaming_workflow])
+    worker = hatchet.worker("test-worker", workflows=[stream_task])
     worker.start()
View File

@@ -7,7 +7,10 @@ from examples.timeout.worker import refresh_timeout_wf, timeout_wf
 async def test_execution_timeout() -> None:
     run = timeout_wf.run_no_wait()
-    with pytest.raises(Exception, match="(Task exceeded timeout|TIMED_OUT)"):
+    with pytest.raises(
+        Exception,
+        match="(Task exceeded timeout|TIMED_OUT|Workflow run .* failed with multiple errors)",
+    ):
         await run.aio_result()

View File

@@ -1,5 +1,4 @@
 import asyncio
-import os
 import pytest
@@ -7,10 +6,6 @@ from examples.waits.worker import task_condition_workflow
 from hatchet_sdk import Hatchet
-@pytest.mark.skipif(
-    os.getenv("CI", "false").lower() == "true",
-    reason="Skipped in CI because of unreliability",
-)
 @pytest.mark.asyncio(loop_scope="session")
 async def test_waits(hatchet: Hatchet) -> None: