mirror of
https://github.com/hatchet-dev/hatchet.git
synced 2026-03-20 11:40:32 -05:00
* fix: retrieve payloads in bulk * fix: hash -> idempotency key * feat: initial hashing work * feat: check idempotency key if entry exists * fix: panic * feat: initial work on custom error for non-determinism * fix: handle nondeterminism error properly * feat: add error response, pub message to task controller * chore: lint * feat: add node id field to error proto * chore: rm a bunch of unhelpful cancellation logs * fix: conflict issues * fix: rm another log * fix: send node id properly * fix: improve what we hash * fix: improve error handling * fix: python issues * fix: don't hash or group id * fix: rm print * feat: add python test * fix: add timeout * fix: improve handling of non determinism error * fix: propagate node id through * fix: types, test * fix: make serializable * fix: no need to cancel internally anymore * fix: hide another internal log * fix: add link to docs * fix: copilot * fix: use sha256 * fix: test cleanup * fix: add error type enum * fix: handle exceptions on the worker * fix: clean up a bunch of cursor imports * fix: cursor docstring formatting * fix: simplify idempotency key func * fix: add back cancellation logs * feat: tests for idempotency keys * fix: add a couple more for priority and metadata * chore: gen * fix: python reconnect * fix: noisy error * fix: improve log * fix: don't run durable listener if no durable tasks are registered * fix: non-null idempotency keys
91 lines
2.4 KiB
Python
91 lines
2.4 KiB
Python
import pytest
|
|
|
|
from examples.unit_testing.workflows import (
|
|
Lifespan,
|
|
UnitTestInput,
|
|
UnitTestOutput,
|
|
async_complex_workflow,
|
|
async_simple_workflow,
|
|
async_standalone,
|
|
durable_async_complex_workflow,
|
|
durable_async_simple_workflow,
|
|
durable_async_standalone,
|
|
start,
|
|
sync_complex_workflow,
|
|
sync_simple_workflow,
|
|
sync_standalone,
|
|
)
|
|
from hatchet_sdk import Task
|
|
|
|
|
|
@pytest.mark.parametrize(
    "func",
    [
        sync_standalone,
        sync_simple_workflow,
        sync_complex_workflow,
    ],
)
def test_simple_unit_sync(func: Task[UnitTestInput, UnitTestOutput]) -> None:
    """Run each sync task via `mock_run` and assert it echoes the mocked context.

    The workflows under test are expected to return a `UnitTestOutput` that
    mirrors the input, metadata, retry count, and lifespan values passed in,
    so the expected output is constructed directly from those same values.
    """
    # Renamed from `input` to avoid shadowing the `input` builtin.
    test_input = UnitTestInput(key="test_key", number=42)
    additional_metadata = {"meta_key": "meta_value"}
    lifespan = Lifespan(mock_db_url="sqlite:///:memory:")
    retry_count = 1

    expected_output = UnitTestOutput(
        key=test_input.key,
        number=test_input.number,
        additional_metadata=additional_metadata,
        retry_count=retry_count,
        mock_db_url=lifespan.mock_db_url,
    )

    assert (
        func.mock_run(
            input=test_input,
            additional_metadata=additional_metadata,
            lifespan=lifespan,
            retry_count=retry_count,
            # Supply the `start` step's output so downstream steps that read
            # parent outputs can resolve it without a running engine.
            parent_outputs={start.name: expected_output.model_dump()},
        )
        == expected_output
    )
|
|
|
|
|
|
@pytest.mark.parametrize(
    "func",
    [
        async_standalone,
        durable_async_standalone,
        async_simple_workflow,
        durable_async_simple_workflow,
        async_complex_workflow,
        durable_async_complex_workflow,
    ],
)
@pytest.mark.asyncio(loop_scope="session")
async def test_simple_unit_async(func: Task[UnitTestInput, UnitTestOutput]) -> None:
    """Run each async (and durable async) task via `aio_mock_run` and assert
    it echoes the mocked context.

    Mirrors `test_simple_unit_sync`: the expected output is built from the
    same values fed into the mock run, since the workflows under test echo
    their input, metadata, retry count, and lifespan state back.
    """
    # Renamed from `input` to avoid shadowing the `input` builtin.
    test_input = UnitTestInput(key="test_key", number=42)
    additional_metadata = {"meta_key": "meta_value"}
    lifespan = Lifespan(mock_db_url="sqlite:///:memory:")
    retry_count = 1

    expected_output = UnitTestOutput(
        key=test_input.key,
        number=test_input.number,
        additional_metadata=additional_metadata,
        retry_count=retry_count,
        mock_db_url=lifespan.mock_db_url,
    )

    assert (
        await func.aio_mock_run(
            input=test_input,
            additional_metadata=additional_metadata,
            lifespan=lifespan,
            retry_count=retry_count,
            # Supply the `start` step's output so downstream steps that read
            # parent outputs can resolve it without a running engine.
            parent_outputs={start.name: expected_output.model_dump()},
        )
        == expected_output
    )
|