mirror of
https://github.com/hatchet-dev/hatchet.git
synced 2026-01-04 07:39:43 -06:00
* fix: register durable steps and workflows separately * chore: initial copy of pooled listener * feat: initial generic impl * feat: use pooled listener for wf run listener * refactor: move listeners to subdir * feat: refactor durable event listener * fix: bug * feat: share single pooled workflow listener and event listener everywhere * cruft: rm hatchet fixture * fix: rebase issue * feat: remove asyncio api client in favor of sync one * chore: minor version * proposal: crazy hack idea to make the workflow run listener work * fix: sleeps and error handling * Revert "cruft: rm hatchet fixture" This reverts commit b75f625e6ccec095e8c4e294d6727db166796411. * fix: set timeout * fix: rm pytest-timeout * fix: rm retry * fix: use v1 by default * fix: try removing retry state * fix: try using async client? * fix: try running sequentially * debug: loop * debug: maybe it's this? * fix: lint * fix: re-remove unused fixtures * fix: lazily create clients in admin client * fix: default * fix: lazily initialize dispatcher client * fix: hint * fix: no. way. * feat: add back retries in ci * fix: clients + imports * fix: loop scope * debug: try running skipped tests in ci again * Revert "debug: try running skipped tests in ci again" This reverts commit 8d9e18150e5207ee6051d8df8a6fe2a7504c722e. * fix: rm duped code * refactor: rename everything as `to_proto` * refactor: removals of `namespace` being passed around * fix: task output stupidity * feat: add deprecation warning * fix: remove more unused code * feat: mix sync and async in dag example * fix: autouse * fix: more input types * feat: remove ability to pass in loop * fix: overload key gen
55 lines
1.8 KiB
Python
55 lines
1.8 KiB
Python
import pytest
|
|
|
|
from hatchet_sdk.clients.events import BulkPushEventOptions, BulkPushEventWithMetadata
|
|
from hatchet_sdk.hatchet import Hatchet
|
|
|
|
|
|
@pytest.mark.asyncio(loop_scope="session")
async def test_event_push(hatchet: Hatchet) -> None:
    """Push a single event via the synchronous API and check that the
    server assigned it an event id."""
    event = hatchet.event.push("user:create", {"test": "test"})

    assert event.eventId is not None
|
|
|
|
|
|
@pytest.mark.asyncio(loop_scope="session")
async def test_async_event_push(hatchet: Hatchet) -> None:
    """Push a single event via the async API and check that the
    server assigned it an event id."""
    event = await hatchet.event.aio_push("user:create", {"test": "test"})

    assert event.eventId is not None
|
|
|
|
|
|
@pytest.mark.asyncio(loop_scope="session")
async def test_async_event_bulk_push(hatchet: Hatchet) -> None:
    """Bulk-push three events under a namespace and verify that each
    returned event's key is the original key prefixed with that namespace.

    Fix over the previous version: the namespace string was hard-coded in
    two places ("bulk-test" in the push options and again in a local
    variable) and the expected event count was a separate magic number 3;
    both are now derived from a single definition.
    """
    # Single source of truth: used both in the push options and to compute
    # the expected prefixed keys below.
    namespace = "bulk-test"

    events = [
        BulkPushEventWithMetadata(
            key="event1",
            payload={"message": "This is event 1"},
            additional_metadata={"source": "test", "user_id": "user123"},
        ),
        BulkPushEventWithMetadata(
            key="event2",
            payload={"message": "This is event 2"},
            additional_metadata={"source": "test", "user_id": "user456"},
        ),
        BulkPushEventWithMetadata(
            key="event3",
            payload={"message": "This is event 3"},
            additional_metadata={"source": "test", "user_id": "user789"},
        ),
    ]

    returned_events = await hatchet.event.aio_bulk_push(
        events, BulkPushEventOptions(namespace=namespace)
    )

    # One returned event per pushed event.
    assert len(returned_events) == len(events)

    # Sort both lists by key so the pairwise comparison is order-independent.
    sorted_events = sorted(events, key=lambda x: x.key)
    sorted_returned_events = sorted(returned_events, key=lambda x: x.key)

    for original_event, returned_event in zip(sorted_events, sorted_returned_events):
        # Expected contract (per the original assertion): the server returns
        # each key prefixed with the push namespace.
        assert returned_event.key == namespace + original_event.key
|