FE overhaul docs (#1640)

* api changes

* doc changes

* move docs

* generated

* generate

* pkg

* backmerge main

* revert to main

* revert main

* race?

* remove go tests
This commit is contained in:
Gabe Ruttner
2025-04-30 17:10:09 -04:00
committed by GitHub
parent 799b5d0dcf
commit 8e80faf2d6
1503 changed files with 36645 additions and 1235 deletions
@@ -0,0 +1,55 @@
import time
import pytest
from examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow
from hatchet_sdk.workflow_run import WorkflowRunRef
@pytest.mark.skip(reason="The timing for this test is not reliable")
@pytest.mark.asyncio(loop_scope="session")
async def test_run() -> None:
    """Round-robin concurrency smoke test.

    Starts two workflow runs per group, awaits every result, and asserts that
    all runs complete successfully (none cancelled by the concurrency limit)
    and that the total wall-clock time falls inside the expected scheduling
    window. Skipped because the timing is not reliable in CI.
    """
    num_groups = 2
    runs: list[WorkflowRunRef] = []

    # Start two runs per group; the group index itself is not needed.
    for _ in range(num_groups):
        runs.append(concurrency_limit_rr_workflow.run_no_wait())
        runs.append(concurrency_limit_rr_workflow.run_no_wait())

    # Wait for all results, partitioning them into successes and
    # concurrency-limit cancellations.
    successful_runs: list[tuple[int, object]] = []
    cancelled_runs: list[tuple[int, str]] = []
    start_time = time.time()

    # Process each run individually
    for i, run in enumerate(runs, start=1):
        try:
            result = await run.aio_result()
            successful_runs.append((i, result))
        except Exception as e:
            if "CANCELLED_BY_CONCURRENCY_LIMIT" in str(e):
                cancelled_runs.append((i, str(e)))
            else:
                raise  # Re-raise if it's an unexpected error

    total_time = time.time() - start_time

    # Check that we have the correct number of successful and cancelled runs
    assert (
        len(successful_runs) == 4
    ), f"Expected 4 successful runs, got {len(successful_runs)}"
    assert (
        len(cancelled_runs) == 0
    ), f"Expected 0 cancelled run, got {len(cancelled_runs)}"

    # With max_runs=1 per group and a 2s task, the expected runtime is about
    # 4 seconds; allow generous slack for scheduling jitter.
    assert (
        3.8 <= total_time <= 7
    ), f"Expected runtime to be about 4 seconds, but it took {total_time:.2f} seconds"
    print(f"Total execution time: {total_time:.2f} seconds")
@@ -0,0 +1,15 @@
from examples.concurrency_limit_rr.worker import (
WorkflowInput,
concurrency_limit_rr_workflow,
)
from hatchet_sdk import Hatchet
hatchet = Hatchet()

# Fire off 200 runs, alternating the concurrency group key: even indices go
# to group "1", odd indices to group "0".
for i in range(200):
    group = "1" if i % 2 == 0 else "0"
    concurrency_limit_rr_workflow.run(WorkflowInput(group=group))
@@ -0,0 +1,50 @@
import time
from pydantic import BaseModel
from hatchet_sdk import (
ConcurrencyExpression,
ConcurrencyLimitStrategy,
Context,
Hatchet,
)
# Hatchet client; debug=True enables verbose SDK logging.
hatchet = Hatchet(debug=True)


# > Concurrency Strategy With Key
class WorkflowInput(BaseModel):
    # Concurrency-group key: runs are partitioned by this value (see the
    # ConcurrencyExpression below, which reads "input.group").
    group: str


# Workflow limited to one concurrent run per group, with groups served in
# round-robin order.
concurrency_limit_rr_workflow = hatchet.workflow(
    name="ConcurrencyDemoWorkflowRR",
    concurrency=ConcurrencyExpression(
        # Expression evaluated against the run input to derive the group key.
        expression="input.group",
        # At most one run per group at a time.
        max_runs=1,
        limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,
    ),
    # Inputs are validated/parsed into WorkflowInput.
    input_validator=WorkflowInput,
)
@concurrency_limit_rr_workflow.task()
def step1(input: WorkflowInput, ctx: Context) -> None:
    """Single demo task: sleep 2 seconds to simulate work, logging start/end."""
    print("starting step1")
    time.sleep(2)
    print("finished step1")
def main() -> None:
    """Start a worker serving the round-robin concurrency demo workflow."""
    demo_worker = hatchet.worker(
        "concurrency-demo-worker-rr",
        slots=10,
        workflows=[concurrency_limit_rr_workflow],
    )
    demo_worker.start()


if __name__ == "__main__":
    main()