Fix: Auto-generate docs snippets and examples (#2139)

* fix: gitignore all the generated stuff

* debug: try fixing build

* debug: build error part ii

* debug: move more deps out of dev

* fix: lock

* debug: lockfile

* fix: make dir

* fix: ci

* fix: dir

* debug: sed

* fix: sed

* debug: allow skipping

* Revert "debug: allow skipping"

This reverts commit 88e0ff870d.

* debug: ci

* fix: corepack

* debug: dir

* debug: sed

* debug: path

* fix: rm sync docs for now

* fix: remove more stuff

* fix: rm unused stuff

* fix: rm copy:app

* chore: lint

* fix: rm prettier from boot

* fix: couple missing scripts

* feat: auto-gen examples on push to main

* debug: test on this branch

* fix: install pnpm

* fix: cd

* fix: cmd

* Auto-generate files after merge [skip ci]

* fix: only copy examples

* debug: dummy commit for examples check

* chore: regenerate examples

* fix: naming

* fix: unwind dummy

* fix: only run on main

* fix: pre commit

* fix: naming

* chore: gen, fix task pre

* feat: create pr

* feat: only push examples changes

* fix: don't run from this branch

* fix: regen lockfile

* fix: regen docs lockfile

---------

Co-authored-by: GitHub Action <action@github.com>
This commit is contained in:
matt
2025-08-14 18:17:29 -05:00
committed by GitHub
parent 266fb077ac
commit 57bb24aef1
981 changed files with 2365 additions and 15287 deletions
@@ -0,0 +1,47 @@
import asyncio
import time
from uuid import uuid4
import pytest
from examples.concurrency_cancel_in_progress.worker import (
WorkflowInput,
concurrency_cancel_in_progress_workflow,
)
from hatchet_sdk import Hatchet, TriggerWorkflowOptions, V1TaskStatus, WorkflowRunRef
@pytest.mark.asyncio(loop_scope="session")
async def test_run(hatchet: Hatchet) -> None:
    """Trigger ten runs in one concurrency group and check that only the
    newest run completes while the nine older ones are cancelled."""
    test_run_id = str(uuid4())

    # Kick off ten staggered runs, all in group "A", tagged so we can
    # find them again via the runs listing below.
    refs: list[WorkflowRunRef] = []
    for index in range(10):
        trigger_opts = TriggerWorkflowOptions(
            additional_metadata={"test_run_id": test_run_id, "i": str(index)},
        )
        next_ref = await concurrency_cancel_in_progress_workflow.aio_run_no_wait(
            WorkflowInput(group="A"),
            options=trigger_opts,
        )
        refs.append(next_ref)
        await asyncio.sleep(1)

    # Drain every run; cancelled runs raise from aio_result, which is
    # expected for all but the final one, so errors are ignored here.
    for ref in refs:
        print(f"Waiting for run {ref.workflow_run_id} to complete")
        try:
            await ref.aio_result()
        except Exception:
            continue

    # Give the OLAP repo time to catch up before listing runs.
    await asyncio.sleep(5)

    listed = hatchet.runs.list(additional_metadata={"test_run_id": test_run_id})
    runs = sorted(
        listed.rows,
        key=lambda r: int((r.additional_metadata or {}).get("i", "0")),
    )

    assert len(runs) == 10
    newest = runs[-1]
    assert (newest.additional_metadata or {}).get("i") == "9"
    assert newest.status == V1TaskStatus.COMPLETED
    assert all(run.status == V1TaskStatus.CANCELLED for run in runs[:-1])
@@ -0,0 +1,39 @@
import asyncio
from pydantic import BaseModel
from hatchet_sdk import (
ConcurrencyExpression,
ConcurrencyLimitStrategy,
Context,
Hatchet,
)
# Shared Hatchet client for this example module; debug=True enables verbose SDK logging.
hatchet = Hatchet(debug=True)
class WorkflowInput(BaseModel):
    """Validated input for the workflow."""
    # Concurrency key: runs carrying the same `group` value contend for the
    # single concurrency slot configured below.
    group: str
# Workflow capped at one in-flight run per `input.group`. With the
# CANCEL_IN_PROGRESS strategy, a newly triggered run for the same group
# displaces (cancels) the run that is currently executing rather than
# waiting behind it.
concurrency_cancel_in_progress_workflow = hatchet.workflow(
    name="ConcurrencyCancelInProgress",
    concurrency=ConcurrencyExpression(
        # Expression selecting the concurrency key from the run's input payload.
        expression="input.group",
        max_runs=1,
        limit_strategy=ConcurrencyLimitStrategy.CANCEL_IN_PROGRESS,
    ),
    input_validator=WorkflowInput,
)
@concurrency_cancel_in_progress_workflow.task()
async def step1(input: WorkflowInput, ctx: Context) -> None:
    """Sleep ~5 seconds in 0.1 s slices, yielding to the event loop each
    slice so an in-flight run can be cancelled between awaits."""
    ticks_remaining = 50
    while ticks_remaining > 0:
        await asyncio.sleep(0.10)
        ticks_remaining -= 1
@concurrency_cancel_in_progress_workflow.task(parents=[step1])
async def step2(input: WorkflowInput, ctx: Context) -> None:
    """Second stage (runs after step1): sleep ~5 seconds in 0.1 s slices,
    yielding to the event loop so cancellation can land between awaits."""
    ticks_remaining = 50
    while ticks_remaining > 0:
        await asyncio.sleep(0.10)
        ticks_remaining -= 1