Feat: Top-level stream consumer in the SDKs (#1917)

* feat: add stream sub on top level

* feat: clean up examples

* chore: gen

* feat: move stream onto the runs client in ts

* fix: examples

* chore: gen

* fix: circular import issues

* chore: lint

* feat: first pass at Next app

* fix: pull next out to top level

* fix: all the things

* fix: get it all wired up

* fix: imports

* fix: lint rule

* fix: just use js

* fix: revert tsconfig changes

* fix: check in pages

* fix: hangup event in streaming impl

* chore: gen

* chore: generate again, remove lots of nextjs stuff

* fix: one more ignore

* fix: gen

* fix: ignore

* fix: ugh

* fix: simplify a bunch

* fix: lint

* fix: rm gen cruft

* fix: changelog

* feat: implement list with pagination

* feat: add some tests

* feat: add warnings

* fix: update workflow / task methods

* chore: version

* feat: retries
Author: Matt Kaye
Date: 2025-07-03 18:49:16 -04:00
Committed by: GitHub
Parent: 7679732b15
Commit: 3442c11106
51 changed files with 614 additions and 104 deletions


@@ -4,23 +4,20 @@ from fastapi import FastAPI
 from fastapi.responses import StreamingResponse
 
 from examples.streaming.worker import stream_task
-from hatchet_sdk import RunEventListener, StepRunEventType
+from hatchet_sdk import Hatchet
 
+# > FastAPI Proxy
+hatchet = Hatchet()
 app = FastAPI()
 
 
-async def generate_stream(stream: RunEventListener) -> AsyncGenerator[str, None]:
-    async for chunk in stream:
-        if chunk.type == StepRunEventType.STEP_RUN_EVENT_TYPE_STREAM:
-            yield chunk.payload
-
-
 @app.get("/stream")
 async def stream() -> StreamingResponse:
     ref = await stream_task.aio_run_no_wait()
-    return StreamingResponse(generate_stream(ref.stream()), media_type="text/plain")
+    return StreamingResponse(
+        hatchet.runs.subscribe_to_stream(ref.workflow_run_id), media_type="text/plain"
+    )
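
For reference, the top-level stream subscription this PR introduces can also be consumed outside of a FastAPI proxy. Below is a minimal sketch, not part of the diff above, that assumes a worker serving stream_task from examples.streaming.worker is running and that hatchet.runs.subscribe_to_stream yields text chunks (as implied by it being passed directly to StreamingResponse):

import asyncio

from examples.streaming.worker import stream_task
from hatchet_sdk import Hatchet

hatchet = Hatchet()


async def main() -> None:
    # Trigger the run without waiting for it to complete, then subscribe to its stream.
    ref = await stream_task.aio_run_no_wait()

    # Assumption: subscribe_to_stream is an async iterator of str chunks for this run.
    async for chunk in hatchet.runs.subscribe_to_stream(ref.workflow_run_id):
        print(chunk, end="", flush=True)


if __name__ == "__main__":
    asyncio.run(main())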