FE overhaul docs (#1640)

* api changes

* doc changes

* move docs

* generated

* generate

* pkg

* backmerge main

* revert to main

* revert main

* race?

* remove go tests
This commit is contained in:
Gabe Ruttner
2025-04-30 17:10:09 -04:00
committed by GitHub
parent 799b5d0dcf
commit 8e80faf2d6
1503 changed files with 36645 additions and 1235 deletions

View File

@@ -0,0 +1,17 @@
# > RootLogger
import logging
from hatchet_sdk import ClientConfig, Hatchet
logging.basicConfig(level=logging.INFO)
root_logger = logging.getLogger()
hatchet = Hatchet(
debug=True,
config=ClientConfig(
logger=root_logger,
),
)

View File

@@ -0,0 +1,10 @@
import pytest
from examples.logger.workflow import logging_workflow
@pytest.mark.asyncio(loop_scope="session")
async def test_run() -> None:
result = await logging_workflow.aio_run()
assert result["root_logger"]["status"] == "success"

View File

@@ -0,0 +1,3 @@
from examples.logger.workflow import logging_workflow
logging_workflow.run()

View File

@@ -0,0 +1,12 @@
from examples.logger.client import hatchet
from examples.logger.workflow import logging_workflow
def main() -> None:
worker = hatchet.worker("logger-worker", slots=5, workflows=[logging_workflow])
worker.start()
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,39 @@
# > LoggingWorkflow
import logging
import time
from examples.logger.client import hatchet
from hatchet_sdk import Context, EmptyModel
logger = logging.getLogger(__name__)
logging_workflow = hatchet.workflow(
name="LoggingWorkflow",
)
@logging_workflow.task()
def root_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
for i in range(12):
logger.info("executed step1 - {}".format(i))
logger.info({"step1": "step1"})
time.sleep(0.1)
return {"status": "success"}
# > ContextLogger
@logging_workflow.task()
def context_logger(input: EmptyModel, ctx: Context) -> dict[str, str]:
for i in range(12):
ctx.log("executed step1 - {}".format(i))
ctx.log({"step1": "step1"})
time.sleep(0.1)
return {"status": "success"}