mirror of https://github.com/hatchet-dev/hatchet.git, synced 2025-12-20 08:10:26 -06:00
Feat: Python SDK Documentation, Part I (#1567)
* feat: initial mkdocs setup
* chore: lock
* fix: config + start getting docs working
* fix: remove lots more redundant :type docs, update config more
* feat: split up clients
* feat: add pydoclint
* fix: rm defaults from docstrings
* fix: pydoclint errors
* feat: run pydoclint in ci
* fix: lint on 3.13
* debug: try explicit config path
* fix: ignore venv
* feat: index, styling
* fix: rm footer
* fix: more style tweaks
* feat: generated docs
* fix: refactor a bit
* fix: regen
* Revert "fix: regen" (this reverts commit 7f66adc77840ad96d0eafe55c8dd467f71eb50fb)
* feat: improve prompting
* feat: add docs, modify theme config to enable toc for docs
* fix: lint
* fix: lint
* feat: regenerate
* feat: bs4 for html parsing
* feat: preview correctly
* fix: exclude site subdir from all the linters
* refactor: break up script into components
* feat: remove a bunch more stuff from the html
* feat: prettier, enable toc
* fix: enable tocs in more places + sort properly
* fix: code blocks, ordering
* fix: ordering
* feat: finish up feature clients
* fix: rm unused deps
* fix: routing + property tags + sidebar
* fix: hatchet client + formatting
* fix: allow selecting single set of files
* fix: lint
* rm: cruft
* fix: naming
* fix: runs client attrs
* fix: rm cruft page
* feat: internal linking + top level description
* [Python]: Fixing some more issues (#1573)
* fix: pass priority through from the task
* fix: improve eof handling slightly
* chore: version
* fix: improve eof handling
* fix: send prio from durable
* fix: naming
* cleanup: use a variable
* chore: version
* feat: comment explaining page depth thing
* chore: bump ver
* feat: standalone docs
* fix: prompting + heading levels
.github/workflows/sdk-python.yml (vendored, 5 changes)
@@ -28,7 +28,7 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: "3.11"
+          python-version: "3.13"

       - name: Install Poetry
         uses: snok/install-poetry@v1
@@ -52,6 +52,9 @@ jobs:
       - name: Run Ruff
         run: poetry run ruff check .

+      - name: Run Pydoclint
+        run: poetry run pydoclint .
+
   test:
     runs-on: ubuntu-latest
     strategy:
@@ -10,7 +10,7 @@
     "lint:check": "npm run prettier:check",
     "lint:fix": "npm run prettier:fix",
     "prettier:check": "prettier \"pages/**/*.{tsx,mdx}\" --list-different",
-    "prettier:fix": "prettier \"pages/**/*.{tsx,mdx}\" --write"
+    "prettier:fix": "prettier \"pages/**/*.{tsx,mdx,js}\" --write"
   },
   "repository": {
     "type": "git",
@@ -1,74 +1,56 @@
 export default {
-  "home": {
-    "title": "User Guide",
-    "type": "page"
+  home: {
+    title: "User Guide",
+    type: "page",
+    theme: {
+      toc: false,
+    },
   },
-  "_setup": {
-    "display": "hidden"
+  _setup: {
+    display: "hidden",
   },
-  "compute": {
-    "title": "Managed Compute",
-    "type": "page",
-    "href": "/home/compute",
-    "index": "Overview",
+  compute: {
+    title: "Managed Compute",
+    type: "page",
+    href: "/home/compute",
+    index: "Overview",
     "getting-started": "Getting Started",
-    "cpu": "CPU Machine Types",
-    "gpu": "GPU Machine Types",
-    "-- SDKs": {
-      "type": "separator",
-      "title": "SDK Deployment Guides"
-    },
-    "python": {
-      "title": "Python ↗",
-      "href": "/sdks/python-sdk/docker"
-    },
-    "typescript": {
-      "title": "TypeScript ↗",
-      "href": "/sdks/typescript-sdk/docker"
-    },
-    "golang": {
-      "title": "Golang ↗",
-      "href": "/sdks/go-sdk"
-    }
+    cpu: "CPU Machine Types",
+    gpu: "GPU Machine Types",
   },
   "self-hosting": {
-    "title": "Self Hosting",
-    "type": "page"
-  },
-  "blog": {
-    "title": "Blog",
-    "type": "page"
-  },
-  "contributing": {
-    "title": "Contributing",
-    "type": "page",
-    "display": "hidden"
-  },
-  "sdks": {
-    "title": "SDK Reference",
-    "type": "menu",
-    "items": {
-      "python": {
-        "title": "Python",
-        "href": "/sdks/python-sdk",
-        "type": "page"
-      },
-      "typescript": {
-        "title": "TypeScript",
-        "href": "/sdks/typescript-sdk",
-        "type": "page"
-      },
-      "golang": {
-        "title": "Golang",
-        "href": "/sdks/go-sdk",
-        "type": "page"
-      }
-    }
-  },
-  "v0": {
-    "title": "V0 (Old docs)",
-    "type": "page",
-    "href": "https://v0-docs.hatchet.run"
-  }
-}
+    title: "Self Hosting",
+    type: "page",
+    theme: {
+      toc: false,
+    },
+    "display": "hidden"
+  },
+  blog: {
+    title: "Blog",
+    type: "page",
+  },
+  contributing: {
+    title: "Contributing",
+    type: "page",
+    display: "hidden",
+    theme: {
+      toc: false,
+    },
+  },
+  sdks: {
+    title: "SDK Reference",
+    type: "menu",
+    items: {
+      python: {
+        title: "Python",
+        href: "/sdks/python/client",
+        type: "page",
+      },
+    },
+  },
+  v0: {
+    title: "V0 (Old docs)",
+    type: "page",
+    href: "https://v0-docs.hatchet.run",
+  },
+};
@@ -1,18 +1,18 @@
 export default {
   "task-queue-modern-python": {
-    "title": "A task queue for modern Python applications"
+    title: "A task queue for modern Python applications",
   },
   "postgres-events-table": {
-    "title": "Use Postgres for your events table"
+    title: "Use Postgres for your events table",
   },
   "migrating-off-prisma": {
-    "title": "Why we moved off Prisma"
+    title: "Why we moved off Prisma",
   },
   "problems-with-celery": {
-    "title": "The problems with Celery",
-    "display": "hidden",
+    title: "The problems with Celery",
+    display: "hidden",
   },
   "multi-tenant-queues": {
-    "title": "An unfair advantage: multi-tenant queues in Postgres"
+    title: "An unfair advantage: multi-tenant queues in Postgres",
   },
-}
+};
@@ -1,5 +1,5 @@
 export default {
-  "index": "Contributing",
+  index: "Contributing",
   "github-app-setup": "GitHub App Setup",
-  "sdks": "SDKs"
-}
+  sdks: "SDKs",
+};
@@ -1,22 +1,22 @@
 export default {
   "--intro": {
-    "title": "Introduction",
-    "type": "separator"
+    title: "Introduction",
+    type: "separator",
   },
-  "index": "Introduction",
+  index: "Introduction",
   "hatchet-cloud-quickstart": "Hatchet Cloud Quickstart",
   "--quickstart": {
-    "title": "Quickstart",
-    "type": "separator"
+    title: "Quickstart",
+    type: "separator",
   },
-  "setup": "Setup",
+  setup: "Setup",
   "your-first-task": "Tasks",
-  "workers": "Workers",
+  workers: "Workers",
   "running-your-task": "Running Tasks",
-  "environments": "Environments",
+  environments: "Environments",
   "--running-tasks": {
-    "title": "Ways of Running Tasks",
-    "type": "separator"
+    title: "Ways of Running Tasks",
+    type: "separator",
   },
   "running-tasks": "Introduction",
   "run-with-results": "Run and Wait Trigger",
@@ -26,109 +26,109 @@ export default {
   "run-on-event": "Event Trigger",
   "bulk-run": "Bulk Run Many",
   "streaming-results": {
-    "title": "Streaming Results",
-    "display": "hidden"
+    title: "Streaming Results",
+    display: "hidden",
   },
   "--flow-control": {
-    "title": "Flow Control",
-    "type": "separator"
+    title: "Flow Control",
+    type: "separator",
   },
-  "concurrency": "Concurrency",
+  concurrency: "Concurrency",
   "rate-limits": "Rate Limits",
-  "priority": "Priority",
+  priority: "Priority",
   "running-from-external-services": {
-    "title": "Running from External Services",
-    "display": "hidden"
+    title: "Running from External Services",
+    display: "hidden",
   },
   "--advanced-workflows": {
-    "title": "Workflows",
-    "type": "separator"
+    title: "Workflows",
+    type: "separator",
   },
-  "orchestration": "Task Orchestration",
-  "dags": {
-    "title": "Directed Acyclic Graphs (DAGs)"
+  orchestration: "Task Orchestration",
+  dags: {
+    title: "Directed Acyclic Graphs (DAGs)",
   },
   "conditional-workflows": "Conditional Workflows",
   "on-failure-tasks": "On Failure Tasks",
   "child-spawning": {
-    "title": "Child Spawning"
+    title: "Child Spawning",
   },
   "additional-metadata": {
-    "title": "Additional Metadata"
+    title: "Additional Metadata",
   },
   "--durable-execution": {
-    "title": "Durable Execution",
-    "type": "separator"
+    title: "Durable Execution",
+    type: "separator",
   },
   "durable-execution": {
-    "title": "Durable Execution"
+    title: "Durable Execution",
   },
   "durable-events": {
-    "title": "Durable Events"
+    title: "Durable Events",
   },
   "durable-sleep": {
-    "title": "Durable Sleep"
+    title: "Durable Sleep",
   },
   "durable-best-practices": {
-    "title": "Best Practices"
+    title: "Best Practices",
   },
   "--error-handling": {
-    "title": "Error Handling",
-    "type": "separator"
+    title: "Error Handling",
+    type: "separator",
   },
-  "timeouts": "Timeouts",
+  timeouts: "Timeouts",
   "retry-policies": "Retry Policies",
   "bulk-retries-and-cancellations": "Bulk Retries and Cancellations",

   "--assignment": {
-    "title": "Advanced Assignment",
-    "type": "separator"
+    title: "Advanced Assignment",
+    type: "separator",
   },
   "sticky-assignment": "Sticky Assignment",
   "worker-affinity": "Worker Affinity",
   "manual-slot-release": "Manual Slot Release",
   "--observability": {
-    "title": "Observability",
-    "type": "separator"
+    title: "Observability",
+    type: "separator",
   },
-  "logging": "Logging",
-  "opentelemetry": "OpenTelemetry",
+  logging: "Logging",
+  opentelemetry: "OpenTelemetry",
   "--deploying-workers": {
-    "title": "Deploying Workers",
-    "type": "separator"
+    title: "Deploying Workers",
+    type: "separator",
   },
-  "docker": "Running with Docker",
-  "compute": "Managed Compute",
+  docker: "Running with Docker",
+  compute: "Managed Compute",
   "worker-healthchecks": "Worker Health Checks",
   "--cancellation": {
-    "title": "Cancellation",
-    "type": "separator",
-    "display": "hidden"
+    title: "Cancellation",
+    type: "separator",
+    display: "hidden",
   },
-  "cancellation": {
-    "title": "Cancellation",
+  cancellation: {
+    title: "Cancellation",
   },
   "--v1-migration-guides": {
-    "title": "V1 Migration Guides",
-    "type": "separator"
+    title: "V1 Migration Guides",
+    type: "separator",
   },
   "v1-sdk-improvements": {
-    "title": "SDK Improvements"
+    title: "SDK Improvements",
   },
   "migration-guide-engine": "Engine Migration Guide",
   "migration-guide-python": "Python Migration Guide",
   "migration-guide-typescript": "Typescript Migration Guide",
   "migration-guide-go": "Go Migration Guide",
   "--python": {
-    "title": "Python Specifics",
-    "type": "separator"
+    title: "Python Specifics",
+    type: "separator",
   },
-  "asyncio": "Asyncio",
-  "pydantic": "Pydantic",
-  "lifespans": "Lifespans",
-  "blog": {
-    "title": "Blog",
-    "type": "page",
-    "href": "/blog"
-  }
-}
+  asyncio: "Asyncio",
+  pydantic: "Pydantic",
+  lifespans: "Lifespans",
+  blog: {
+    title: "Blog",
+    type: "page",
+    href: "/blog",
+  },
+};
@@ -1,9 +1,9 @@
 export default {
-  "index": "Overview",
+  index: "Overview",
   "getting-started": "Getting Started",
-  "cpu": "CPU Machine Types",
-  "gpu": "GPU Machine Types",
+  cpu: "CPU Machine Types",
+  gpu: "GPU Machine Types",
   "git-ops": "GitOps",
   "auto-scaling": "Auto Scaling",
   "environment-variables": "Environment Variables",
-}
+};
frontend/docs/pages/sdks/_meta.js (new file, 9 lines)
@@ -0,0 +1,9 @@
export default {
  python: {
    title: "Python SDK",
    type: "page",
    theme: {
      toc: true,
    },
  },
};
frontend/docs/pages/sdks/python/_meta.js (new file, 22 lines)
@@ -0,0 +1,22 @@
export default {
  client: {
    title: "Client",
    theme: {
      toc: true,
    },
  },

  "feature-clients": {
    title: "Feature Clients",
    theme: {
      toc: true,
    },
  },

  runnables: {
    title: "Runnables",
    theme: {
      toc: true,
    },
  },
};
frontend/docs/pages/sdks/python/client.mdx (new file, 176 lines)
@@ -0,0 +1,176 @@
# Hatchet Python SDK Reference

This is the Python SDK reference, documenting methods available for interacting with Hatchet resources. Check out the [user guide](../../home) for an introduction to getting your first tasks running.

## The Hatchet Python Client

Main client for interacting with the Hatchet SDK.

This class provides access to various client interfaces and utility methods for working with Hatchet workers, workflows, tasks, and our various feature clients.

Methods:

| Name | Description |
| --- | --- |
| `worker` | Create a Hatchet worker on which to run workflows. |
| `workflow` | Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedule`d, and so on. |
| `task` | A decorator to transform a function into a standalone Hatchet task that runs as part of a workflow. |
| `durable_task` | A decorator to transform a function into a standalone Hatchet _durable_ task that runs as part of a workflow. |
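For orientation, a minimal sketch of constructing the client (this assumes `HATCHET_CLIENT_TOKEN` is set in the environment):

```python
from hatchet_sdk import Hatchet

# Connection settings (token, host, namespace) are read from the
# environment by default.
hatchet = Hatchet()

# The feature clients documented below hang off this object:
# hatchet.cron, hatchet.event, hatchet.logs, hatchet.metrics,
# hatchet.rate_limits, hatchet.runs, hatchet.scheduled,
# hatchet.workers, hatchet.workflows
```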
### Attributes

#### `cron`

The cron client is a client for managing cron workflows within Hatchet.

#### `event`

The event client, which you can use to push events to Hatchet.

#### `logs`

The logs client is a client for interacting with Hatchet's logs API.

#### `metrics`

The metrics client is a client for reading metrics out of Hatchet's metrics API.

#### `rate_limits`

The rate limits client is a wrapper for Hatchet's gRPC API that makes it easier to work with rate limits in Hatchet.

#### `runs`

The runs client is a client for interacting with task and workflow runs within Hatchet.

#### `scheduled`

The scheduled client is a client for managing scheduled workflows within Hatchet.

#### `workers`

The workers client is a client for managing workers programmatically within Hatchet.

#### `workflows`

The workflows client is a client for managing workflows programmatically within Hatchet.

Note that workflows are the declaration, _not_ the individual runs. If you're looking for runs, use the `RunsClient` instead.

#### `tenant_id`

The tenant id you're operating in.

#### `namespace`

The current namespace you're interacting with.
### Functions

#### `worker`

Create a Hatchet worker on which to run workflows.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `name` | `str` | The name of the worker. | _required_ |
| `slots` | `int` | The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time. | `100` |
| `durable_slots` | `int` | The number of durable workflow slots on the worker. In other words, the number of concurrent durable tasks the worker can run at any point in time. | `1000` |
| `labels` | `dict[str, Union[str, int]]` | A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels. | `{}` |
| `workflows` | `list[BaseWorkflow[Any]]` | A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them. | `[]` |
| `lifespan` | `LifespanFn \| None` | A lifespan function to run on the worker. This function will be called when the worker is started, and can be used to perform any setup or teardown tasks. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `Worker` | The created `Worker` object, which exposes an instance method `start` which can be called to start the worker. |
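A minimal sketch of creating and starting a worker (the worker and task names here are illustrative):

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()

# A trivial standalone task so the worker has something to run.
@hatchet.task(name="ExampleTask")
def example_task(input: EmptyModel, ctx: Context) -> dict[str, str]:
    return {"status": "done"}

def main() -> None:
    worker = hatchet.worker(
        "example-worker",
        slots=10,                  # up to 10 concurrent task runs
        workflows=[example_task],  # register the task with the worker
    )
    worker.start()  # blocks and polls for work

if __name__ == "__main__":
    main()
```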
#### `workflow`

Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedule`d, and so on.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `name` | `str` | The name of the workflow. | _required_ |
| `description` | `str \| None` | A description for the workflow. | `None` |
| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the `input` to the tasks in the workflow. If no validator is provided, defaults to an `EmptyModel` under the hood. The `EmptyModel` is a Pydantic model with no fields specified, and with the `extra` config option set to `"allow"`. | `None` |
| `on_events` | `list[str]` | A list of event triggers for the workflow - events which cause the workflow to be run. | `[]` |
| `on_crons` | `list[str]` | A list of cron triggers for the workflow. | `[]` |
| `version` | `str \| None` | A version for the workflow. | `None` |
| `sticky` | `StickyStrategy \| None` | A sticky strategy for the workflow. | `None` |
| `default_priority` | `int` | The priority of the workflow. Higher values will cause this workflow to have priority in scheduling over other, lower priority ones. | `1` |
| `concurrency` | `ConcurrencyExpression \| list[ConcurrencyExpression] \| None` | A concurrency object controlling the concurrency settings for this workflow. | `None` |
| `task_defaults` | `TaskDefaults` | A `TaskDefaults` object controlling the default task settings for this workflow. | `TaskDefaults()` |

Returns:

| Type | Description |
| --- | --- |
| `Workflow[EmptyModel] \| Workflow[TWorkflowInput]` | The created `Workflow` object, which can be used to declare tasks, run the workflow, and so on. |
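A sketch of declaring a workflow with a Pydantic input validator (the model and names are illustrative):

```python
from hatchet_sdk import Context, Hatchet
from pydantic import BaseModel

hatchet = Hatchet()

class GreetInput(BaseModel):  # hypothetical input model
    name: str

greeter = hatchet.workflow(
    name="Greeter",
    input_validator=GreetInput,
    on_events=["user:signup"],  # optional event trigger
)

@greeter.task()
def greet(input: GreetInput, ctx: Context) -> dict[str, str]:
    return {"greeting": f"Hello, {input.name}!"}

# With a worker running, the workflow can then be triggered:
# result = greeter.run(GreetInput(name="Ada"))
```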
#### `task`

A decorator to transform a function into a standalone Hatchet task that runs as part of a workflow.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `name` | `str` | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator. | _required_ |
| `description` | `str \| None` | An optional description for the task. | `None` |
| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`. | `None` |
| `on_events` | `list[str]` | A list of event triggers for the task - events which cause the task to be run. | `[]` |
| `on_crons` | `list[str]` | A list of cron triggers for the task. | `[]` |
| `version` | `str \| None` | A version for the task. | `None` |
| `sticky` | `StickyStrategy \| None` | A sticky strategy for the task. | `None` |
| `default_priority` | `int` | The priority of the task. Higher values will cause this task to have priority in scheduling. | `1` |
| `concurrency` | `ConcurrencyExpression \| list[ConcurrencyExpression] \| None` | A concurrency object controlling the concurrency settings for this task. | `None` |
| `schedule_timeout` | `Duration` | The maximum time allowed for scheduling the task. | `DEFAULT_SCHEDULE_TIMEOUT` |
| `execution_timeout` | `Duration` | The maximum time allowed for executing the task. | `DEFAULT_EXECUTION_TIMEOUT` |
| `retries` | `int` | The number of times to retry the task before failing. | `0` |
| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` |
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `{}` |
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `Callable[[Callable[[EmptyModel, Context], R]], Standalone[EmptyModel, R]] \| Callable[[Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]]` | A decorator which creates a `Standalone` task object. |
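A sketch of the decorator in use (names and settings are illustrative; passing a `timedelta` for the `Duration` timeout is an assumption):

```python
from datetime import timedelta

from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()

@hatchet.task(
    name="SendReminder",
    retries=3,  # retry up to 3 times before failing
    execution_timeout=timedelta(seconds=30),
)
def send_reminder(input: EmptyModel, ctx: Context) -> dict[str, bool]:
    ctx.log("sending reminder")  # stream a log line to Hatchet
    return {"sent": True}
```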
#### `durable_task`

A decorator to transform a function into a standalone Hatchet _durable_ task that runs as part of a workflow.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `name` | `str` | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator. | _required_ |
| `description` | `str \| None` | An optional description for the task. | `None` |
| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`. | `None` |
| `on_events` | `list[str]` | A list of event triggers for the task - events which cause the task to be run. | `[]` |
| `on_crons` | `list[str]` | A list of cron triggers for the task. | `[]` |
| `version` | `str \| None` | A version for the task. | `None` |
| `sticky` | `StickyStrategy \| None` | A sticky strategy for the task. | `None` |
| `default_priority` | `int` | The priority of the task. Higher values will cause this task to have priority in scheduling. | `1` |
| `concurrency` | `ConcurrencyExpression \| None` | A concurrency object controlling the concurrency settings for this task. | `None` |
| `schedule_timeout` | `Duration` | The maximum time allowed for scheduling the task. | `DEFAULT_SCHEDULE_TIMEOUT` |
| `execution_timeout` | `Duration` | The maximum time allowed for executing the task. | `DEFAULT_EXECUTION_TIMEOUT` |
| `retries` | `int` | The number of times to retry the task before failing. | `0` |
| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` |
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `{}` |
| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` |
| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `Callable[[Callable[[EmptyModel, DurableContext], R]], Standalone[EmptyModel, R]] \| Callable[[Callable[[TWorkflowInput, DurableContext], R]], Standalone[TWorkflowInput, R]]` | A decorator which creates a `Standalone` task object. |
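A sketch of a durable task (the exact `DurableContext` helper for durable sleep is an assumption here; see the durable execution pages in the user guide):

```python
from datetime import timedelta

from hatchet_sdk import DurableContext, EmptyModel, Hatchet

hatchet = Hatchet()

@hatchet.durable_task(name="NightlyDigest")
async def nightly_digest(input: EmptyModel, ctx: DurableContext) -> dict[str, bool]:
    # Durable sleep is checkpointed, so it survives worker restarts.
    # The helper name `aio_sleep_for` is an assumption.
    await ctx.aio_sleep_for(duration=timedelta(hours=1))
    return {"sent": True}
```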
frontend/docs/pages/sdks/python/feature-clients/_meta.js (new file, 57 lines)
@@ -0,0 +1,57 @@
export default {
  cron: {
    title: "Cron",
    theme: {
      toc: true,
    },
  },

  logs: {
    title: "Logs",
    theme: {
      toc: true,
    },
  },

  metrics: {
    title: "Metrics",
    theme: {
      toc: true,
    },
  },

  rate_limits: {
    title: "Rate Limits",
    theme: {
      toc: true,
    },
  },

  runs: {
    title: "Runs",
    theme: {
      toc: true,
    },
  },

  scheduled: {
    title: "Scheduled",
    theme: {
      toc: true,
    },
  },

  workers: {
    title: "Workers",
    theme: {
      toc: true,
    },
  },

  workflows: {
    title: "Workflows",
    theme: {
      toc: true,
    },
  },
};
frontend/docs/pages/sdks/python/feature-clients/cron.mdx (new file, 168 lines)
@@ -0,0 +1,168 @@
# Cron Client

Bases: `BaseRestClient`

The cron client is a client for managing cron workflows within Hatchet.

Methods:

| Name | Description |
| --- | --- |
| `aio_create` | Create a new workflow cron trigger. |
| `aio_delete` | Delete a workflow cron trigger. |
| `aio_get` | Retrieve a specific workflow cron trigger by ID. |
| `aio_list` | Retrieve a list of all workflow cron triggers matching the criteria. |
| `create` | Create a new workflow cron trigger. |
| `delete` | Delete a workflow cron trigger. |
| `get` | Retrieve a specific workflow cron trigger by ID. |
| `list` | Retrieve a list of all workflow cron triggers matching the criteria. |

### Functions

#### `aio_create`

Create a new workflow cron trigger.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_name` | `str` | The name of the workflow to trigger. | _required_ |
| `cron_name` | `str` | The name of the cron trigger. | _required_ |
| `expression` | `str` | The cron expression defining the schedule. | _required_ |
| `input` | `JSONSerializableMapping` | The input data for the cron workflow. | _required_ |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata associated with the cron trigger. | _required_ |
| `priority` | `int \| None` | The priority of the cron workflow trigger. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `CronWorkflows` | The created cron workflow instance. |

#### `aio_delete`

Delete a workflow cron trigger.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `cron_id` | `str` | The ID of the cron trigger to delete. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `aio_get`

Retrieve a specific workflow cron trigger by ID.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `cron_id` | `str` | The cron trigger ID or CronWorkflows instance to retrieve. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `CronWorkflows` | The requested cron workflow instance. |

#### `aio_list`

Retrieve a list of all workflow cron triggers matching the criteria.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `offset` | `int \| None` | The offset to start the list from. | `None` |
| `limit` | `int \| None` | The maximum number of items to return. | `None` |
| `workflow_id` | `str \| None` | The ID of the workflow to filter by. | `None` |
| `additional_metadata` | `JSONSerializableMapping \| None` | Filter by additional metadata keys. | `None` |
| `order_by_field` | `CronWorkflowsOrderByField \| None` | The field to order the list by. | `None` |
| `order_by_direction` | `WorkflowRunOrderByDirection \| None` | The direction to order the list by. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `CronWorkflowsList` | A list of cron workflows. |

#### `create`

Create a new workflow cron trigger.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_name` | `str` | The name of the workflow to trigger. | _required_ |
| `cron_name` | `str` | The name of the cron trigger. | _required_ |
| `expression` | `str` | The cron expression defining the schedule. | _required_ |
| `input` | `JSONSerializableMapping` | The input data for the cron workflow. | _required_ |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata associated with the cron trigger. | _required_ |
| `priority` | `int \| None` | The priority of the cron workflow trigger. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `CronWorkflows` | The created cron workflow instance. |
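A sketch of the synchronous variants end to end (workflow and trigger names are illustrative; reading the created trigger's ID from `metadata.id` is an assumption about the `CronWorkflows` model):

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

cron = hatchet.cron.create(
    workflow_name="NightlyDigest",
    cron_name="nightly-digest-cron",
    expression="0 2 * * *",  # every day at 02:00
    input={"limit": 100},
    additional_metadata={"team": "growth"},
)

# List existing triggers, then clean up the one we just created.
triggers = hatchet.cron.list(limit=10)
hatchet.cron.delete(cron_id=cron.metadata.id)
```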
#### `delete`

Delete a workflow cron trigger.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `cron_id` | `str` | The ID of the cron trigger to delete. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `get`

Retrieve a specific workflow cron trigger by ID.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `cron_id` | `str` | The cron trigger ID or CronWorkflows instance to retrieve. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `CronWorkflows` | The requested cron workflow instance. |

#### `list`

Retrieve a list of all workflow cron triggers matching the criteria.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `offset` | `int \| None` | The offset to start the list from. | `None` |
| `limit` | `int \| None` | The maximum number of items to return. | `None` |
| `workflow_id` | `str \| None` | The ID of the workflow to filter by. | `None` |
| `additional_metadata` | `JSONSerializableMapping \| None` | Filter by additional metadata keys. | `None` |
| `order_by_field` | `CronWorkflowsOrderByField \| None` | The field to order the list by. | `None` |
| `order_by_direction` | `WorkflowRunOrderByDirection \| None` | The direction to order the list by. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `CronWorkflowsList` | A list of cron workflows. |
frontend/docs/pages/sdks/python/feature-clients/logs.mdx (new file, 46 lines)
@@ -0,0 +1,46 @@
# Logs Client

Bases: `BaseRestClient`

The logs client is a client for interacting with Hatchet's logs API.

Methods:

| Name | Description |
| --- | --- |
| `aio_list` | List log lines for a given task run. |
| `list` | List log lines for a given task run. |

### Functions

#### `aio_list`

List log lines for a given task run.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `task_run_id` | `str` | The ID of the task run to list logs for. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `V1LogLineList` | A list of log lines for the specified task run. |

#### `list`

List log lines for a given task run.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `task_run_id` | `str` | The ID of the task run to list logs for. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `V1LogLineList` | A list of log lines for the specified task run. |
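A sketch of fetching logs for a run (the run ID is a placeholder, and iterating the result via a `rows` attribute with a `message` field is an assumption about the `V1LogLineList` model):

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

logs = hatchet.logs.list(task_run_id="<task-run-id>")

for line in logs.rows or []:
    print(line.message)
```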
frontend/docs/pages/sdks/python/feature-clients/metrics.mdx (new file, 108 lines)
@@ -0,0 +1,108 @@
# Metrics Client

Bases: `BaseRestClient`

The metrics client is a client for reading metrics out of Hatchet's metrics API.

Methods:

| Name | Description |
| --- | --- |
| `aio_get_queue_metrics` | Retrieve queue metrics for a set of workflow IDs and additional metadata. |
| `aio_get_task_metrics` | Retrieve task run queue metrics for the tenant. |
| `aio_get_workflow_metrics` | Retrieve workflow metrics for a given workflow ID. |
| `get_queue_metrics` | Retrieve queue metrics for a set of workflow IDs and additional metadata. |
| `get_task_metrics` | Retrieve task run queue metrics for the tenant. |
| `get_workflow_metrics` | Retrieve workflow metrics for a given workflow ID. |

### Functions

#### `aio_get_queue_metrics`

Retrieve queue metrics for a set of workflow IDs and additional metadata.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_ids` | `list[str] \| None` | A list of workflow IDs to retrieve metrics for. | `None` |
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata to filter the metrics by. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `TenantQueueMetrics` | Workflow metrics for the specified workflow IDs. |

#### `aio_get_task_metrics`

Retrieve task run queue metrics for the tenant.

Returns:

| Type | Description |
| --- | --- |
| `TenantStepRunQueueMetrics` | Step run queue metrics for the tenant. |

#### `aio_get_workflow_metrics`

Retrieve workflow metrics for a given workflow ID.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_id` | `str` | The ID of the workflow to retrieve metrics for. | _required_ |
| `status` | `WorkflowRunStatus \| None` | The status of the workflow run to filter by. | `None` |
| `group_key` | `str \| None` | The key to group the metrics by. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `WorkflowMetrics` | Workflow metrics for the specified workflow ID. |

#### `get_queue_metrics`

Retrieve queue metrics for a set of workflow IDs and additional metadata.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_ids` | `list[str] \| None` | A list of workflow IDs to retrieve metrics for. | `None` |
| `additional_metadata` | `JSONSerializableMapping \| None` | Additional metadata to filter the metrics by. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `TenantQueueMetrics` | Workflow metrics for the specified workflow IDs. |

#### `get_task_metrics`

Retrieve task run queue metrics for the tenant.

Returns:

| Type | Description |
| --- | --- |
| `TenantStepRunQueueMetrics` | Step run queue metrics for the tenant. |

#### `get_workflow_metrics`

Retrieve workflow metrics for a given workflow ID.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_id` | `str` | The ID of the workflow to retrieve metrics for. | _required_ |
| `status` | `WorkflowRunStatus \| None` | The status of the workflow run to filter by. | `None` |
| `group_key` | `str \| None` | The key to group the metrics by. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `WorkflowMetrics` | Workflow metrics for the specified workflow ID. |
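A sketch of the synchronous variants (the workflow IDs are placeholders):

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

# Queue metrics, optionally scoped to specific workflows.
queue_metrics = hatchet.metrics.get_queue_metrics(
    workflow_ids=["<workflow-id-1>", "<workflow-id-2>"],
)

# Run metrics for a single workflow.
workflow_metrics = hatchet.metrics.get_workflow_metrics(
    workflow_id="<workflow-id-1>",
)
```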
@@ -0,0 +1,50 @@
# Rate Limits Client

Bases: `BaseRestClient`

The rate limits client is a wrapper for Hatchet's gRPC API that makes it easier to work with rate limits in Hatchet.

Methods:

| Name | Description |
| --- | --- |
| `aio_put` | Put a rate limit for a given key. |
| `put` | Put a rate limit for a given key. |

### Functions

#### `aio_put`

Put a rate limit for a given key.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `key` | `str` | The key to set the rate limit for. | _required_ |
| `limit` | `int` | The rate limit to set. | _required_ |
| `duration` | `RateLimitDuration` | The duration of the rate limit. | `SECOND` |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `put`

Put a rate limit for a given key.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `key` | `str` | The key to set the rate limit for. | _required_ |
| `limit` | `int` | The rate limit to set. | _required_ |
| `duration` | `RateLimitDuration` | The duration of the rate limit. | `SECOND` |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |
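A sketch of declaring a limit that tasks can then reference through their `rate_limits` configuration (the key is illustrative, and the `RateLimitDuration` export location is an assumption):

```python
from hatchet_sdk import Hatchet, RateLimitDuration

hatchet = Hatchet()

# Allow at most 100 units per minute for this key.
hatchet.rate_limits.put(
    key="external-api",
    limit=100,
    duration=RateLimitDuration.MINUTE,
)
```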
frontend/docs/pages/sdks/python/feature-clients/runs.mdx (new file, 329 lines)
@@ -0,0 +1,329 @@
# Runs Client

Bases: `BaseRestClient`

The runs client is a client for interacting with task and workflow runs within Hatchet.

Methods:

| Name | Description |
| --- | --- |
| `get` | Get workflow run details for a given workflow run ID. |
| `aio_get` | Get workflow run details for a given workflow run ID. |
| `list` | List task runs according to a set of filters. |
| `aio_list` | List task runs according to a set of filters. |
| `create` | Trigger a new workflow run. |
| `aio_create` | Trigger a new workflow run. |
| `replay` | Replay a task or workflow run. |
| `aio_replay` | Replay a task or workflow run. |
| `bulk_replay` | Replay task or workflow runs in bulk, according to a set of filters. |
| `aio_bulk_replay` | Replay task or workflow runs in bulk, according to a set of filters. |
| `cancel` | Cancel a task or workflow run. |
| `aio_cancel` | Cancel a task or workflow run. |
| `bulk_cancel` | Cancel task or workflow runs in bulk, according to a set of filters. |
| `aio_bulk_cancel` | Cancel task or workflow runs in bulk, according to a set of filters. |
| `get_result` | Get the result of a workflow run by its external ID. |
| `aio_get_result` | Get the result of a workflow run by its external ID. |
| `get_run_ref` | Get a reference to a workflow run. |

### Functions

#### `get`

Get workflow run details for a given workflow run ID.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_run_id` | `str` | The ID of the workflow run to retrieve details for. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `V1WorkflowRunDetails` | Workflow run details for the specified workflow run ID. |

#### `aio_get`

Get workflow run details for a given workflow run ID.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_run_id` | `str` | The ID of the workflow run to retrieve details for. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `V1WorkflowRunDetails` | Workflow run details for the specified workflow run ID. |

#### `list`

List task runs according to a set of filters.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `since` | `datetime` | The start time for filtering task runs. | `now() - timedelta(hours=1)` |
| `only_tasks` | `bool` | Whether to only list task runs. | `False` |
| `offset` | `int \| None` | The offset for pagination. | `None` |
| `limit` | `int \| None` | The maximum number of task runs to return. | `None` |
| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter task runs by. | `None` |
| `until` | `datetime \| None` | The end time for filtering task runs. | `None` |
| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter task runs by. | `None` |
| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter task runs by. | `None` |
| `worker_id` | `str \| None` | The worker ID to filter task runs by. | `None` |
| `parent_task_external_id` | `str \| None` | The parent task external ID to filter task runs by. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `V1TaskSummaryList` | A list of task runs matching the specified filters. |
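A sketch of listing recent task runs (filter values are illustrative):

```python
from datetime import datetime, timedelta, timezone

from hatchet_sdk import Hatchet

hatchet = Hatchet()

# Task runs started in the last six hours.
runs = hatchet.runs.list(
    since=datetime.now(timezone.utc) - timedelta(hours=6),
    only_tasks=True,
    limit=50,
)
```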
#### `aio_list`

List task runs according to a set of filters.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `since` | `datetime` | The start time for filtering task runs. | `now() - timedelta(hours=1)` |
| `only_tasks` | `bool` | Whether to only list task runs. | `False` |
| `offset` | `int \| None` | The offset for pagination. | `None` |
| `limit` | `int \| None` | The maximum number of task runs to return. | `None` |
| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter task runs by. | `None` |
| `until` | `datetime \| None` | The end time for filtering task runs. | `None` |
| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter task runs by. | `None` |
| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter task runs by. | `None` |
| `worker_id` | `str \| None` | The worker ID to filter task runs by. | `None` |
| `parent_task_external_id` | `str \| None` | The parent task external ID to filter task runs by. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `V1TaskSummaryList` | A list of task runs matching the specified filters. |

#### `create`

Trigger a new workflow run.

IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](../../../sdks/python/runnables#workflow).

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_name` | `str` | The name of the workflow to trigger. | _required_ |
| `input` | `JSONSerializableMapping` | The input data for the workflow run. | _required_ |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata associated with the workflow run. | `{}` |
| `priority` | `int \| None` | The priority of the workflow run. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `V1WorkflowRunDetails` | The details of the triggered workflow run. |

#### `aio_create`

Trigger a new workflow run.

IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](../../../sdks/python/runnables#workflow).

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `workflow_name` | `str` | The name of the workflow to trigger. | _required_ |
| `input` | `JSONSerializableMapping` | The input data for the workflow run. | _required_ |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata associated with the workflow run. | `{}` |
| `priority` | `int \| None` | The priority of the workflow run. | `None` |

Returns:

| Type | Description |
| --- | --- |
| `V1WorkflowRunDetails` | The details of the triggered workflow run. |
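A sketch of the escape-hatch trigger (prefer `Workflow.run` when you hold a `Workflow` object; names and inputs here are placeholders):

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

details = hatchet.runs.create(
    workflow_name="Greeter",
    input={"name": "Ada"},
    additional_metadata={"source": "backfill"},
)
```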
#### `replay`

Replay a task or workflow run.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `run_id` | `str` | The external ID of the task or workflow run to replay. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `aio_replay`

Replay a task or workflow run.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `run_id` | `str` | The external ID of the task or workflow run to replay. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `bulk_replay`

Replay task or workflow runs in bulk, according to a set of filters.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `opts` | `BulkCancelReplayOpts` | Options for bulk replay, including filters and IDs. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `aio_bulk_replay`

Replay task or workflow runs in bulk, according to a set of filters.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `opts` | `BulkCancelReplayOpts` | Options for bulk replay, including filters and IDs. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `cancel`

Cancel a task or workflow run.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `run_id` | `str` | The external ID of the task or workflow run to cancel. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `aio_cancel`

Cancel a task or workflow run.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `run_id` | `str` | The external ID of the task or workflow run to cancel. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `bulk_cancel`

Cancel task or workflow runs in bulk, according to a set of filters.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `opts` | `BulkCancelReplayOpts` | Options for bulk cancel, including filters and IDs. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `aio_bulk_cancel`

Cancel task or workflow runs in bulk, according to a set of filters.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `opts` | `BulkCancelReplayOpts` | Options for bulk cancel, including filters and IDs. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `None` | None |

#### `get_result`

Get the result of a workflow run by its external ID.

Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `run_id` | `str` | The external ID of the workflow run to retrieve the result for. | _required_ |

Returns:

| Type | Description |
| --- | --- |
| `JSONSerializableMapping` | The result of the workflow run. |
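A sketch of fetching a finished run's output (the run ID is a placeholder):

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

result = hatchet.runs.get_result(run_id="<workflow-run-id>")
```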
#### `aio_get_result`
|
||||
|
||||
Get the result of a workflow run by its external ID.
|
||||
|
||||
Parameters:
|
||||
|
||||
| Name | Type | Description | Default |
|
||||
| -------- | ----- | --------------------------------------------------------------- | ---------- |
|
||||
| `run_id` | `str` | The external ID of the workflow run to retrieve the result for. | _required_ |
|
||||
|
||||
Returns:
|
||||
|
||||
| Type | Description |
|
||||
| ------------------------- | ------------------------------- |
|
||||
| `JSONSerializableMapping` | The result of the workflow run. |
|
||||
|
||||
#### `get_run_ref`

Get a reference to a workflow run.

Parameters:

| Name              | Type  | Description                                       | Default    |
| ----------------- | ----- | ------------------------------------------------- | ---------- |
| `workflow_run_id` | `str` | The ID of the workflow run to get a reference to. | _required_ |

Returns:

| Type             | Description                                |
| ---------------- | ------------------------------------------ |
| `WorkflowRunRef` | A reference to the specified workflow run. |
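A reference is useful for subscribing to a run that was triggered elsewhere. A sketch, assuming `WorkflowRunRef` exposes a blocking `result()` method (check the `WorkflowRunRef` reference for the exact API):

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

ref = hatchet.runs.get_run_ref(workflow_run_id="workflow-run-id")

# Block until the run finishes and collect its output
# (assumes WorkflowRunRef.result())
result = ref.result()
```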
172 frontend/docs/pages/sdks/python/feature-clients/scheduled.mdx Normal file
@@ -0,0 +1,172 @@
# Scheduled Client

Bases: `BaseRestClient`

The scheduled client is a client for managing scheduled workflows within Hatchet.

Methods:

| Name         | Description                                                          |
| ------------ | -------------------------------------------------------------------- |
| `aio_create` | Creates a new scheduled workflow run.                                |
| `aio_delete` | Deletes a scheduled workflow run by its ID.                          |
| `aio_get`    | Retrieves a specific scheduled workflow by scheduled run trigger ID. |
| `aio_list`   | Retrieves a list of scheduled workflows based on provided filters.   |
| `create`     | Creates a new scheduled workflow run.                                |
| `delete`     | Deletes a scheduled workflow run by its ID.                          |
| `get`        | Retrieves a specific scheduled workflow by scheduled run trigger ID. |
| `list`       | Retrieves a list of scheduled workflows based on provided filters.   |

### Functions

#### `aio_create`

Creates a new scheduled workflow run.

IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](../../../sdks/python/runnables#workflow).

Parameters:

| Name                  | Type                      | Description                                                              | Default    |
| --------------------- | ------------------------- | ------------------------------------------------------------------------ | ---------- |
| `workflow_name`       | `str`                     | The name of the workflow to schedule.                                    | _required_ |
| `trigger_at`          | `datetime`                | The datetime when the run should be triggered.                           | _required_ |
| `input`               | `JSONSerializableMapping` | The input data for the scheduled workflow.                               | _required_ |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata associated with the future run as a key-value pair. | _required_ |

Returns:

| Type                 | Description                              |
| -------------------- | ---------------------------------------- |
| `ScheduledWorkflows` | The created scheduled workflow instance. |

#### `aio_delete`

Deletes a scheduled workflow run by its ID.

Parameters:

| Name           | Type  | Description                                     | Default    |
| -------------- | ----- | ----------------------------------------------- | ---------- |
| `scheduled_id` | `str` | The ID of the scheduled workflow run to delete. | _required_ |

Returns:

| Type   | Description |
| ------ | ----------- |
| `None` | None        |

#### `aio_get`

Retrieves a specific scheduled workflow by scheduled run trigger ID.

Parameters:

| Name           | Type  | Description                                    | Default    |
| -------------- | ----- | ---------------------------------------------- | ---------- |
| `scheduled_id` | `str` | The scheduled workflow trigger ID to retrieve. | _required_ |

Returns:

| Type                 | Description                                |
| -------------------- | ------------------------------------------ |
| `ScheduledWorkflows` | The requested scheduled workflow instance. |

#### `aio_list`

Retrieves a list of scheduled workflows based on provided filters.

Parameters:

| Name                     | Type                                       | Description                                          | Default |
| ------------------------ | ------------------------------------------ | ---------------------------------------------------- | ------- |
| `offset`                 | `int \| None`                              | The offset to use in pagination.                     | `None`  |
| `limit`                  | `int \| None`                              | The maximum number of scheduled workflows to return. | `None`  |
| `workflow_id`            | `str \| None`                              | The ID of the workflow to filter by.                 | `None`  |
| `parent_workflow_run_id` | `str \| None`                              | The ID of the parent workflow run to filter by.      | `None`  |
| `statuses`               | `list[ScheduledRunStatus] \| None`         | A list of statuses to filter by.                     | `None`  |
| `additional_metadata`    | `Optional[JSONSerializableMapping]`        | Additional metadata to filter by.                    | `None`  |
| `order_by_field`         | `Optional[ScheduledWorkflowsOrderByField]` | The field to order the results by.                   | `None`  |
| `order_by_direction`     | `Optional[WorkflowRunOrderByDirection]`    | The direction to order the results by.               | `None`  |

Returns:

| Type                     | Description                                                  |
| ------------------------ | ------------------------------------------------------------ |
| `ScheduledWorkflowsList` | A list of scheduled workflows matching the provided filters. |

#### `create`

Creates a new scheduled workflow run.

IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](../../../sdks/python/runnables#workflow).

Parameters:

| Name                  | Type                      | Description                                                              | Default    |
| --------------------- | ------------------------- | ------------------------------------------------------------------------ | ---------- |
| `workflow_name`       | `str`                     | The name of the workflow to schedule.                                    | _required_ |
| `trigger_at`          | `datetime`                | The datetime when the run should be triggered.                           | _required_ |
| `input`               | `JSONSerializableMapping` | The input data for the scheduled workflow.                               | _required_ |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata associated with the future run as a key-value pair. | _required_ |

Returns:

| Type                 | Description                              |
| -------------------- | ---------------------------------------- |
| `ScheduledWorkflows` | The created scheduled workflow instance. |
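For instance, a sketch of scheduling a run of `my-workflow` one hour from now (the workflow name and input are placeholders):

```python
from datetime import datetime, timedelta, timezone

from hatchet_sdk import Hatchet

hatchet = Hatchet()

# Returns a ScheduledWorkflows instance describing the pending trigger
scheduled = hatchet.scheduled.create(
    workflow_name="my-workflow",
    trigger_at=datetime.now(timezone.utc) + timedelta(hours=1),
    input={"name": "World"},
    additional_metadata={"source": "docs-example"},
)
```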
#### `delete`

Deletes a scheduled workflow run by its ID.

Parameters:

| Name           | Type  | Description                                     | Default    |
| -------------- | ----- | ----------------------------------------------- | ---------- |
| `scheduled_id` | `str` | The ID of the scheduled workflow run to delete. | _required_ |

Returns:

| Type   | Description |
| ------ | ----------- |
| `None` | None        |

#### `get`

Retrieves a specific scheduled workflow by scheduled run trigger ID.

Parameters:

| Name           | Type  | Description                                    | Default    |
| -------------- | ----- | ---------------------------------------------- | ---------- |
| `scheduled_id` | `str` | The scheduled workflow trigger ID to retrieve. | _required_ |

Returns:

| Type                 | Description                                |
| -------------------- | ------------------------------------------ |
| `ScheduledWorkflows` | The requested scheduled workflow instance. |

#### `list`

Retrieves a list of scheduled workflows based on provided filters.

Parameters:

| Name                     | Type                                       | Description                                          | Default |
| ------------------------ | ------------------------------------------ | ---------------------------------------------------- | ------- |
| `offset`                 | `int \| None`                              | The offset to use in pagination.                     | `None`  |
| `limit`                  | `int \| None`                              | The maximum number of scheduled workflows to return. | `None`  |
| `workflow_id`            | `str \| None`                              | The ID of the workflow to filter by.                 | `None`  |
| `parent_workflow_run_id` | `str \| None`                              | The ID of the parent workflow run to filter by.      | `None`  |
| `statuses`               | `list[ScheduledRunStatus] \| None`         | A list of statuses to filter by.                     | `None`  |
| `additional_metadata`    | `Optional[JSONSerializableMapping]`        | Additional metadata to filter by.                    | `None`  |
| `order_by_field`         | `Optional[ScheduledWorkflowsOrderByField]` | The field to order the results by.                   | `None`  |
| `order_by_direction`     | `Optional[WorkflowRunOrderByDirection]`    | The direction to order the results by.               | `None`  |

Returns:

| Type                     | Description                                                  |
| ------------------------ | ------------------------------------------------------------ |
| `ScheduledWorkflowsList` | A list of scheduled workflows matching the provided filters. |
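And a sketch of paging through scheduled runs for a single workflow:

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

# Fetch the first 50 scheduled runs for a given workflow ID (placeholder)
page = hatchet.scheduled.list(workflow_id="workflow-id", limit=50, offset=0)
```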
104 frontend/docs/pages/sdks/python/feature-clients/workers.mdx Normal file
@@ -0,0 +1,104 @@
# Workers Client

Bases: `BaseRestClient`

The workers client is a client for managing workers programmatically within Hatchet.

Methods:

| Name         | Description                                                     |
| ------------ | --------------------------------------------------------------- |
| `aio_get`    | Get a worker by its ID.                                         |
| `aio_list`   | List all workers in the tenant determined by the client config. |
| `aio_update` | Update a worker by its ID.                                      |
| `get`        | Get a worker by its ID.                                         |
| `list`       | List all workers in the tenant determined by the client config. |
| `update`     | Update a worker by its ID.                                      |

### Functions

#### `aio_get`

Get a worker by its ID.

Parameters:

| Name        | Type  | Description                       | Default    |
| ----------- | ----- | --------------------------------- | ---------- |
| `worker_id` | `str` | The ID of the worker to retrieve. | _required_ |

Returns:

| Type     | Description |
| -------- | ----------- |
| `Worker` | The worker. |

#### `aio_list`

List all workers in the tenant determined by the client config.

Returns:

| Type         | Description        |
| ------------ | ------------------ |
| `WorkerList` | A list of workers. |

#### `aio_update`

Update a worker by its ID.

Parameters:

| Name        | Type                  | Description                     | Default    |
| ----------- | --------------------- | ------------------------------- | ---------- |
| `worker_id` | `str`                 | The ID of the worker to update. | _required_ |
| `opts`      | `UpdateWorkerRequest` | The update options.             | _required_ |

Returns:

| Type     | Description         |
| -------- | ------------------- |
| `Worker` | The updated worker. |

#### `get`

Get a worker by its ID.

Parameters:

| Name        | Type  | Description                       | Default    |
| ----------- | ----- | --------------------------------- | ---------- |
| `worker_id` | `str` | The ID of the worker to retrieve. | _required_ |

Returns:

| Type     | Description |
| -------- | ----------- |
| `Worker` | The worker. |

#### `list`

List all workers in the tenant determined by the client config.

Returns:

| Type         | Description        |
| ------------ | ------------------ |
| `WorkerList` | A list of workers. |

#### `update`

Update a worker by its ID.

Parameters:

| Name        | Type                  | Description                     | Default    |
| ----------- | --------------------- | ------------------------------- | ---------- |
| `worker_id` | `str`                 | The ID of the worker to update. | _required_ |
| `opts`      | `UpdateWorkerRequest` | The update options.             | _required_ |

Returns:

| Type     | Description         |
| -------- | ------------------- |
| `Worker` | The updated worker. |
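As a sketch, pausing a worker so it stops picking up new runs might look like the following; the `is_paused` field and the model's import path are assumptions, so check `UpdateWorkerRequest` in your SDK version:

```python
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.rest.models import UpdateWorkerRequest  # import path is an assumption

hatchet = Hatchet()

# Assumed field: `is_paused` — pause the worker without shutting it down
hatchet.workers.update(
    worker_id="worker-id",
    opts=UpdateWorkerRequest(is_paused=True),
)
```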
122 frontend/docs/pages/sdks/python/feature-clients/workflows.mdx Normal file
@@ -0,0 +1,122 @@
# Workflows Client

Bases: `BaseRestClient`

The workflows client is a client for managing workflows programmatically within Hatchet.

Note that workflows are the declaration, _not_ the individual runs. If you're looking for runs, use the `RunsClient` instead.

Methods:

| Name              | Description                                                                                    |
| ----------------- | ---------------------------------------------------------------------------------------------- |
| `aio_get`         | Get a workflow by its ID.                                                                      |
| `aio_get_version` | Get a workflow version by the workflow ID and an optional version.                             |
| `aio_list`        | List all workflows in the tenant determined by the client config that match optional filters.  |
| `get`             | Get a workflow by its ID.                                                                      |
| `get_version`     | Get a workflow version by the workflow ID and an optional version.                             |
| `list`            | List all workflows in the tenant determined by the client config that match optional filters.  |

### Functions

#### `aio_get`

Get a workflow by its ID.

Parameters:

| Name          | Type  | Description                         | Default    |
| ------------- | ----- | ----------------------------------- | ---------- |
| `workflow_id` | `str` | The ID of the workflow to retrieve. | _required_ |

Returns:

| Type       | Description   |
| ---------- | ------------- |
| `Workflow` | The workflow. |

#### `aio_get_version`

Get a workflow version by the workflow ID and an optional version.

Parameters:

| Name          | Type          | Description                                                                        | Default    |
| ------------- | ------------- | ---------------------------------------------------------------------------------- | ---------- |
| `workflow_id` | `str`         | The ID of the workflow to retrieve the version for.                                | _required_ |
| `version`     | `str \| None` | The version of the workflow to retrieve. If None, the latest version is returned.  | `None`     |

Returns:

| Type              | Description           |
| ----------------- | --------------------- |
| `WorkflowVersion` | The workflow version. |

#### `aio_list`

List all workflows in the tenant determined by the client config that match optional filters.

Parameters:

| Name            | Type          | Description                            | Default |
| --------------- | ------------- | -------------------------------------- | ------- |
| `workflow_name` | `str \| None` | The name of the workflow to filter by. | `None`  |
| `limit`         | `int \| None` | The maximum number of items to return. | `None`  |
| `offset`        | `int \| None` | The offset to start the list from.     | `None`  |

Returns:

| Type           | Description          |
| -------------- | -------------------- |
| `WorkflowList` | A list of workflows. |

#### `get`

Get a workflow by its ID.

Parameters:

| Name          | Type  | Description                         | Default    |
| ------------- | ----- | ----------------------------------- | ---------- |
| `workflow_id` | `str` | The ID of the workflow to retrieve. | _required_ |

Returns:

| Type       | Description   |
| ---------- | ------------- |
| `Workflow` | The workflow. |

#### `get_version`

Get a workflow version by the workflow ID and an optional version.

Parameters:

| Name          | Type          | Description                                                                        | Default    |
| ------------- | ------------- | ---------------------------------------------------------------------------------- | ---------- |
| `workflow_id` | `str`         | The ID of the workflow to retrieve the version for.                                | _required_ |
| `version`     | `str \| None` | The version of the workflow to retrieve. If None, the latest version is returned.  | `None`     |

Returns:

| Type              | Description           |
| ----------------- | --------------------- |
| `WorkflowVersion` | The workflow version. |

#### `list`

List all workflows in the tenant determined by the client config that match optional filters.

Parameters:

| Name            | Type          | Description                            | Default |
| --------------- | ------------- | -------------------------------------- | ------- |
| `workflow_name` | `str \| None` | The name of the workflow to filter by. | `None`  |
| `limit`         | `int \| None` | The maximum number of items to return. | `None`  |
| `offset`        | `int \| None` | The offset to start the list from.     | `None`  |

Returns:

| Type           | Description          |
| -------------- | -------------------- |
| `WorkflowList` | A list of workflows. |
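For example, a sketch of looking up a workflow declaration and its latest registered version:

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

# Filter declarations by name ("my-workflow" is a placeholder)
workflows = hatchet.workflows.list(workflow_name="my-workflow", limit=10)

# Passing no `version` returns the latest version of the workflow
version = hatchet.workflows.get_version(workflow_id="workflow-id")
```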
619 frontend/docs/pages/sdks/python/runnables.mdx Normal file
@@ -0,0 +1,619 @@
# Runnables

`Runnables` in the Hatchet SDK are things that can be run, namely tasks and workflows. The two main types of runnables you'll encounter are:

- `Workflow`, which lets you define tasks and call all of the run, schedule, etc. methods
- `Standalone`, which is a single task that's returned by `hatchet.task` and can be run, scheduled, etc.

## Workflow

Bases: `BaseWorkflow[TWorkflowInput]`

A Hatchet workflow, which allows you to define tasks to be run and perform actions on the workflow.

Workflows in Hatchet represent coordinated units of work that can be triggered, scheduled, or run on a cron schedule. Each workflow can contain multiple tasks that can be arranged into dependency graphs (DAGs), with customized retry behavior, timeouts, concurrency controls, and more.

Example:

```python
from pydantic import BaseModel

from hatchet_sdk import Context, Hatchet


class MyInput(BaseModel):
    name: str


hatchet = Hatchet()
workflow = hatchet.workflow("my-workflow", input_type=MyInput)


@workflow.task()
def greet(input: MyInput, ctx: Context) -> str:
    return f"Hello, {input.name}!"


# Run the workflow and wait for the result
result = workflow.run(MyInput(name="World"))
```

Workflows support various execution patterns including:

- One-time execution with `run()` or `aio_run()`
- Scheduled execution with `schedule()`
- Cron-based recurring execution with `create_cron()`
- Bulk operations with `run_many()`

Tasks within workflows can be defined with the `@workflow.task()` or `@workflow.durable_task()` decorators and can be arranged into complex dependency patterns.

Methods:

| Name                   | Description                                                                                                                                        |
| ---------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------- |
| `task`                 | A decorator to transform a function into a Hatchet task that runs as part of a workflow.                                                           |
| `durable_task`         | A decorator to transform a function into a durable Hatchet task that runs as part of a workflow.                                                   |
| `on_failure_task`      | A decorator to transform a function into a Hatchet on-failure task that runs as the last step in a workflow that had at least one task fail.       |
| `on_success_task`      | A decorator to transform a function into a Hatchet on-success task that runs as the last step in a workflow that had all upstream tasks succeed.   |
| `run`                  | Run the workflow synchronously and wait for it to complete.                                                                                        |
| `aio_run`              | Run the workflow asynchronously and wait for it to complete.                                                                                       |
| `run_no_wait`          | Synchronously trigger a workflow run without waiting for it to complete.                                                                           |
| `aio_run_no_wait`      | Asynchronously trigger a workflow run without waiting for it to complete.                                                                          |
| `run_many`             | Run a workflow in bulk and wait for all runs to complete.                                                                                          |
| `aio_run_many`         | Run a workflow in bulk and wait for all runs to complete.                                                                                          |
| `run_many_no_wait`     | Run a workflow in bulk without waiting for all runs to complete.                                                                                   |
| `aio_run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete.                                                                                   |
| `schedule`             | Schedule a workflow to run at a specific time.                                                                                                     |
| `aio_schedule`         | Schedule a workflow to run at a specific time.                                                                                                     |
| `create_cron`          | Create a cron job for the workflow.                                                                                                                |
| `aio_create_cron`      | Create a cron job for the workflow.                                                                                                                |

### Functions
#### `task`

A decorator to transform a function into a Hatchet task that runs as part of a workflow.

Parameters:

| Name                    | Type                              | Description                                                                                                                                                                        | Default                     |
| ----------------------- | --------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------- |
| `name`                  | `str \| None`                     | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.                                                               | `None`                      |
| `schedule_timeout`      | `Duration`                        | The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time.                                                      | `DEFAULT_SCHEDULE_TIMEOUT`  |
| `execution_timeout`     | `Duration`                        | The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time.                                                       | `DEFAULT_EXECUTION_TIMEOUT` |
| `parents`               | `list[Task[TWorkflowInput, Any]]` | A list of tasks that are parents of the task. Note: Parents must be defined before their children.                                                                                | `[]`                        |
| `retries`               | `int`                             | The number of times to retry the task before failing.                                                                                                                             | `0`                         |
| `rate_limits`           | `list[RateLimit]`                 | A list of rate limit configurations for the task.                                                                                                                                 | `[]`                        |
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]`   | A dictionary of desired worker labels that determine to which worker the task should be assigned. See documentation and examples on affinity and worker labels for more details.  | `{}`                        |
| `backoff_factor`        | `float \| None`                   | The backoff factor for controlling exponential backoff in retries.                                                                                                                | `None`                      |
| `backoff_max_seconds`   | `int \| None`                     | The maximum number of seconds to allow retries with exponential backoff to continue.                                                                                              | `None`                      |
| `concurrency`           | `list[ConcurrencyExpression]`     | A list of concurrency expressions for the task.                                                                                                                                   | `[]`                        |
| `wait_for`              | `list[Condition \| OrGroup]`      | A list of conditions that must be met before the task can run.                                                                                                                    | `[]`                        |
| `skip_if`               | `list[Condition \| OrGroup]`      | A list of conditions that, if met, will cause the task to be skipped.                                                                                                             | `[]`                        |
| `cancel_if`             | `list[Condition \| OrGroup]`      | A list of conditions that, if met, will cause the task to be canceled.                                                                                                            | `[]`                        |

Returns:

| Type                                                                          | Description                                |
| ----------------------------------------------------------------------------- | ------------------------------------------ |
| `Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. |
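Building on the workflow example above, a sketch of a two-task DAG; it assumes `Context.task_output` for reading a parent's result (see the `Context` reference):

```python
@workflow.task()
def fetch(input: MyInput, ctx: Context) -> dict:
    return {"greeting": f"Hello, {input.name}!"}


@workflow.task(parents=[fetch], retries=3)
def shout(input: MyInput, ctx: Context) -> str:
    # Read the parent task's output (assumes ctx.task_output)
    return ctx.task_output(fetch)["greeting"].upper()
```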
#### `durable_task`

A decorator to transform a function into a durable Hatchet task that runs as part of a workflow.

**IMPORTANT:** This decorator creates a _durable_ task, which works using Hatchet's durable execution capabilities. This is an advanced feature of Hatchet.

See the Hatchet docs for more information on durable execution to decide if this is right for you.

Parameters:

| Name                    | Type                              | Description                                                                                                                                                                        | Default                     |
| ----------------------- | --------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------- |
| `name`                  | `str \| None`                     | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.                                                               | `None`                      |
| `schedule_timeout`      | `Duration`                        | The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time.                                                      | `DEFAULT_SCHEDULE_TIMEOUT`  |
| `execution_timeout`     | `Duration`                        | The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time.                                                       | `DEFAULT_EXECUTION_TIMEOUT` |
| `parents`               | `list[Task[TWorkflowInput, Any]]` | A list of tasks that are parents of the task. Note: Parents must be defined before their children.                                                                                | `[]`                        |
| `retries`               | `int`                             | The number of times to retry the task before failing.                                                                                                                             | `0`                         |
| `rate_limits`           | `list[RateLimit]`                 | A list of rate limit configurations for the task.                                                                                                                                 | `[]`                        |
| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]`   | A dictionary of desired worker labels that determine to which worker the task should be assigned. See documentation and examples on affinity and worker labels for more details.  | `{}`                        |
| `backoff_factor`        | `float \| None`                   | The backoff factor for controlling exponential backoff in retries.                                                                                                                | `None`                      |
| `backoff_max_seconds`   | `int \| None`                     | The maximum number of seconds to allow retries with exponential backoff to continue.                                                                                              | `None`                      |
| `concurrency`           | `list[ConcurrencyExpression]`     | A list of concurrency expressions for the task.                                                                                                                                   | `[]`                        |
| `wait_for`              | `list[Condition \| OrGroup]`      | A list of conditions that must be met before the task can run.                                                                                                                    | `[]`                        |
| `skip_if`               | `list[Condition \| OrGroup]`      | A list of conditions that, if met, will cause the task to be skipped.                                                                                                             | `[]`                        |
| `cancel_if`             | `list[Condition \| OrGroup]`      | A list of conditions that, if met, will cause the task to be canceled.                                                                                                            | `[]`                        |

Returns:

| Type                                                                                 | Description                                |
| ------------------------------------------------------------------------------------ | ------------------------------------------ |
| `Callable[[Callable[[TWorkflowInput, DurableContext], R]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. |

#### `on_failure_task`

A decorator to transform a function into a Hatchet on-failure task that runs as the last step in a workflow that had at least one task fail.

Parameters:

| Name                  | Type                          | Description                                                                                                                                 | Default                     |
| --------------------- | ----------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------- |
| `name`                | `str \| None`                 | The name of the on-failure task. If not specified, defaults to the name of the function being wrapped by the `on_failure_task` decorator.  | `None`                      |
| `schedule_timeout`    | `Duration`                    | The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time.               | `DEFAULT_SCHEDULE_TIMEOUT`  |
| `execution_timeout`   | `Duration`                    | The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time.                | `DEFAULT_EXECUTION_TIMEOUT` |
| `retries`             | `int`                         | The number of times to retry the on-failure task before failing.                                                                           | `0`                         |
| `rate_limits`         | `list[RateLimit]`             | A list of rate limit configurations for the on-failure task.                                                                               | `[]`                        |
| `backoff_factor`      | `float \| None`               | The backoff factor for controlling exponential backoff in retries.                                                                         | `None`                      |
| `backoff_max_seconds` | `int \| None`                 | The maximum number of seconds to allow retries with exponential backoff to continue.                                                       | `None`                      |
| `concurrency`         | `list[ConcurrencyExpression]` | A list of concurrency expressions for the on-failure task.                                                                                 | `[]`                        |

Returns:

| Type                                                                          | Description                                |
| ----------------------------------------------------------------------------- | ------------------------------------------ |
| `Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. |

#### `on_success_task`

A decorator to transform a function into a Hatchet on-success task that runs as the last step in a workflow that had all upstream tasks succeed.

Parameters:

| Name                  | Type                          | Description                                                                                                                                 | Default                     |
| --------------------- | ----------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------- | --------------------------- |
| `name`                | `str \| None`                 | The name of the on-success task. If not specified, defaults to the name of the function being wrapped by the `on_success_task` decorator.  | `None`                      |
| `schedule_timeout`    | `Duration`                    | The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time.               | `DEFAULT_SCHEDULE_TIMEOUT`  |
| `execution_timeout`   | `Duration`                    | The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time.                | `DEFAULT_EXECUTION_TIMEOUT` |
| `retries`             | `int`                         | The number of times to retry the on-success task before failing.                                                                           | `0`                         |
| `rate_limits`         | `list[RateLimit]`             | A list of rate limit configurations for the on-success task.                                                                               | `[]`                        |
| `backoff_factor`      | `float \| None`               | The backoff factor for controlling exponential backoff in retries.                                                                         | `None`                      |
| `backoff_max_seconds` | `int \| None`                 | The maximum number of seconds to allow retries with exponential backoff to continue.                                                       | `None`                      |
| `concurrency`         | `list[ConcurrencyExpression]` | A list of concurrency expressions for the on-success task.                                                                                 | `[]`                        |

Returns:

| Type                                                                          | Description                                |
| ----------------------------------------------------------------------------- | ------------------------------------------ |
| `Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. |
#### `run`

Run the workflow synchronously and wait for it to complete.

This method triggers a workflow run, blocks until completion, and returns the final result.

Parameters:

| Name      | Type                     | Description                                                                      | Default                              |
| --------- | ------------------------ | --------------------------------------------------------------------------------- | ------------------------------------ |
| `input`   | `TWorkflowInput`         | The input data for the workflow, must match the workflow's input type.           | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `TriggerWorkflowOptions` | Additional options for workflow execution like metadata and parent workflow ID.  | `TriggerWorkflowOptions()`           |

Returns:

| Type             | Description                                           |
| ---------------- | ----------------------------------------------------- |
| `dict[str, Any]` | The result of the workflow execution as a dictionary. |

#### `aio_run`

Run the workflow asynchronously and wait for it to complete.

This method triggers a workflow run, blocks until completion, and returns the final result.

Parameters:

| Name      | Type                     | Description                                                                      | Default                              |
| --------- | ------------------------ | --------------------------------------------------------------------------------- | ------------------------------------ |
| `input`   | `TWorkflowInput`         | The input data for the workflow, must match the workflow's input type.           | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `TriggerWorkflowOptions` | Additional options for workflow execution like metadata and parent workflow ID.  | `TriggerWorkflowOptions()`           |

Returns:

| Type             | Description                                           |
| ---------------- | ----------------------------------------------------- |
| `dict[str, Any]` | The result of the workflow execution as a dictionary. |

#### `run_no_wait`

Synchronously trigger a workflow run without waiting for it to complete. This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs.

Parameters:

| Name      | Type                     | Description                                | Default                              |
| --------- | ------------------------ | ------------------------------------------ | ------------------------------------ |
| `input`   | `TWorkflowInput`         | The input data for the workflow.           | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `TriggerWorkflowOptions` | Additional options for workflow execution. | `TriggerWorkflowOptions()`           |

Returns:

| Type             | Description                                                               |
| ---------------- | -------------------------------------------------------------------------- |
| `WorkflowRunRef` | A `WorkflowRunRef` object representing the reference to the workflow run. |

#### `aio_run_no_wait`

Asynchronously trigger a workflow run without waiting for it to complete. This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs.

Parameters:

| Name      | Type                     | Description                                | Default                              |
| --------- | ------------------------ | ------------------------------------------ | ------------------------------------ |
| `input`   | `TWorkflowInput`         | The input data for the workflow.           | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `TriggerWorkflowOptions` | Additional options for workflow execution. | `TriggerWorkflowOptions()`           |

Returns:

| Type             | Description                                                               |
| ---------------- | -------------------------------------------------------------------------- |
| `WorkflowRunRef` | A `WorkflowRunRef` object representing the reference to the workflow run. |

#### `run_many`

Run a workflow in bulk and wait for all runs to complete. This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results.

Parameters:

| Name        | Type                             | Description                                                                                     | Default    |
| ----------- | -------------------------------- | ------------------------------------------------------------------------------------------------ | ---------- |
| `workflows` | `list[WorkflowRunTriggerConfig]` | A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. | _required_ |

Returns:

| Type                   | Description                              |
| ---------------------- | ---------------------------------------- |
| `list[dict[str, Any]]` | A list of results for each workflow run. |

#### `aio_run_many`

Run a workflow in bulk and wait for all runs to complete. This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results.

Parameters:

| Name        | Type                             | Description                                                                                     | Default    |
| ----------- | -------------------------------- | ------------------------------------------------------------------------------------------------ | ---------- |
| `workflows` | `list[WorkflowRunTriggerConfig]` | A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. | _required_ |

Returns:

| Type                   | Description                              |
| ---------------------- | ---------------------------------------- |
| `list[dict[str, Any]]` | A list of results for each workflow run. |

#### `run_many_no_wait`

Run a workflow in bulk without waiting for all runs to complete. This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.

Parameters:

| Name        | Type                             | Description                                                                                     | Default    |
| ----------- | -------------------------------- | ------------------------------------------------------------------------------------------------ | ---------- |
| `workflows` | `list[WorkflowRunTriggerConfig]` | A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. | _required_ |

Returns:

| Type                   | Description                                                                          |
| ---------------------- | -------------------------------------------------------------------------------------- |
| `list[WorkflowRunRef]` | A list of `WorkflowRunRef` objects, each representing a reference to a workflow run. |

#### `aio_run_many_no_wait`

Run a workflow in bulk without waiting for all runs to complete. This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.

Parameters:

| Name        | Type                             | Description                                                                                     | Default    |
| ----------- | -------------------------------- | ------------------------------------------------------------------------------------------------ | ---------- |
| `workflows` | `list[WorkflowRunTriggerConfig]` | A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. | _required_ |

Returns:

| Type                   | Description                                                                          |
| ---------------------- | -------------------------------------------------------------------------------------- |
| `list[WorkflowRunRef]` | A list of `WorkflowRunRef` objects, each representing a reference to a workflow run. |
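A sketch of a fan-out with `run_many`; it assumes `Workflow.create_bulk_run_item` for building the `WorkflowRunTriggerConfig` objects (check your SDK version for the exact helper):

```python
configs = [
    workflow.create_bulk_run_item(input=MyInput(name=name))  # assumed helper
    for name in ["Alice", "Bob", "Carol"]
]

# Blocks until every run finishes; results come back in the same order
results = workflow.run_many(configs)
```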
#### `schedule`

Schedule a workflow to run at a specific time.

Parameters:

| Name      | Type                             | Description                                 | Default                              |
| --------- | -------------------------------- | ------------------------------------------- | ------------------------------------ |
| `run_at`  | `datetime`                       | The time at which to schedule the workflow. | _required_                           |
| `input`   | `TWorkflowInput`                 | The input data for the workflow.            | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `ScheduleTriggerWorkflowOptions` | Additional options for workflow execution.  | `ScheduleTriggerWorkflowOptions()`   |

Returns:

| Type              | Description                                                     |
| ----------------- | ----------------------------------------------------------------- |
| `WorkflowVersion` | A `WorkflowVersion` object representing the scheduled workflow. |

#### `aio_schedule`

Schedule a workflow to run at a specific time.

Parameters:

| Name      | Type                             | Description                                 | Default                              |
| --------- | -------------------------------- | ------------------------------------------- | ------------------------------------ |
| `run_at`  | `datetime`                       | The time at which to schedule the workflow. | _required_                           |
| `input`   | `TWorkflowInput`                 | The input data for the workflow.            | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `ScheduleTriggerWorkflowOptions` | Additional options for workflow execution.  | `ScheduleTriggerWorkflowOptions()`   |

Returns:

| Type              | Description                                                     |
| ----------------- | ----------------------------------------------------------------- |
| `WorkflowVersion` | A `WorkflowVersion` object representing the scheduled workflow. |

#### `create_cron`

Create a cron job for the workflow.

Parameters:

| Name                  | Type                      | Description                                                       | Default                              |
| --------------------- | ------------------------- | -------------------------------------------------------------------- | ------------------------------------ |
| `cron_name`           | `str`                     | The name of the cron job.                                         | _required_                           |
| `expression`          | `str`                     | The cron expression that defines the schedule for the cron job.  | _required_                           |
| `input`               | `TWorkflowInput`          | The input data for the workflow.                                  | `cast(TWorkflowInput, EmptyModel())` |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata for the cron job.                             | `{}`                                 |
| `priority`            | `int \| None`             | The priority of the cron job. Must be between 1 and 3, inclusive. | `None`                               |

Returns:

| Type            | Description                                                 |
| --------------- | ------------------------------------------------------------- |
| `CronWorkflows` | A `CronWorkflows` object representing the created cron job. |

#### `aio_create_cron`

Create a cron job for the workflow.

Parameters:

| Name                  | Type                      | Description                                                       | Default                              |
| --------------------- | ------------------------- | -------------------------------------------------------------------- | ------------------------------------ |
| `cron_name`           | `str`                     | The name of the cron job.                                         | _required_                           |
| `expression`          | `str`                     | The cron expression that defines the schedule for the cron job.  | _required_                           |
| `input`               | `TWorkflowInput`          | The input data for the workflow.                                  | `cast(TWorkflowInput, EmptyModel())` |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata for the cron job.                             | `{}`                                 |
| `priority`            | `int \| None`             | The priority of the cron job. Must be between 1 and 3, inclusive. | `None`                               |

Returns:

| Type            | Description                                                 |
| --------------- | ------------------------------------------------------------- |
| `CronWorkflows` | A `CronWorkflows` object representing the created cron job. |
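For example, a sketch of scheduling a one-off run and a daily cron for the workflow defined above:

```python
from datetime import datetime, timedelta, timezone

# Run once, an hour from now
workflow.schedule(
    run_at=datetime.now(timezone.utc) + timedelta(hours=1),
    input=MyInput(name="World"),
)

# Run every day at 09:00 UTC
workflow.create_cron(
    cron_name="daily-greeting",
    expression="0 9 * * *",
    input=MyInput(name="World"),
)
```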
## Standalone

Bases: `BaseWorkflow[TWorkflowInput]`, `Generic[TWorkflowInput, R]`

Methods:

| Name                   | Description                                                                |
| ---------------------- | ---------------------------------------------------------------------------- |
| `run`                  | Run the workflow synchronously and wait for it to complete.               |
| `aio_run`              | Run the workflow asynchronously and wait for it to complete.              |
| `run_no_wait`          | Synchronously trigger a workflow run without waiting for it to complete.  |
| `aio_run_no_wait`      | Asynchronously trigger a workflow run without waiting for it to complete. |
| `run_many`             | Run a workflow in bulk and wait for all runs to complete.                 |
| `aio_run_many`         | Run a workflow in bulk and wait for all runs to complete.                 |
| `run_many_no_wait`     | Run a workflow in bulk without waiting for all runs to complete.          |
| `aio_run_many_no_wait` | Run a workflow in bulk without waiting for all runs to complete.          |
| `schedule`             | Schedule a workflow to run at a specific time.                             |
| `aio_schedule`         | Schedule a workflow to run at a specific time.                             |
| `create_cron`          | Create a cron job for the workflow.                                        |
| `aio_create_cron`      | Create a cron job for the workflow.                                        |
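A `Standalone` is what `hatchet.task` returns. A minimal sketch, using the SDK's `EmptyModel` default input:

```python
from hatchet_sdk import Context, EmptyModel, Hatchet

hatchet = Hatchet()


@hatchet.task()
def say_hello(input: EmptyModel, ctx: Context) -> str:
    return "Hello!"


# Run the task synchronously and wait for the result
result = say_hello.run()
```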
### Functions

#### `run`

Run the workflow synchronously and wait for it to complete.

This method triggers a workflow run, blocks until completion, and returns the final result.

Parameters:

| Name      | Type                     | Description                                | Default                              |
| --------- | ------------------------ | ------------------------------------------ | ------------------------------------ |
| `input`   | `TWorkflowInput`         | The input data for the workflow.           | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `TriggerWorkflowOptions` | Additional options for workflow execution. | `TriggerWorkflowOptions()`           |

Returns:

| Type | Description                           |
| ---- | --------------------------------------- |
| `R`  | The result of the workflow execution. |

#### `aio_run`

Run the workflow asynchronously and wait for it to complete.

This method triggers a workflow run, blocks until completion, and returns the final result.

Parameters:

| Name      | Type                     | Description                                                                      | Default                              |
| --------- | ------------------------ | --------------------------------------------------------------------------------- | ------------------------------------ |
| `input`   | `TWorkflowInput`         | The input data for the workflow, must match the workflow's input type.           | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `TriggerWorkflowOptions` | Additional options for workflow execution like metadata and parent workflow ID.  | `TriggerWorkflowOptions()`           |

Returns:

| Type | Description                           |
| ---- | --------------------------------------- |
| `R`  | The result of the workflow execution. |

#### `run_no_wait`

Synchronously trigger a workflow run without waiting for it to complete. This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs.

Parameters:

| Name      | Type                     | Description                                | Default                              |
| --------- | ------------------------ | ------------------------------------------ | ------------------------------------ |
| `input`   | `TWorkflowInput`         | The input data for the workflow.           | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `TriggerWorkflowOptions` | Additional options for workflow execution. | `TriggerWorkflowOptions()`           |

Returns:

| Type                            | Description                                                           |
| ------------------------------- | ------------------------------------------------------------------------ |
| `TaskRunRef[TWorkflowInput, R]` | A `TaskRunRef` object representing the reference to the workflow run. |

#### `aio_run_no_wait`

Asynchronously trigger a workflow run without waiting for it to complete. This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs.

Parameters:

| Name      | Type                     | Description                                | Default                              |
| --------- | ------------------------ | ------------------------------------------ | ------------------------------------ |
| `input`   | `TWorkflowInput`         | The input data for the workflow.           | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `TriggerWorkflowOptions` | Additional options for workflow execution. | `TriggerWorkflowOptions()`           |

Returns:

| Type                            | Description                                                           |
| ------------------------------- | ------------------------------------------------------------------------ |
| `TaskRunRef[TWorkflowInput, R]` | A `TaskRunRef` object representing the reference to the workflow run. |
#### `run_many`

Run a workflow in bulk and wait for all runs to complete. This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results.

Parameters:

| Name        | Type                             | Description                                                                                     | Default    |
| ----------- | -------------------------------- | ------------------------------------------------------------------------------------------------ | ---------- |
| `workflows` | `list[WorkflowRunTriggerConfig]` | A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. | _required_ |

Returns:

| Type      | Description                              |
| --------- | ---------------------------------------- |
| `list[R]` | A list of results for each workflow run. |

#### `aio_run_many`

Run a workflow in bulk and wait for all runs to complete. This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results.

Parameters:

| Name        | Type                             | Description                                                                                     | Default    |
| ----------- | -------------------------------- | ------------------------------------------------------------------------------------------------ | ---------- |
| `workflows` | `list[WorkflowRunTriggerConfig]` | A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. | _required_ |

Returns:

| Type      | Description                              |
| --------- | ---------------------------------------- |
| `list[R]` | A list of results for each workflow run. |

#### `run_many_no_wait`

Run a workflow in bulk without waiting for all runs to complete. This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.

Parameters:

| Name        | Type                             | Description                                                                                     | Default    |
| ----------- | -------------------------------- | ------------------------------------------------------------------------------------------------ | ---------- |
| `workflows` | `list[WorkflowRunTriggerConfig]` | A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. | _required_ |

Returns:

| Type                                  | Description                                                                      |
| ------------------------------------- | ----------------------------------------------------------------------------------- |
| `list[TaskRunRef[TWorkflowInput, R]]` | A list of `TaskRunRef` objects, each representing a reference to a workflow run. |

#### `aio_run_many_no_wait`

Run a workflow in bulk without waiting for all runs to complete. This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.

Parameters:

| Name        | Type                             | Description                                                                                     | Default    |
| ----------- | -------------------------------- | ------------------------------------------------------------------------------------------------ | ---------- |
| `workflows` | `list[WorkflowRunTriggerConfig]` | A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered. | _required_ |

Returns:

| Type                                  | Description                                                                      |
| ------------------------------------- | ----------------------------------------------------------------------------------- |
| `list[TaskRunRef[TWorkflowInput, R]]` | A list of `TaskRunRef` objects, each representing a reference to a workflow run. |
#### `schedule`

Schedule a workflow to run at a specific time.

Parameters:

| Name      | Type                             | Description                                 | Default                              |
| --------- | -------------------------------- | ------------------------------------------- | ------------------------------------ |
| `run_at`  | `datetime`                       | The time at which to schedule the workflow. | _required_                           |
| `input`   | `TWorkflowInput`                 | The input data for the workflow.            | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `ScheduleTriggerWorkflowOptions` | Additional options for workflow execution.  | `ScheduleTriggerWorkflowOptions()`   |

Returns:

| Type              | Description                                                     |
| ----------------- | ----------------------------------------------------------------- |
| `WorkflowVersion` | A `WorkflowVersion` object representing the scheduled workflow. |

#### `aio_schedule`

Schedule a workflow to run at a specific time.

Parameters:

| Name      | Type                             | Description                                 | Default                              |
| --------- | -------------------------------- | ------------------------------------------- | ------------------------------------ |
| `run_at`  | `datetime`                       | The time at which to schedule the workflow. | _required_                           |
| `input`   | `TWorkflowInput`                 | The input data for the workflow.            | `cast(TWorkflowInput, EmptyModel())` |
| `options` | `ScheduleTriggerWorkflowOptions` | Additional options for workflow execution.  | `ScheduleTriggerWorkflowOptions()`   |

Returns:

| Type              | Description                                                     |
| ----------------- | ----------------------------------------------------------------- |
| `WorkflowVersion` | A `WorkflowVersion` object representing the scheduled workflow. |

#### `create_cron`

Create a cron job for the workflow.

Parameters:

| Name                  | Type                      | Description                                                       | Default                              |
| --------------------- | ------------------------- | -------------------------------------------------------------------- | ------------------------------------ |
| `cron_name`           | `str`                     | The name of the cron job.                                         | _required_                           |
| `expression`          | `str`                     | The cron expression that defines the schedule for the cron job.  | _required_                           |
| `input`               | `TWorkflowInput`          | The input data for the workflow.                                  | `cast(TWorkflowInput, EmptyModel())` |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata for the cron job.                             | `{}`                                 |
| `priority`            | `int \| None`             | The priority of the cron job. Must be between 1 and 3, inclusive. | `None`                               |

Returns:

| Type            | Description                                                 |
| --------------- | ------------------------------------------------------------- |
| `CronWorkflows` | A `CronWorkflows` object representing the created cron job. |

#### `aio_create_cron`

Create a cron job for the workflow.

Parameters:

| Name                  | Type                      | Description                                                       | Default                              |
| --------------------- | ------------------------- | -------------------------------------------------------------------- | ------------------------------------ |
| `cron_name`           | `str`                     | The name of the cron job.                                         | _required_                           |
| `expression`          | `str`                     | The cron expression that defines the schedule for the cron job.  | _required_                           |
| `input`               | `TWorkflowInput`          | The input data for the workflow.                                  | `cast(TWorkflowInput, EmptyModel())` |
| `additional_metadata` | `JSONSerializableMapping` | Additional metadata for the cron job.                             | `{}`                                 |
| `priority`            | `int \| None`             | The priority of the cron job. Must be between 1 and 3, inclusive. | `None`                               |

Returns:

| Type            | Description                                                 |
| --------------- | ------------------------------------------------------------- |
| `CronWorkflows` | A `CronWorkflows` object representing the created cron job. |
@@ -1,29 +1,29 @@
 export default {
-  "index": "Introduction",
+  index: "Introduction",
   "-- Docker": {
-    "type": "separator",
-    "title": "Docker"
+    type: "separator",
+    title: "Docker",
   },
   "hatchet-lite": "Hatchet Lite",
   "docker-compose": "Docker Compose",
   "-- Kubernetes": {
-    "type": "separator",
-    "title": "Kubernetes"
+    type: "separator",
+    title: "Kubernetes",
   },
   "kubernetes-quickstart": "Quickstart",
   "kubernetes-glasskube": "Installing with Glasskube",
-  "networking": "Networking",
+  networking: "Networking",
   "kubernetes-helm-configuration": "Configuring the Helm Chart",
   "kubernetes-external-database": "Setting up an External Database",
   "high-availability": "High Availability",
   "-- Managing Hatchet": {
-    "type": "separator",
-    "title": "Managing Hatchet"
+    type: "separator",
+    title: "Managing Hatchet",
   },
   "configuration-options": "Engine Configuration Options",
   "prometheus-metrics": "Prometheus Metrics",
   "worker-configuration-options": "Worker Configuration Options",
-  "benchmarking": "Benchmarking",
+  benchmarking: "Benchmarking",
   "data-retention": "Data Retention",
-  "improving-performance": "Improving Performance"
-}
+  "improving-performance": "Improving Performance",
+};
@@ -7,9 +7,9 @@ const config = {
    <Image src="/hatchet_logo.png" alt="Hatchet logo" width={120} height={35} />
  ),
  head: () => {
    const { title } = useConfig()
    const { title } = useConfig();

    const fallbackTitle = "Hatchet Documentation"
    const fallbackTitle = "Hatchet Documentation";

    return (
      <>

@@ -46,10 +46,6 @@ const config = {
    defaultMenuCollapseLevel: 2,
    toggleButton: true,
  },
  toc: {
    backToTop: null,
    component: null,
  },
  darkMode: true,
  nextThemes: {
    defaultTheme: "dark",

@@ -44,10 +44,6 @@ const config = {
    defaultMenuCollapseLevel: 2,
    toggleButton: true,
  },
  toc: {
    backToTop: null,
    component: null,
  },
  darkMode: true,
  nextThemes: {
    defaultTheme: "dark",
5
sdks/python/_meta.js
Normal file
@@ -0,0 +1,5 @@
export default {
  "runnables.md": {
    "title": "Runnablesmd",
  },
}
0
sdks/python/docs/__init__.py
Normal file

24
sdks/python/docs/client.md
Normal file
@@ -0,0 +1,24 @@
# Hatchet Python SDK Reference

This is the Python SDK reference, documenting methods available for interacting with Hatchet resources. Check out the [user guide](https://docs.hatchet.run/home) for an introduction to getting your first tasks running.

## The Hatchet Python Client

::: hatchet.Hatchet
    options:
        members:
            - cron
            - event
            - logs
            - metrics
            - rate_limits
            - runs
            - scheduled
            - workers
            - workflows
            - tenant_id
            - namespace
            - worker
            - workflow
            - task
            - durable_task
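To make the reference concrete, a minimal sketch of constructing the client and reaching the feature clients listed above (both calls are grounded in the client diffs later in this commit):

```python
from hatchet_sdk import Hatchet

# Reads connection settings (token, host) from the environment.
hatchet = Hatchet()

# Feature clients hang off the top-level client.
workers = hatchet.workers.list()
task_metrics = hatchet.metrics.get_task_metrics()
```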
3
sdks/python/docs/feature-clients/cron.md
Normal file

@@ -0,0 +1,3 @@
# Cron Client

::: features.cron.CronClient

3
sdks/python/docs/feature-clients/logs.md
Normal file

@@ -0,0 +1,3 @@
# Logs Client

::: features.logs.LogsClient

3
sdks/python/docs/feature-clients/metrics.md
Normal file

@@ -0,0 +1,3 @@
# Metrics Client

::: features.metrics.MetricsClient

3
sdks/python/docs/feature-clients/rate_limits.md
Normal file

@@ -0,0 +1,3 @@
# Rate Limits Client

::: features.rate_limits.RateLimitsClient

22
sdks/python/docs/feature-clients/runs.md
Normal file

@@ -0,0 +1,22 @@
# Runs Client

::: features.runs.RunsClient
    options:
        members:
            - get
            - aio_get
            - list
            - aio_list
            - create
            - aio_create
            - replay
            - aio_replay
            - bulk_replay
            - aio_bulk_replay
            - cancel
            - aio_cancel
            - bulk_cancel
            - aio_bulk_cancel
            - get_result
            - aio_get_result
            - get_run_ref

3
sdks/python/docs/feature-clients/scheduled.md
Normal file

@@ -0,0 +1,3 @@
# Scheduled Client

::: features.scheduled.ScheduledClient

3
sdks/python/docs/feature-clients/workers.md
Normal file

@@ -0,0 +1,3 @@
# Workers Client

::: features.workers.WorkersClient

3
sdks/python/docs/feature-clients/workflows.md
Normal file

@@ -0,0 +1,3 @@
# Workflows Client

::: features.workflows.WorkflowsClient

0
sdks/python/docs/generator/__init__.py
Normal file

115
sdks/python/docs/generator/generate.py
Normal file
@@ -0,0 +1,115 @@
import argparse
import asyncio
import os
from typing import cast

from docs.generator.llm import parse_markdown
from docs.generator.paths import crawl_directory, find_child_paths
from docs.generator.shared import TMP_GEN_PATH
from docs.generator.types import Document
from docs.generator.utils import gather_max_concurrency, rm_rf


async def clean_markdown_with_openai(document: Document) -> None:
    print("Generating mdx for", document.readable_source_path)

    with open(document.source_path, "r", encoding="utf-8") as f:
        original_md = f.read()

    content = await parse_markdown(original_markdown=original_md)

    if not content:
        return None

    with open(document.mdx_output_path, "w", encoding="utf-8") as f:
        f.write(content)


def generate_sub_meta_entry(child: str) -> str:
    child = child.replace("/", "")
    return f"""
    "{child}": {{
        "title": "{child.replace("-", " ").title()}",
        "theme": {{
            "toc": true
        }},
    }},
"""


def generate_meta_js(docs: list[Document], children: set[str]) -> str:
    prefix = docs[0].directory
    subentries = [doc.meta_js_entry for doc in docs] + [
        generate_sub_meta_entry(child.replace(prefix, "")) for child in children
    ]

    sorted_subentries = sorted(
        subentries,
        key=lambda x: x.strip().split(":")[0].strip('"').lower(),
    )

    entries = "".join(sorted_subentries)

    return f"export default {{{entries}}}"


def update_meta_js(documents: list[Document]) -> None:
    meta_js_out_paths = {d.mdx_output_meta_js_path for d in documents}

    for path in meta_js_out_paths:
        relevant_documents = [d for d in documents if d.mdx_output_meta_js_path == path]

        exemplar = relevant_documents[0]

        directory = exemplar.directory

        children = find_child_paths(directory, documents)

        meta = generate_meta_js(relevant_documents, children)

        out_path = exemplar.mdx_output_meta_js_path

        with open(out_path, "w", encoding="utf-8") as f:
            f.write(meta)


async def run(selections: list[str]) -> None:
    rm_rf(TMP_GEN_PATH)

    try:
        os.system("poetry run mkdocs build")
        documents = crawl_directory(TMP_GEN_PATH, selections)

        await gather_max_concurrency(
            *[clean_markdown_with_openai(d) for d in documents], max_concurrency=10
        )

        if not selections:
            update_meta_js(documents)

        os.chdir("../../frontend/docs")
        os.system("pnpm lint:fix")
    finally:
        rm_rf("docs/site")
        rm_rf("site")
        rm_rf(TMP_GEN_PATH)


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--select",
        nargs="*",
        type=str,
        help="Select a subset of docs to generate. Note that this will prevent the `_meta.js` file from being generated.",
    )

    args = parser.parse_args()

    selections = cast(list[str], args.select or [])

    asyncio.run(run(selections))


if __name__ == "__main__":
    main()
20
sdks/python/docs/generator/llm.py
Normal file

@@ -0,0 +1,20 @@
from openai import AsyncOpenAI
from pydantic_settings import BaseSettings

from docs.generator.prompts import create_prompt_messages


class Settings(BaseSettings):
    openai_api_key: str = "fake-key"


settings = Settings()
client = AsyncOpenAI(api_key=settings.openai_api_key)


async def parse_markdown(original_markdown: str) -> str | None:
    response = await client.chat.completions.create(
        model="gpt-4o", messages=create_prompt_messages(original_markdown)
    )

    return response.choices[0].message.content
147
sdks/python/docs/generator/markdown_export.py
Normal file

@@ -0,0 +1,147 @@
import os
from typing import cast

from bs4 import BeautifulSoup, Tag
from markdownify import markdownify  # type: ignore[import-untyped]
from mkdocs.config.defaults import MkDocsConfig
from mkdocs.plugins import BasePlugin
from mkdocs.structure.pages import Page

from docs.generator.shared import TMP_GEN_PATH


class MarkdownExportPlugin(BasePlugin):  # type: ignore
    def __init__(self) -> None:
        super().__init__()
        self.soup: BeautifulSoup
        self.page_source_path: str

    def _remove_async_tags(self) -> "MarkdownExportPlugin":
        spans = self.soup.find_all("span", class_="doc doc-labels")

        for span in spans:
            if span.find(string="async") or (
                span.text and "async" == span.get_text().strip()
            ):
                span.decompose()

        return self

    def _remove_hash_links(self) -> "MarkdownExportPlugin":
        links = self.soup.find_all("a", class_="headerlink")
        for link in links:
            href = cast(str, link["href"])
            if href.startswith("#"):
                link.decompose()

        return self

    def _remove_toc(self) -> "MarkdownExportPlugin":
        tocs = self.soup.find_all("nav")
        for toc in tocs:
            toc.decompose()

        return self

    def _remove_footer(self) -> "MarkdownExportPlugin":
        footer = self.soup.find("footer")
        if footer and isinstance(footer, Tag):
            footer.decompose()

        return self

    def _remove_navbar(self) -> "MarkdownExportPlugin":
        navbar = self.soup.find("div", class_="navbar")
        if navbar and isinstance(navbar, Tag):
            navbar.decompose()

        navbar_header = self.soup.find("div", class_="navbar-header")
        if navbar_header and isinstance(navbar_header, Tag):
            navbar_header.decompose()

        navbar_collapse = self.soup.find("div", class_="navbar-collapse")
        if navbar_collapse and isinstance(navbar_collapse, Tag):
            navbar_collapse.decompose()

        return self

    def _remove_keyboard_shortcuts_modal(self) -> "MarkdownExportPlugin":
        modal = self.soup.find("div", id="mkdocs_keyboard_modal")

        if modal and isinstance(modal, Tag):
            modal.decompose()

        return self

    def _remove_title(self) -> "MarkdownExportPlugin":
        title = self.soup.find("h1", class_="title")

        if title and isinstance(title, Tag):
            title.decompose()

        return self

    def _remove_property_tags(self) -> "MarkdownExportPlugin":
        property_tags = self.soup.find_all("code", string="property")

        for tag in property_tags:
            tag.decompose()

        return self

    def _interpolate_docs_links(self) -> "MarkdownExportPlugin":
        links = self.soup.find_all("a")
        page_depth = self.page_source_path.count("/")

        ## Using the depth + 2 here because the links are relative to the root of
        ## the SDK docs subdir, which sits at `/sdks/python` (two levels below the root)
        dirs_up_prefix = "../" * (page_depth + 2)

        for link in links:
            href = link.get("href")

            if not href:
                continue

            href = cast(str, link["href"])

            if href.startswith("https://docs.hatchet.run/"):
                link["href"] = href.replace("https://docs.hatchet.run/", dirs_up_prefix)

        return self

    def _preprocess_html(self, content: str) -> str:
        self.soup = BeautifulSoup(content, "html.parser")

        (
            self._remove_async_tags()
            ._remove_hash_links()
            ._remove_toc()
            ._remove_footer()
            ._remove_keyboard_shortcuts_modal()
            ._remove_navbar()
            ._remove_title()
            ._remove_property_tags()
            ._interpolate_docs_links()
        )

        return str(self.soup)

    def on_post_page(
        self, output_content: str, page: Page, config: MkDocsConfig
    ) -> str:
        self.page_source_path = page.file.src_uri

        content = self._preprocess_html(output_content)
        md_content = markdownify(content, heading_style="ATX", wrap=False)

        if not md_content:
            return content

        dest = os.path.splitext(page.file.dest_path)[0] + ".md"
        out_path = os.path.join(TMP_GEN_PATH, dest)
        os.makedirs(os.path.dirname(out_path), exist_ok=True)

        with open(out_path, "w", encoding="utf-8") as f:
            f.write(md_content)

        return content
24
sdks/python/docs/generator/paths.py
Normal file

@@ -0,0 +1,24 @@
import os

from docs.generator.types import Document


def crawl_directory(directory: str, only_include: list[str]) -> list[Document]:
    return [
        d
        for root, _, filenames in os.walk(directory)
        for filename in filenames
        if (d := Document.from_path(os.path.join(root, filename))).readable_source_path
        in only_include
        or not only_include
    ]


def find_child_paths(prefix: str, docs: list[Document]) -> set[str]:
    return {
        doc.directory
        for doc in docs
        if doc.directory.startswith(prefix)
        and doc.directory != prefix
        and doc.directory.count("/") == prefix.count("/") + 1
    }
36
sdks/python/docs/generator/prompts.py
Normal file

@@ -0,0 +1,36 @@
from typing import ParamSpec, TypeVar, cast

from openai.types.chat import (
    ChatCompletionMessageParam,
    ChatCompletionSystemMessageParam,
    ChatCompletionUserMessageParam,
)

T = TypeVar("T")
P = ParamSpec("P")
R = TypeVar("R")


SYSTEM_PROMPT = """
You're an SDK documentation expert working on improving the readability of Hatchet's Python SDK documentation. You will be given
a markdown file, and your task is to fix any broken MDX so it can be used as a page on our Nextra documentation site.

In your work, follow these instructions:

1. Strip any unnecessary paragraph characters, but do not change any actual code, sentences, or content. You should keep the documentation as close to the original as possible, meaning that you should not generate new content, you should not consolidate existing content, you should not rearrange content, and so on.
2. Return only the content. You should not enclose the markdown in backticks or any other formatting.
3. You must ensure that MDX will render any tables correctly. One thing in particular to be on the lookout for is the use of the pipe `|` in type hints in the tables. For example, `int | None` is the Python type `Optional[int]` and should render in a single column with an escaped pipe character.
4. All code blocks should be formatted as `python`.
"""


def create_prompt_messages(
    user_prompt_content: str,
) -> list[ChatCompletionMessageParam]:
    return cast(
        list[ChatCompletionMessageParam],
        [
            ChatCompletionSystemMessageParam(content=SYSTEM_PROMPT, role="system"),
            ChatCompletionUserMessageParam(content=user_prompt_content, role="user"),
        ],
    )
1
sdks/python/docs/generator/shared.py
Normal file

@@ -0,0 +1 @@
TMP_GEN_PATH = "/tmp/hatchet-python/docs/gen"
67
sdks/python/docs/generator/types.py
Normal file

@@ -0,0 +1,67 @@
import os
import re

from pydantic import BaseModel

from docs.generator.shared import TMP_GEN_PATH

FRONTEND_DOCS_RELATIVE_PATH = "../../frontend/docs/pages/sdks/python"

MD_EXTENSION = "md"
MDX_EXTENSION = "mdx"
PY_EXTENSION = "py"


class Document(BaseModel):
    source_path: str
    readable_source_path: str
    mdx_output_path: str
    mdx_output_meta_js_path: str

    is_index: bool

    directory: str
    basename: str

    title: str = ""
    meta_js_entry: str = ""

    @staticmethod
    def from_path(path: str) -> "Document":
        # example path /tmp/hatchet-python/docs/gen/runnables.md

        basename = os.path.splitext(os.path.basename(path))[0]

        is_index = basename == "index"

        title = (
            "Introduction"
            if is_index
            else re.sub(
                "[^0-9a-zA-Z ]+", "", basename.replace("_", " ").replace("-", " ")
            ).title()
        )

        mdx_out_path = path.replace(
            TMP_GEN_PATH, "../../frontend/docs/pages/sdks/python"
        )
        mdx_out_dir = os.path.dirname(mdx_out_path)

        return Document(
            directory=os.path.dirname(path).replace(TMP_GEN_PATH, ""),
            basename=basename,
            title=title,
            meta_js_entry=f"""
    "{basename}": {{
        "title": "{title}",
        "theme": {{
            "toc": true,
        }}
    }},
""",
            source_path=path,
            readable_source_path=path.replace(TMP_GEN_PATH, "")[1:],
            mdx_output_path=mdx_out_path.replace(".md", ".mdx"),
            mdx_output_meta_js_path=mdx_out_dir + "/_meta.js",
            is_index=basename == "index",
        )
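As an illustration of what `from_path` produces, tracing the logic above on one of the generated files (the expected values are derived by hand from the code, not taken from the commit):

```python
from docs.generator.types import Document

doc = Document.from_path("/tmp/hatchet-python/docs/gen/feature-clients/cron.md")

# Tracing the code above, we would expect roughly:
#   doc.basename                == "cron"
#   doc.title                   == "Cron"
#   doc.directory               == "/feature-clients"
#   doc.readable_source_path    == "feature-clients/cron.md"
#   doc.mdx_output_path         == "../../frontend/docs/pages/sdks/python/feature-clients/cron.mdx"
#   doc.mdx_output_meta_js_path == "../../frontend/docs/pages/sdks/python/feature-clients/_meta.js"
```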
39
sdks/python/docs/generator/utils.py
Normal file

@@ -0,0 +1,39 @@
import asyncio
import shutil
from typing import Coroutine, ParamSpec, TypeVar

from openai import AsyncOpenAI
from pydantic_settings import BaseSettings

T = TypeVar("T")
P = ParamSpec("P")
R = TypeVar("R")


class Settings(BaseSettings):
    openai_api_key: str = "fake-key"


settings = Settings()
client = AsyncOpenAI(api_key=settings.openai_api_key)


async def gather_max_concurrency(
    *tasks: Coroutine[None, None, T],
    max_concurrency: int,
) -> list[T]:
    """asyncio.gather with cap on subtasks executing at once."""
    sem = asyncio.Semaphore(max_concurrency)

    async def task_wrapper(task: Coroutine[None, None, T]) -> T:
        async with sem:
            return await task

    return await asyncio.gather(
        *(task_wrapper(task) for task in tasks),
        return_exceptions=False,
    )


def rm_rf(path: str) -> None:
    shutil.rmtree(path, ignore_errors=True)
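A small usage sketch of `gather_max_concurrency`, which is how the generator caps OpenAI calls at 10 at a time; the `fetch` coroutine here is hypothetical:

```python
import asyncio

from docs.generator.utils import gather_max_concurrency


async def fetch(i: int) -> int:
    await asyncio.sleep(0.1)  # stand-in for a network call
    return i


async def main() -> None:
    # At most 10 of the 100 coroutines are awaited concurrently;
    # the semaphore in task_wrapper gates entry to each task.
    results = await gather_max_concurrency(
        *(fetch(i) for i in range(100)), max_concurrency=10
    )
    print(len(results))


asyncio.run(main())
```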
52
sdks/python/docs/runnables.md
Normal file

@@ -0,0 +1,52 @@
# Runnables

`Runnables` in the Hatchet SDK are things that can be run, namely tasks and workflows. The two main types of runnables you'll encounter are:

* `Workflow`, which lets you define tasks and call all of the run, schedule, etc. methods
* `Standalone`, which is a single task that's returned by `hatchet.task` and can be run, scheduled, etc.

## Workflow

::: runnables.workflow.Workflow
    options:
        members:
            - task
            - durable_task
            - on_failure_task
            - on_success_task
            - run
            - aio_run
            - run_no_wait
            - aio_run_no_wait
            - run_many
            - aio_run_many
            - run_many_no_wait
            - aio_run_many_no_wait
            - schedule
            - aio_schedule
            - create_cron
            - aio_create_cron
            - create_bulk_run_item
            - name
            - tasks
            - is_durable

## Standalone

::: runnables.standalone.Standalone
    options:
        members:
            - run
            - aio_run
            - run_no_wait
            - aio_run_no_wait
            - run_many
            - aio_run_many
            - run_many_no_wait
            - aio_run_many_no_wait
            - schedule
            - aio_schedule
            - create_cron
            - aio_create_cron
            - create_bulk_run_item
            - is_durable
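A minimal sketch of the `Standalone` shape described above; the task name and input model are hypothetical:

```python
from pydantic import BaseModel

from hatchet_sdk import Context, Hatchet

hatchet = Hatchet()


class GreetInput(BaseModel):
    name: str


# `hatchet.task` returns a `Standalone`, which can then be run,
# scheduled, or put on a cron like any other runnable.
@hatchet.task(name="greet", input_validator=GreetInput)
def greet(input: GreetInput, ctx: Context) -> dict[str, str]:
    return {"greeting": f"Hello, {input.name}!"}


result = greet.run(GreetInput(name="Hatchet"))
```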
@@ -9,7 +9,11 @@ import grpc
import grpc.aio
from pydantic import BaseModel, ConfigDict, Field, field_validator, model_validator

from hatchet_sdk.clients.event_ts import ThreadSafeEvent, read_with_interrupt
from hatchet_sdk.clients.event_ts import (
    ThreadSafeEvent,
    UnexpectedEOF,
    read_with_interrupt,
)
from hatchet_sdk.clients.events import proto_timestamp_now
from hatchet_sdk.clients.listeners.run_event_listener import (
    DEFAULT_ACTION_LISTENER_RETRY_INTERVAL,

@@ -275,15 +279,17 @@ class ActionListener:

                    break

                assigned_action, _, is_eof = t.result()
                result = t.result()

                if is_eof:
                if isinstance(result, UnexpectedEOF):
                    logger.debug("Handling EOF in Action Listener")
                    self.retries = self.retries + 1
                    break

                self.retries = 0

                assigned_action = result.data

                try:
                    action_payload = (
                        ActionPayload()
@@ -1,5 +1,5 @@
import asyncio
from typing import Callable, TypeVar, cast, overload
from typing import Callable, Generic, TypeVar, cast, overload

import grpc.aio
from grpc._cython import cygrpc  # type: ignore[attr-defined]

@@ -29,12 +29,23 @@ TRequest = TypeVar("TRequest")
TResponse = TypeVar("TResponse")


class ReadWithInterruptResult(Generic[TResponse]):
    def __init__(self, data: TResponse, key: str):
        self.data = data
        self.key = key


class UnexpectedEOF:
    def __init__(self) -> None:
        pass


@overload
async def read_with_interrupt(
    listener: grpc.aio.UnaryStreamCall[TRequest, TResponse],
    interrupt: ThreadSafeEvent,
    key_generator: Callable[[TResponse], str],
) -> tuple[TResponse, str, bool]: ...
) -> ReadWithInterruptResult[TResponse] | UnexpectedEOF: ...


@overload
@@ -42,23 +53,23 @@ async def read_with_interrupt(
    listener: grpc.aio.UnaryStreamCall[TRequest, TResponse],
    interrupt: ThreadSafeEvent,
    key_generator: None = None,
) -> tuple[TResponse, None, bool]: ...
) -> ReadWithInterruptResult[TResponse] | UnexpectedEOF: ...


async def read_with_interrupt(
    listener: grpc.aio.UnaryStreamCall[TRequest, TResponse],
    interrupt: ThreadSafeEvent,
    key_generator: Callable[[TResponse], str] | None = None,
) -> tuple[TResponse, str | None, bool]:
) -> ReadWithInterruptResult[TResponse] | UnexpectedEOF:
    try:
        result = cast(TResponse, await listener.read())

        if result is cygrpc.EOF:
            logger.warning("Received EOF from engine")
            return cast(TResponse, None), None, True
            return UnexpectedEOF()

        key = key_generator(result) if key_generator else None
        key = key_generator(result) if key_generator else ""

        return result, key, False
        return ReadWithInterruptResult(data=result, key=key)
    finally:
        interrupt.set()
@@ -6,7 +6,11 @@ from typing import Generic, Literal, TypeVar
import grpc
import grpc.aio

from hatchet_sdk.clients.event_ts import ThreadSafeEvent, read_with_interrupt
from hatchet_sdk.clients.event_ts import (
    ThreadSafeEvent,
    UnexpectedEOF,
    read_with_interrupt,
)
from hatchet_sdk.config import ClientConfig
from hatchet_sdk.logger import logger
from hatchet_sdk.metadata import get_metadata

@@ -130,18 +134,18 @@ class PooledListener(Generic[R, T, L], ABC):
                    await asyncio.sleep(DEFAULT_LISTENER_RETRY_INTERVAL)
                    break

                event, key, is_eof = t.result()
                event = t.result()

                if is_eof:
                if isinstance(event, UnexpectedEOF):
                    logger.debug(
                        f"Handling EOF in Pooled Listener {self.__class__.__name__}"
                    )
                    break

                subscriptions = self.to_subscriptions.get(key, [])
                subscriptions = self.to_subscriptions.get(event.key, [])

                for subscription_id in subscriptions:
                    await self.events[subscription_id].put(event)
                    await self.events[subscription_id].put(event.data)

        except grpc.RpcError as e:
            logger.debug(f"grpc error in listener: {e}")
@@ -26,11 +26,6 @@ from hatchet_sdk.utils.typing import JSONSerializableMapping
class CreateCronTriggerConfig(BaseModel):
    """
    Schema for creating a workflow run triggered by a cron.

    Attributes:
        expression (str): The cron expression defining the schedule.
        input (dict): The input data for the cron workflow.
        additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger (e.g. {"key1": "value1", "key2": "value2"}).
    """

    expression: str

@@ -43,14 +38,11 @@ class CreateCronTriggerConfig(BaseModel):
        """
        Validates the cron expression to ensure it adheres to the expected format.

        Args:
            v (str): The cron expression to validate.
        :param v: The cron expression to validate.

        Raises:
            ValueError: If the expression is invalid.
        :raises ValueError: If the expression is invalid

        Returns:
            str: The validated cron expression.
        :return: The validated cron expression.
        """
        if not v:
            raise ValueError("Cron expression is required")

@@ -72,6 +64,10 @@ class CreateCronTriggerConfig(BaseModel):


class CronClient(BaseRestClient):
    """
    The cron client is a client for managing cron workflows within Hatchet.
    """

    def _wra(self, client: ApiClient) -> WorkflowRunApi:
        return WorkflowRunApi(client)

@@ -88,17 +84,16 @@ class CronClient(BaseRestClient):
        priority: int | None = None,
    ) -> CronWorkflows:
        """
        Asynchronously creates a new workflow cron trigger.
        Create a new workflow cron trigger.

        Args:
            workflow_name (str): The name of the workflow to trigger.
            cron_name (str): The name of the cron trigger.
            expression (str): The cron expression defining the schedule.
            input (dict): The input data for the cron workflow.
            additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger (e.g. {"key1": "value1", "key2": "value2"}).
        :param workflow_name: The name of the workflow to trigger.
        :param cron_name: The name of the cron trigger.
        :param expression: The cron expression defining the schedule.
        :param input: The input data for the cron workflow.
        :param additional_metadata: Additional metadata associated with the cron trigger.
        :param priority: The priority of the cron workflow trigger.

        Returns:
            CronWorkflows: The created cron workflow instance.
        :return: The created cron workflow instance.
        """
        validated_input = CreateCronTriggerConfig(
            expression=expression, input=input, additional_metadata=additional_metadata

@@ -126,6 +121,18 @@ class CronClient(BaseRestClient):
        additional_metadata: JSONSerializableMapping,
        priority: int | None = None,
    ) -> CronWorkflows:
        """
        Create a new workflow cron trigger.

        :param workflow_name: The name of the workflow to trigger.
        :param cron_name: The name of the cron trigger.
        :param expression: The cron expression defining the schedule.
        :param input: The input data for the cron workflow.
        :param additional_metadata: Additional metadata associated with the cron trigger.
        :param priority: The priority of the cron workflow trigger.

        :return: The created cron workflow instance.
        """
        return await asyncio.to_thread(
            self.create,
            workflow_name,

@@ -138,10 +145,10 @@ class CronClient(BaseRestClient):

    def delete(self, cron_id: str) -> None:
        """
        Asynchronously deletes a workflow cron trigger.
        Delete a workflow cron trigger.

        Args:
            cron_id (str): The cron trigger ID or CronWorkflows instance to delete.
        :param cron_id: The ID of the cron trigger to delete.
        :return: None
        """
        with self.client() as client:
            self._wa(client).workflow_cron_delete(

@@ -149,6 +156,12 @@ class CronClient(BaseRestClient):
            )

    async def aio_delete(self, cron_id: str) -> None:
        """
        Delete a workflow cron trigger.

        :param cron_id: The ID of the cron trigger to delete.
        :return: None
        """
        return await asyncio.to_thread(self.delete, cron_id)

    async def aio_list(

@@ -161,18 +174,16 @@ class CronClient(BaseRestClient):
        order_by_direction: WorkflowRunOrderByDirection | None = None,
    ) -> CronWorkflowsList:
        """
        Synchronously retrieves a list of all workflow cron triggers matching the criteria.
        Retrieve a list of all workflow cron triggers matching the criteria.

        Args:
            offset (int | None): The offset to start the list from.
            limit (int | None): The maximum number of items to return.
            workflow_id (str | None): The ID of the workflow to filter by.
            additional_metadata (list[str] | None): Filter by additional metadata keys (e.g. ["key1:value1", "key2:value2"]).
            order_by_field (CronWorkflowsOrderByField | None): The field to order the list by.
            order_by_direction (WorkflowRunOrderByDirection | None): The direction to order the list by.
        :param offset: The offset to start the list from.
        :param limit: The maximum number of items to return.
        :param workflow_id: The ID of the workflow to filter by.
        :param additional_metadata: Filter by additional metadata keys.
        :param order_by_field: The field to order the list by.
        :param order_by_direction: The direction to order the list by.

        Returns:
            CronWorkflowsList: A list of cron workflows.
        :return: A list of cron workflows.
        """
        return await asyncio.to_thread(
            self.list,

@@ -194,18 +205,16 @@ class CronClient(BaseRestClient):
        order_by_direction: WorkflowRunOrderByDirection | None = None,
    ) -> CronWorkflowsList:
        """
        Asynchronously retrieves a list of all workflow cron triggers matching the criteria.
        Retrieve a list of all workflow cron triggers matching the criteria.

        Args:
            offset (int | None): The offset to start the list from.
            limit (int | None): The maximum number of items to return.
            workflow_id (str | None): The ID of the workflow to filter by.
            additional_metadata (list[str] | None): Filter by additional metadata keys (e.g. ["key1:value1", "key2:value2"]).
            order_by_field (CronWorkflowsOrderByField | None): The field to order the list by.
            order_by_direction (WorkflowRunOrderByDirection | None): The direction to order the list by.
        :param offset: The offset to start the list from.
        :param limit: The maximum number of items to return.
        :param workflow_id: The ID of the workflow to filter by.
        :param additional_metadata: Filter by additional metadata keys.
        :param order_by_field: The field to order the list by.
        :param order_by_direction: The direction to order the list by.

        Returns:
            CronWorkflowsList: A list of cron workflows.
        :return: A list of cron workflows.
        """
        with self.client() as client:
            return self._wa(client).cron_workflow_list(

@@ -222,13 +231,10 @@ class CronClient(BaseRestClient):

    def get(self, cron_id: str) -> CronWorkflows:
        """
        Asynchronously retrieves a specific workflow cron trigger by ID.
        Retrieve a specific workflow cron trigger by ID.

        Args:
            cron_id (str): The cron trigger ID or CronWorkflows instance to retrieve.

        Returns:
            CronWorkflows: The requested cron workflow instance.
        :param cron_id: The cron trigger ID or CronWorkflows instance to retrieve.
        :return: The requested cron workflow instance.
        """
        with self.client() as client:
            return self._wa(client).workflow_cron_get(

@@ -237,12 +243,9 @@ class CronClient(BaseRestClient):

    async def aio_get(self, cron_id: str) -> CronWorkflows:
        """
        Synchronously retrieves a specific workflow cron trigger by ID.
        Retrieve a specific workflow cron trigger by ID.

        Args:
            cron_id (str): The cron trigger ID or CronWorkflows instance to retrieve.

        Returns:
            CronWorkflows: The requested cron workflow instance.
        :param cron_id: The cron trigger ID or CronWorkflows instance to retrieve.
        :return: The requested cron workflow instance.
        """
        return await asyncio.to_thread(self.get, cron_id)
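A minimal sketch of the escape-hatch cron API documented above; the workflow name and schedule are illustrative, and `Workflow.create_cron` remains the preferred path:

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

# Trigger the (hypothetical) "simple-workflow" every 15 minutes.
cron = hatchet.cron.create(
    workflow_name="simple-workflow",
    cron_name="simple-every-15m",
    expression="*/15 * * * *",
    input={"key": "value"},
    additional_metadata={"source": "docs-example"},
)

crons = hatchet.cron.list(limit=10)
```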
@@ -7,12 +7,28 @@ from hatchet_sdk.clients.v1.api_client import BaseRestClient


class LogsClient(BaseRestClient):
    """
    The logs client is a client for interacting with Hatchet's logs API.
    """

    def _la(self, client: ApiClient) -> LogApi:
        return LogApi(client)

    def list(self, task_run_id: str) -> V1LogLineList:
        """
        List log lines for a given task run.

        :param task_run_id: The ID of the task run to list logs for.
        :return: A list of log lines for the specified task run.
        """
        with self.client() as client:
            return self._la(client).v1_log_line_list(task=task_run_id)

    async def aio_list(self, task_run_id: str) -> V1LogLineList:
        """
        List log lines for a given task run.

        :param task_run_id: The ID of the task run to list logs for.
        :return: A list of log lines for the specified task run.
        """
        return await asyncio.to_thread(self.list, task_run_id)
@@ -17,6 +17,10 @@ from hatchet_sdk.utils.typing import JSONSerializableMapping


class MetricsClient(BaseRestClient):
    """
    The metrics client is a client for reading metrics out of Hatchet's metrics API.
    """

    def _wa(self, client: ApiClient) -> WorkflowApi:
        return WorkflowApi(client)

@@ -29,6 +33,15 @@ class MetricsClient(BaseRestClient):
        status: WorkflowRunStatus | None = None,
        group_key: str | None = None,
    ) -> WorkflowMetrics:
        """
        Retrieve workflow metrics for a given workflow ID.

        :param workflow_id: The ID of the workflow to retrieve metrics for.
        :param status: The status of the workflow run to filter by.
        :param group_key: The key to group the metrics by.

        :return: Workflow metrics for the specified workflow ID.
        """
        with self.client() as client:
            return self._wa(client).workflow_get_metrics(
                workflow=workflow_id, status=status, group_key=group_key

@@ -40,6 +53,15 @@ class MetricsClient(BaseRestClient):
        status: WorkflowRunStatus | None = None,
        group_key: str | None = None,
    ) -> WorkflowMetrics:
        """
        Retrieve workflow metrics for a given workflow ID.

        :param workflow_id: The ID of the workflow to retrieve metrics for.
        :param status: The status of the workflow run to filter by.
        :param group_key: The key to group the metrics by.

        :return: Workflow metrics for the specified workflow ID.
        """
        return await asyncio.to_thread(
            self.get_workflow_metrics, workflow_id, status, group_key
        )

@@ -49,6 +71,14 @@ class MetricsClient(BaseRestClient):
        workflow_ids: list[str] | None = None,
        additional_metadata: JSONSerializableMapping | None = None,
    ) -> TenantQueueMetrics:
        """
        Retrieve queue metrics for a set of workflow ids and additional metadata.

        :param workflow_ids: A list of workflow IDs to retrieve metrics for.
        :param additional_metadata: Additional metadata to filter the metrics by.

        :return: Workflow metrics for the specified workflow IDs.
        """
        with self.client() as client:
            return self._wa(client).tenant_get_queue_metrics(
                tenant=self.client_config.tenant_id,

@@ -63,15 +93,33 @@ class MetricsClient(BaseRestClient):
        workflow_ids: list[str] | None = None,
        additional_metadata: JSONSerializableMapping | None = None,
    ) -> TenantQueueMetrics:
        """
        Retrieve queue metrics for a set of workflow ids and additional metadata.

        :param workflow_ids: A list of workflow IDs to retrieve metrics for.
        :param additional_metadata: Additional metadata to filter the metrics by.

        :return: Workflow metrics for the specified workflow IDs.
        """
        return await asyncio.to_thread(
            self.get_queue_metrics, workflow_ids, additional_metadata
        )

    def get_task_metrics(self) -> TenantStepRunQueueMetrics:
        """
        Retrieve queue metrics

        :return: Step run queue metrics for the tenant
        """
        with self.client() as client:
            return self._ta(client).tenant_get_step_run_queue_metrics(
                tenant=self.client_config.tenant_id
            )

    async def aio_get_task_metrics(self) -> TenantStepRunQueueMetrics:
        """
        Retrieve queue metrics

        :return: Step run queue metrics for the tenant
        """
        return await asyncio.to_thread(self.get_task_metrics)
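A short sketch of the metrics client documented above; the workflow ID placeholder is hypothetical:

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

# Queue depth across the tenant's step run queues.
task_metrics = hatchet.metrics.get_task_metrics()

# Metrics for one workflow, optionally filtered by status or group key.
workflow_metrics = hatchet.metrics.get_workflow_metrics(
    workflow_id="<workflow-id>",
)
```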
@@ -12,6 +12,10 @@ from hatchet_sdk.utils.proto_enums import convert_python_enum_to_proto


class RateLimitsClient(BaseRestClient):
    """
    The rate limits client is a wrapper for Hatchet's gRPC API that makes it easier to work with rate limits in Hatchet.
    """

    @tenacity_retry
    def put(
        self,

@@ -19,6 +23,16 @@ class RateLimitsClient(BaseRestClient):
        limit: int,
        duration: RateLimitDuration = RateLimitDuration.SECOND,
    ) -> None:
        """
        Put a rate limit for a given key.

        :param key: The key to set the rate limit for.
        :param limit: The rate limit to set.
        :param duration: The duration of the rate limit.

        :return: None
        """

        duration_proto = convert_python_enum_to_proto(
            duration, workflow_protos.RateLimitDuration
        )

@@ -42,4 +56,14 @@ class RateLimitsClient(BaseRestClient):
        limit: int,
        duration: RateLimitDuration = RateLimitDuration.SECOND,
    ) -> None:
        """
        Put a rate limit for a given key.

        :param key: The key to set the rate limit for.
        :param limit: The rate limit to set.
        :param duration: The duration of the rate limit.

        :return: None
        """

        await asyncio.to_thread(self.put, key, limit, duration)
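A sketch of upserting a rate limit with the `put` method above; the key is hypothetical and the `RateLimitDuration` import location is assumed rather than taken from this commit:

```python
from hatchet_sdk import Hatchet
from hatchet_sdk.rate_limit import RateLimitDuration  # assumed import path

hatchet = Hatchet()

# Allow at most 10 units per minute for the (hypothetical) key "external-api".
hatchet.rate_limits.put(
    key="external-api",
    limit=10,
    duration=RateLimitDuration.MINUTE,
)
```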
@@ -89,6 +89,10 @@ class BulkCancelReplayOpts(BaseModel):


class RunsClient(BaseRestClient):
    """
    The runs client is a client for interacting with task and workflow runs within Hatchet.
    """

    def __init__(
        self,
        config: ClientConfig,

@@ -107,10 +111,22 @@ class RunsClient(BaseRestClient):
        return TaskApi(client)

    def get(self, workflow_run_id: str) -> V1WorkflowRunDetails:
        """
        Get workflow run details for a given workflow run ID.

        :param workflow_run_id: The ID of the workflow run to retrieve details for.
        :return: Workflow run details for the specified workflow run ID.
        """
        with self.client() as client:
            return self._wra(client).v1_workflow_run_get(str(workflow_run_id))

    async def aio_get(self, workflow_run_id: str) -> V1WorkflowRunDetails:
        """
        Get workflow run details for a given workflow run ID.

        :param workflow_run_id: The ID of the workflow run to retrieve details for.
        :return: Workflow run details for the specified workflow run ID.
        """
        return await asyncio.to_thread(self.get, workflow_run_id)

    async def aio_list(

@@ -126,6 +142,22 @@ class RunsClient(BaseRestClient):
        worker_id: str | None = None,
        parent_task_external_id: str | None = None,
    ) -> V1TaskSummaryList:
        """
        List task runs according to a set of filters.

        :param since: The start time for filtering task runs.
        :param only_tasks: Whether to only list task runs.
        :param offset: The offset for pagination.
        :param limit: The maximum number of task runs to return.
        :param statuses: The statuses to filter task runs by.
        :param until: The end time for filtering task runs.
        :param additional_metadata: Additional metadata to filter task runs by.
        :param workflow_ids: The workflow IDs to filter task runs by.
        :param worker_id: The worker ID to filter task runs by.
        :param parent_task_external_id: The parent task external ID to filter task runs by.

        :return: A list of task runs matching the specified filters.
        """
        return await asyncio.to_thread(
            self.list,
            since=since,

@@ -153,6 +185,22 @@ class RunsClient(BaseRestClient):
        worker_id: str | None = None,
        parent_task_external_id: str | None = None,
    ) -> V1TaskSummaryList:
        """
        List task runs according to a set of filters.

        :param since: The start time for filtering task runs.
        :param only_tasks: Whether to only list task runs.
        :param offset: The offset for pagination.
        :param limit: The maximum number of task runs to return.
        :param statuses: The statuses to filter task runs by.
        :param until: The end time for filtering task runs.
        :param additional_metadata: Additional metadata to filter task runs by.
        :param workflow_ids: The workflow IDs to filter task runs by.
        :param worker_id: The worker ID to filter task runs by.
        :param parent_task_external_id: The parent task external ID to filter task runs by.

        :return: A list of task runs matching the specified filters.
        """
        with self.client() as client:
            return self._wra(client).v1_workflow_run_list(
                tenant=self.client_config.tenant_id,

@@ -177,6 +225,18 @@ class RunsClient(BaseRestClient):
        additional_metadata: JSONSerializableMapping = {},
        priority: int | None = None,
    ) -> V1WorkflowRunDetails:
        """
        Trigger a new workflow run.

        IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](https://docs.hatchet.run/sdks/python/runnables#workflow).

        :param workflow_name: The name of the workflow to trigger.
        :param input: The input data for the workflow run.
        :param additional_metadata: Additional metadata associated with the workflow run.
        :param priority: The priority of the workflow run.

        :return: The details of the triggered workflow run.
        """
        with self.client() as client:
            return self._wra(client).v1_workflow_run_create(
                tenant=self.client_config.tenant_id,

@@ -195,17 +255,47 @@ class RunsClient(BaseRestClient):
        additional_metadata: JSONSerializableMapping = {},
        priority: int | None = None,
    ) -> V1WorkflowRunDetails:
        """
        Trigger a new workflow run.

        IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](https://docs.hatchet.run/sdks/python/runnables#workflow).

        :param workflow_name: The name of the workflow to trigger.
        :param input: The input data for the workflow run.
        :param additional_metadata: Additional metadata associated with the workflow run.
        :param priority: The priority of the workflow run.

        :return: The details of the triggered workflow run.
        """
        return await asyncio.to_thread(
            self.create, workflow_name, input, additional_metadata, priority
        )

    def replay(self, run_id: str) -> None:
        """
        Replay a task or workflow run.

        :param run_id: The external ID of the task or workflow run to replay.
        :return: None
        """
        self.bulk_replay(opts=BulkCancelReplayOpts(ids=[run_id]))

    async def aio_replay(self, run_id: str) -> None:
        """
        Replay a task or workflow run.

        :param run_id: The external ID of the task or workflow run to replay.
        :return: None
        """
        return await asyncio.to_thread(self.replay, run_id)

    def bulk_replay(self, opts: BulkCancelReplayOpts) -> None:
        """
        Replay task or workflow runs in bulk, according to a set of filters.

        :param opts: Options for bulk replay, including filters and IDs.
        :return: None
        """
        with self.client() as client:
            self._ta(client).v1_task_replay(
                tenant=self.client_config.tenant_id,

@@ -213,15 +303,39 @@ class RunsClient(BaseRestClient):
            )

    async def aio_bulk_replay(self, opts: BulkCancelReplayOpts) -> None:
        """
        Replay task or workflow runs in bulk, according to a set of filters.

        :param opts: Options for bulk replay, including filters and IDs.
        :return: None
        """
        return await asyncio.to_thread(self.bulk_replay, opts)

    def cancel(self, run_id: str) -> None:
        """
        Cancel a task or workflow run.

        :param run_id: The external ID of the task or workflow run to cancel.
        :return: None
        """
        self.bulk_cancel(opts=BulkCancelReplayOpts(ids=[run_id]))

    async def aio_cancel(self, run_id: str) -> None:
        """
        Cancel a task or workflow run.

        :param run_id: The external ID of the task or workflow run to cancel.
        :return: None
        """
        return await asyncio.to_thread(self.cancel, run_id)

    def bulk_cancel(self, opts: BulkCancelReplayOpts) -> None:
        """
        Cancel task or workflow runs in bulk, according to a set of filters.

        :param opts: Options for bulk cancel, including filters and IDs.
        :return: None
        """
        with self.client() as client:
            self._ta(client).v1_task_cancel(
                tenant=self.client_config.tenant_id,

@@ -229,14 +343,43 @@ class RunsClient(BaseRestClient):
            )

    async def aio_bulk_cancel(self, opts: BulkCancelReplayOpts) -> None:
        """
        Cancel task or workflow runs in bulk, according to a set of filters.

        :param opts: Options for bulk cancel, including filters and IDs.
        :return: None
        """
        return await asyncio.to_thread(self.bulk_cancel, opts)

    def get_result(self, run_id: str) -> JSONSerializableMapping:
        """
        Get the result of a workflow run by its external ID.

        :param run_id: The external ID of the workflow run to retrieve the result for.
        :return: The result of the workflow run.
        """
        details = self.get(run_id)

        return details.run.output

    async def aio_get_result(self, run_id: str) -> JSONSerializableMapping:
        """
        Get the result of a workflow run by its external ID.

        :param run_id: The external ID of the workflow run to retrieve the result for.
        :return: The result of the workflow run.
        """
        details = await asyncio.to_thread(self.get, run_id)

        return details.run.output

    def get_run_ref(self, workflow_run_id: str) -> "WorkflowRunRef":
        """
        Get a reference to a workflow run.

        :param workflow_run_id: The ID of the workflow run to get a reference to.
        :return: A reference to the specified workflow run.
        """
        from hatchet_sdk.workflow_run import WorkflowRunRef

        return WorkflowRunRef(

@@ -245,8 +388,3 @@ class RunsClient(BaseRestClient):
            workflow_run_listener=self.workflow_run_listener,
            runs_client=self,
        )

    async def aio_get_result(self, run_id: str) -> JSONSerializableMapping:
        details = await asyncio.to_thread(self.get, run_id)

        return details.run.output
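A sketch of the bulk operations documented above; the run IDs are hypothetical placeholders, and the `BulkCancelReplayOpts` import path is assumed:

```python
from hatchet_sdk import Hatchet
from hatchet_sdk.features.runs import BulkCancelReplayOpts  # assumed import path

hatchet = Hatchet()

run_ids = ["<run-id-1>", "<run-id-2>"]  # hypothetical external IDs

# Cancel (or replay) several runs in one API call instead of looping
# over `cancel` / `replay`, which themselves delegate to the bulk methods.
hatchet.runs.bulk_cancel(opts=BulkCancelReplayOpts(ids=run_ids))
hatchet.runs.bulk_replay(opts=BulkCancelReplayOpts(ids=run_ids))
```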
@@ -27,6 +27,10 @@ from hatchet_sdk.utils.typing import JSONSerializableMapping
|
||||
|
||||
|
||||
class ScheduledClient(BaseRestClient):
|
||||
"""
|
||||
The scheduled client is a client for managing scheduled workflows within Hatchet.
|
||||
"""
|
||||
|
||||
def _wra(self, client: ApiClient) -> WorkflowRunApi:
|
||||
return WorkflowRunApi(client)
|
||||
|
||||
@@ -41,16 +45,16 @@ class ScheduledClient(BaseRestClient):
|
||||
additional_metadata: JSONSerializableMapping,
|
||||
) -> ScheduledWorkflows:
|
||||
"""
|
||||
Creates a new scheduled workflow run asynchronously.
|
||||
Creates a new scheduled workflow run.
|
||||
|
||||
Args:
|
||||
workflow_name (str): The name of the scheduled workflow.
|
||||
trigger_at (datetime.datetime): The datetime when the run should be triggered.
|
||||
input (JSONSerializableMapping): The input data for the scheduled workflow.
|
||||
additional_metadata (JSONSerializableMapping): Additional metadata associated with the future run.
|
||||
IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](https://docs.hatchet.run/sdks/python/runnables#workflow).
|
||||
|
||||
Returns:
|
||||
ScheduledWorkflows: The created scheduled workflow instance.
|
||||
:param workflow_name: The name of the workflow to schedule.
|
||||
:param trigger_at: The datetime when the run should be triggered.
|
||||
:param input: The input data for the scheduled workflow.
|
||||
:param additional_metadata: Additional metadata associated with the future run as a key-value pair.
|
||||
|
||||
:return: The created scheduled workflow instance.
|
||||
"""
|
||||
with self.client() as client:
|
||||
return self._wra(client).scheduled_workflow_run_create(
|
||||
@@ -71,16 +75,16 @@ class ScheduledClient(BaseRestClient):
|
||||
additional_metadata: JSONSerializableMapping,
|
||||
) -> ScheduledWorkflows:
|
||||
"""
|
||||
Creates a new scheduled workflow run asynchronously.
|
||||
Creates a new scheduled workflow run.
|
||||
|
||||
Args:
|
||||
workflow_name (str): The name of the scheduled workflow.
|
||||
trigger_at (datetime.datetime): The datetime when the run should be triggered.
|
||||
input (JSONSerializableMapping): The input data for the scheduled workflow.
|
||||
additional_metadata (JSONSerializableMapping): Additional metadata associated with the future run as a key-value pair (e.g. {"key1": "value1", "key2": "value2"}).
|
||||
IMPORTANT: It's preferable to use `Workflow.run` (and similar) to trigger workflows if possible. This method is intended to be an escape hatch. For more details, see [the documentation](https://docs.hatchet.run/sdks/python/runnables#workflow).
|
||||
|
||||
Returns:
|
||||
ScheduledWorkflows: The created scheduled workflow instance.
|
||||
:param workflow_name: The name of the workflow to schedule.
|
||||
:param trigger_at: The datetime when the run should be triggered.
|
||||
:param input: The input data for the scheduled workflow.
|
||||
:param additional_metadata: Additional metadata associated with the future run as a key-value pair.
|
||||
|
||||
:return: The created scheduled workflow instance.
|
||||
"""
|
||||
|
||||
return await asyncio.to_thread(
|
||||
@@ -93,10 +97,10 @@ class ScheduledClient(BaseRestClient):
|
||||
|
||||
def delete(self, scheduled_id: str) -> None:
|
||||
"""
|
||||
Deletes a scheduled workflow run.
|
||||
Deletes a scheduled workflow run by its ID.
|
||||
|
||||
Args:
scheduled_id (str): The scheduled workflow trigger ID to delete.
:param scheduled_id: The ID of the scheduled workflow run to delete.
:return: None
"""
with self.client() as client:
self._wa(client).workflow_scheduled_delete(
@@ -105,6 +109,12 @@ class ScheduledClient(BaseRestClient):
)

async def aio_delete(self, scheduled_id: str) -> None:
"""
Deletes a scheduled workflow run by its ID.

:param scheduled_id: The ID of the scheduled workflow run to delete.
:return: None
"""
await asyncio.to_thread(self.delete, scheduled_id)

async def aio_list(
@@ -121,18 +131,16 @@ class ScheduledClient(BaseRestClient):
"""
Retrieves a list of scheduled workflows based on provided filters.

Args:
offset (int | None): The starting point for the list.
limit (int | None): The maximum number of items to return.
workflow_id (str | None): Filter by specific workflow ID.
parent_workflow_run_id (str | None): Filter by parent workflow run ID.
statuses (list[ScheduledRunStatus] | None): Filter by status.
additional_metadata (Optional[List[dict[str, str]]]): Filter by additional metadata.
order_by_field (Optional[ScheduledWorkflowsOrderByField]): Field to order the results by.
order_by_direction (Optional[WorkflowRunOrderByDirection]): Direction to order the results.
:param offset: The offset to use in pagination.
:param limit: The maximum number of scheduled workflows to return.
:param workflow_id: The ID of the workflow to filter by.
:param parent_workflow_run_id: The ID of the parent workflow run to filter by.
:param statuses: A list of statuses to filter by.
:param additional_metadata: Additional metadata to filter by.
:param order_by_field: The field to order the results by.
:param order_by_direction: The direction to order the results by.

Returns:
List[ScheduledWorkflows]: A list of scheduled workflows matching the criteria.
:return: A list of scheduled workflows matching the provided filters.
"""
return await asyncio.to_thread(
self.list,
@@ -160,18 +168,16 @@ class ScheduledClient(BaseRestClient):
"""
Retrieves a list of scheduled workflows based on provided filters.

Args:
offset (int | None): The starting point for the list.
limit (int | None): The maximum number of items to return.
workflow_id (str | None): Filter by specific workflow ID.
parent_workflow_run_id (str | None): Filter by parent workflow run ID.
statuses (list[ScheduledRunStatus] | None): Filter by status.
additional_metadata (Optional[List[dict[str, str]]]): Filter by additional metadata.
order_by_field (Optional[ScheduledWorkflowsOrderByField]): Field to order the results by.
order_by_direction (Optional[WorkflowRunOrderByDirection]): Direction to order the results.
:param offset: The offset to use in pagination.
:param limit: The maximum number of scheduled workflows to return.
:param workflow_id: The ID of the workflow to filter by.
:param parent_workflow_run_id: The ID of the parent workflow run to filter by.
:param statuses: A list of statuses to filter by.
:param additional_metadata: Additional metadata to filter by.
:param order_by_field: The field to order the results by.
:param order_by_direction: The direction to order the results by.

Returns:
List[ScheduledWorkflows]: A list of scheduled workflows matching the criteria.
:return: A list of scheduled workflows matching the provided filters.
"""
with self.client() as client:
return self._wa(client).workflow_scheduled_list(
@@ -192,11 +198,8 @@ class ScheduledClient(BaseRestClient):
"""
Retrieves a specific scheduled workflow by scheduled run trigger ID.

Args:
scheduled (str): The scheduled workflow trigger ID to retrieve.

Returns:
ScheduledWorkflows: The requested scheduled workflow instance.
:param scheduled_id: The scheduled workflow trigger ID to retrieve.
:return: The requested scheduled workflow instance.
"""

with self.client() as client:
@@ -209,10 +212,7 @@ class ScheduledClient(BaseRestClient):
"""
Retrieves a specific scheduled workflow by scheduled run trigger ID.

Args:
scheduled (str): The scheduled workflow trigger ID to retrieve.

Returns:
ScheduledWorkflows: The requested scheduled workflow instance.
:param scheduled_id: The scheduled workflow trigger ID to retrieve.
:return: The requested scheduled workflow instance.
"""
return await asyncio.to_thread(self.get, scheduled_id)

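For reference, a minimal usage sketch of the scheduled client documented in this hunk, assuming a configured `Hatchet` client; the trigger ID is hypothetical:

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

# List scheduled runs; all of the filters documented above are optional.
scheduled = hatchet.scheduled.list(offset=0, limit=10)

# Fetch a single scheduled trigger, then delete it (ID is hypothetical).
trigger = hatchet.scheduled.get("example-scheduled-id")
hatchet.scheduled.delete(scheduled_id="example-scheduled-id")
```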
@@ -9,19 +9,40 @@ from hatchet_sdk.clients.v1.api_client import BaseRestClient


class WorkersClient(BaseRestClient):
"""
The workers client is a client for managing workers programmatically within Hatchet.
"""

def _wa(self, client: ApiClient) -> WorkerApi:
return WorkerApi(client)

def get(self, worker_id: str) -> Worker:
"""
Get a worker by its ID.

:param worker_id: The ID of the worker to retrieve.
:return: The worker.
"""
with self.client() as client:
return self._wa(client).worker_get(worker_id)

async def aio_get(self, worker_id: str) -> Worker:
"""
Get a worker by its ID.

:param worker_id: The ID of the worker to retrieve.
:return: The worker.
"""
return await asyncio.to_thread(self.get, worker_id)

def list(
self,
) -> WorkerList:
"""
List all workers in the tenant determined by the client config.

:return: A list of workers.
"""
with self.client() as client:
return self._wa(client).worker_list(
tenant=self.client_config.tenant_id,
@@ -30,9 +51,21 @@ class WorkersClient(BaseRestClient):
async def aio_list(
self,
) -> WorkerList:
"""
List all workers in the tenant determined by the client config.

:return: A list of workers.
"""
return await asyncio.to_thread(self.list)

def update(self, worker_id: str, opts: UpdateWorkerRequest) -> Worker:
"""
Update a worker by its ID.

:param worker_id: The ID of the worker to update.
:param opts: The update options.
:return: The updated worker.
"""
with self.client() as client:
return self._wa(client).worker_update(
worker=worker_id,
@@ -40,4 +73,11 @@ class WorkersClient(BaseRestClient):
)

async def aio_update(self, worker_id: str, opts: UpdateWorkerRequest) -> Worker:
"""
Update a worker by its ID.

:param worker_id: The ID of the worker to update.
:param opts: The update options.
:return: The updated worker.
"""
return await asyncio.to_thread(self.update, worker_id, opts)

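A similar sketch for the workers client; the `UpdateWorkerRequest` import path and its `is_paused` field are assumptions based on the generated REST models:

```python
from hatchet_sdk import Hatchet
from hatchet_sdk.clients.rest.models import UpdateWorkerRequest  # assumed path

hatchet = Hatchet()

# List all workers in the tenant configured on the client.
workers = hatchet.workers.list()

# Pause a worker by ID (hypothetical ID; `is_paused` assumed per the REST model).
hatchet.workers.update(
    worker_id="example-worker-id",
    opts=UpdateWorkerRequest(is_paused=True),
)
```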
@@ -10,6 +10,12 @@ from hatchet_sdk.clients.v1.api_client import BaseRestClient


class WorkflowsClient(BaseRestClient):
"""
The workflows client is a client for managing workflows programmatically within Hatchet.

Note that workflows are the declaration, _not_ the individual runs. If you're looking for runs, use the `RunsClient` instead.
"""

def _wra(self, client: ApiClient) -> WorkflowRunApi:
return WorkflowRunApi(client)

@@ -17,9 +23,21 @@ class WorkflowsClient(BaseRestClient):
return WorkflowApi(client)

async def aio_get(self, workflow_id: str) -> Workflow:
"""
Get a workflow by its ID.

:param workflow_id: The ID of the workflow to retrieve.
:return: The workflow.
"""
return await asyncio.to_thread(self.get, workflow_id)

def get(self, workflow_id: str) -> Workflow:
"""
Get a workflow by its ID.

:param workflow_id: The ID of the workflow to retrieve.
:return: The workflow.
"""
with self.client() as client:
return self._wa(client).workflow_get(workflow_id)

@@ -29,6 +47,15 @@ class WorkflowsClient(BaseRestClient):
limit: int | None = None,
offset: int | None = None,
) -> WorkflowList:
"""
List all workflows in the tenant determined by the client config that match optional filters.

:param workflow_name: The name of the workflow to filter by.
:param limit: The maximum number of items to return.
:param offset: The offset to start the list from.

:return: A list of workflows.
"""
with self.client() as client:
return self._wa(client).workflow_list(
tenant=self.client_config.tenant_id,
@@ -43,15 +70,38 @@ class WorkflowsClient(BaseRestClient):
limit: int | None = None,
offset: int | None = None,
) -> WorkflowList:
"""
List all workflows in the tenant determined by the client config that match optional filters.

:param workflow_name: The name of the workflow to filter by.
:param limit: The maximum number of items to return.
:param offset: The offset to start the list from.

:return: A list of workflows.
"""
return await asyncio.to_thread(self.list, workflow_name, limit, offset)

def get_version(
self, workflow_id: str, version: str | None = None
) -> WorkflowVersion:
"""
Get a workflow version by the workflow ID and an optional version.

:param workflow_id: The ID of the workflow to retrieve the version for.
:param version: The version of the workflow to retrieve. If None, the latest version is returned.
:return: The workflow version.
"""
with self.client() as client:
return self._wa(client).workflow_version_get(workflow_id, version)

async def aio_get_version(
self, workflow_id: str, version: str | None = None
) -> WorkflowVersion:
"""
Get a workflow version by the workflow ID and an optional version.

:param workflow_id: The ID of the workflow to retrieve the version for.
:param version: The version of the workflow to retrieve. If None, the latest version is returned.
:return: The workflow version.
"""
return await asyncio.to_thread(self.get_version, workflow_id, version)

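And for the workflows client, which operates on declarations rather than runs; the IDs are hypothetical:

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet()

# Filter declarations by name, with pagination.
workflows = hatchet.workflows.list(workflow_name="my-workflow", limit=5)

# Fetch one declaration and its latest version.
workflow = hatchet.workflows.get("example-workflow-id")
latest_version = hatchet.workflows.get_version("example-workflow-id")
```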
@@ -1,6 +1,6 @@
import asyncio
import logging
from typing import Any, Callable, Type, cast, overload
from typing import Any, Callable, Type, Union, cast, overload

from hatchet_sdk import Context, DurableContext
from hatchet_sdk.client import Client
@@ -41,15 +41,7 @@ class Hatchet:
Main client for interacting with the Hatchet SDK.

This class provides access to various client interfaces and utility methods
for working with Hatchet workers, workflows, and steps.

Attributes:
cron (CronClient): Interface for cron trigger operations.

admin (AdminClient): Interface for administrative operations.
dispatcher (DispatcherClient): Interface for dispatching operations.
event (EventClient): Interface for event-related operations.
rest (RestApi): Interface for REST API operations.
for working with Hatchet workers, workflows, tasks, and our various feature clients.
"""

def __init__(
@@ -58,19 +50,6 @@ class Hatchet:
client: Client | None = None,
config: ClientConfig | None = None,
):
"""
Initialize a new Hatchet instance.

:param debug: Enable debug logging. Default: `False`
:type debug: bool

:param client: A pre-configured `Client` instance. Default: `None`.
:type client: Client | None

:param config: Configuration for creating a new Client. Defaults to ClientConfig()
:type config: ClientConfig
"""

if debug:
logger.setLevel(logging.DEBUG)

@@ -80,34 +59,60 @@ class Hatchet:

@property
def cron(self) -> CronClient:
"""
The cron client is a client for managing cron workflows within Hatchet.
"""
return self._client.cron

@property
def logs(self) -> LogsClient:
"""
The logs client is a client for interacting with Hatchet's logs API.
"""
return self._client.logs

@property
def metrics(self) -> MetricsClient:
"""
The metrics client is a client for reading metrics out of Hatchet's metrics API.
"""
return self._client.metrics

@property
def rate_limits(self) -> RateLimitsClient:
"""
The rate limits client is a wrapper for Hatchet's gRPC API that makes it easier to work with rate limits in Hatchet.
"""
return self._client.rate_limits

@property
def runs(self) -> RunsClient:
"""
The runs client is a client for interacting with task and workflow runs within Hatchet.
"""
return self._client.runs

@property
def scheduled(self) -> ScheduledClient:
"""
The scheduled client is a client for managing scheduled workflows within Hatchet.
"""
return self._client.scheduled

@property
def workers(self) -> WorkersClient:
"""
The workers client is a client for managing workers programmatically within Hatchet.
"""
return self._client.workers

@property
def workflows(self) -> WorkflowsClient:
"""
The workflows client is a client for managing workflows programmatically within Hatchet.

Note that workflows are the declaration, _not_ the individual runs. If you're looking for runs, use the `RunsClient` instead.
"""
return self._client.workflows

@property
@@ -116,6 +121,9 @@ class Hatchet:

@property
def event(self) -> EventClient:
"""
The event client, which you can use to push events to Hatchet.
"""
return self._client.event

@property
@@ -128,14 +136,24 @@ class Hatchet:

@property
def tenant_id(self) -> str:
"""
The tenant id you're operating in.
"""
return self._client.config.tenant_id

@property
def namespace(self) -> str:
"""
The current namespace you're interacting with.
"""
return self._client.config.namespace

def worker(
self,
name: str,
slots: int = 100,
durable_slots: int = 1_000,
labels: dict[str, str | int] = {},
labels: dict[str, Union[str, int]] = {},
workflows: list[BaseWorkflow[Any]] = [],
lifespan: LifespanFn | None = None,
) -> Worker:
@@ -143,20 +161,18 @@ class Hatchet:
Create a Hatchet worker on which to run workflows.

:param name: The name of the worker.
:type name: str

:param slots: The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time. Default: 100
:type slots: int
:param slots: The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time

:param labels: A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels. Defaults to an empty dictionary (no labels)
:type labels: dict[str, str | int]
:param durable_slots: The number of durable workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time that are durable.

:param workflows: A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them. Defaults to an empty list
:type workflows: list[Workflow]
:param labels: A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels.

:param workflows: A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them.

:param lifespan: A lifespan function to run on the worker. This function will be called when the worker is started, and can be used to perform any setup or teardown tasks.

:returns: The created `Worker` object, which exposes an instance method `start` which can be called to start the worker.
:rtype: Worker
"""

try:
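A sketch of creating a worker with the signature documented above; `my_workflow` stands in for any declaration produced by `hatchet.workflow(...)`:

```python
from hatchet_sdk import Hatchet

hatchet = Hatchet(debug=True)

# `my_workflow` is a placeholder for a workflow declared elsewhere in your app.
worker = hatchet.worker(
    name="example-worker",
    slots=50,
    durable_slots=500,
    labels={"region": "us-east-1", "gpu": 0},
    workflows=[my_workflow],
)

worker.start()  # blocks and begins pulling runs
```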
@@ -226,37 +242,26 @@ class Hatchet:
Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedule`d, and so on.

:param name: The name of the workflow.
:type name: str

:param description: A description for the workflow. Default: None
:type description: str | None

:param version: A version for the workflow. Default: None
:type version: str | None
:param description: A description for the workflow

:param input_validator: A Pydantic model to use as a validator for the `input` to the tasks in the workflow. If no validator is provided, defaults to an `EmptyModel` under the hood. The `EmptyModel` is a Pydantic model with no fields specified, and with the `extra` config option set to `"allow"`.
:type input_validator: Type[BaseModel]

:param on_events: A list of event triggers for the workflow - events which cause the workflow to be run. Defaults to an empty list, meaning the workflow will not be run on any event pushes.
:type on_events: list[str]
:param on_events: A list of event triggers for the workflow - events which cause the workflow to be run.

:param on_crons: A list of cron triggers for the workflow. Defaults to an empty list, meaning the workflow will not be run on any cron schedules.
:type on_crons: list[str]
:param on_crons: A list of cron triggers for the workflow.

:param sticky: A sticky strategy for the workflow. Default: `None`
:type sticky: StickyStategy
:param version: A version for the workflow

:param default_priority: The priority of the workflow. Higher values will cause this workflow to have priority in scheduling over other, lower priority ones. Default: `1`
:type default_priority: int
:param sticky: A sticky strategy for the workflow

:param default_priority: The priority of the workflow. Higher values will cause this workflow to have priority in scheduling over other, lower priority ones.

:param concurrency: A concurrency object controlling the concurrency settings for this workflow.
:type concurrency: ConcurrencyExpression | None

:param task_defaults: A `TaskDefaults` object controlling the default task settings for this workflow.
:type task_defaults: TaskDefaults

:returns: The created `Workflow` object, which can be used to declare tasks, run the workflow, and so on.
:rtype: Workflow
"""

return Workflow[TWorkflowInput](
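A sketch of the `workflow` factory using the parameters documented in this hunk; the trigger names are illustrative:

```python
from pydantic import BaseModel
from hatchet_sdk import Hatchet

hatchet = Hatchet()

class OrderInput(BaseModel):
    order_id: str

order_workflow = hatchet.workflow(
    "process-order",
    description="Processes incoming orders",
    input_validator=OrderInput,
    on_events=["order:created"],  # run on matching event pushes
    on_crons=["0 2 * * *"],       # and nightly at 02:00
    default_priority=2,
)
```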
@@ -351,55 +356,38 @@ class Hatchet:
A decorator to transform a function into a standalone Hatchet task that runs as part of a workflow.

:param name: The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.
:type name: str

:param description: An optional description for the task. Default: None
:type description: str | None
:param description: An optional description for the task.

:param input_validator: A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`.
:type input_validator: Type[BaseModel]

:param on_events: A list of event triggers for the task - events which cause the task to be run. Defaults to an empty list.
:type on_events: list[str]
:param on_events: A list of event triggers for the task - events which cause the task to be run.

:param on_crons: A list of cron triggers for the task. Defaults to an empty list.
:type on_crons: list[str]
:param on_crons: A list of cron triggers for the task.

:param version: A version for the task. Default: None
:type version: str | None
:param version: A version for the task.

:param sticky: A sticky strategy for the task. Default: None
:type sticky: StickyStrategy | None
:param sticky: A sticky strategy for the task.

:param default_priority: The priority of the task. Higher values will cause this task to have priority in scheduling. Default: 1
:type default_priority: int
:param default_priority: The priority of the task. Higher values will cause this task to have priority in scheduling.

:param concurrency: A concurrency object controlling the concurrency settings for this task.
:type concurrency: ConcurrencyExpression | None

:param schedule_timeout: The maximum time allowed for scheduling the task. Default: DEFAULT_SCHEDULE_TIMEOUT
:type schedule_timeout: Duration
:param schedule_timeout: The maximum time allowed for scheduling the task.

:param execution_timeout: The maximum time allowed for executing the task. Default: DEFAULT_EXECUTION_TIMEOUT
:type execution_timeout: Duration
:param execution_timeout: The maximum time allowed for executing the task.

:param retries: The number of times to retry the task before failing. Default: 0
:type retries: int
:param retries: The number of times to retry the task before failing.

:param rate_limits: A list of rate limit configurations for the task. Defaults to an empty list.
:type rate_limits: list[RateLimit]
:param rate_limits: A list of rate limit configurations for the task.

:param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned.
:type desired_worker_labels: dict[str, DesiredWorkerLabel]

:param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: None
:type backoff_factor: float | None
:param backoff_factor: The backoff factor for controlling exponential backoff in retries.

:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: None
:type backoff_max_seconds: int | None
:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue.

:returns: A decorator which creates a `Standalone` task object.
:rtype: Callable[[Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]]
"""

workflow = Workflow[TWorkflowInput](
@@ -411,6 +399,7 @@ class Hatchet:
on_crons=on_crons,
sticky=sticky,
concurrency=concurrency,
default_priority=default_priority,
input_validator=input_validator
or cast(Type[TWorkflowInput], EmptyModel),
),
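A minimal sketch of the standalone `task` decorator; the resulting `Standalone` object exposes the run methods documented later in this commit:

```python
from pydantic import BaseModel
from hatchet_sdk import Context, Hatchet

hatchet = Hatchet()

class GreetInput(BaseModel):
    name: str

@hatchet.task(name="greet", input_validator=GreetInput, retries=2)
def greet(input: GreetInput, ctx: Context) -> dict[str, str]:
    # Executes on a worker; `ctx` carries run metadata and helpers.
    return {"greeting": f"Hello, {input.name}!"}

result = greet.run(GreetInput(name="World"))
```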
@@ -527,55 +516,38 @@ class Hatchet:
A decorator to transform a function into a standalone Hatchet _durable_ task that runs as part of a workflow.

:param name: The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.
:type name: str

:param description: An optional description for the task. Default: None
:type description: str | None
:param description: An optional description for the task.

:param input_validator: A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`.
:type input_validator: Type[BaseModel]

:param on_events: A list of event triggers for the task - events which cause the task to be run. Defaults to an empty list.
:type on_events: list[str]
:param on_events: A list of event triggers for the task - events which cause the task to be run.

:param on_crons: A list of cron triggers for the task. Defaults to an empty list.
:type on_crons: list[str]
:param on_crons: A list of cron triggers for the task.

:param version: A version for the task. Default: None
:type version: str | None
:param version: A version for the task.

:param sticky: A sticky strategy for the task. Default: None
:type sticky: StickyStrategy | None
:param sticky: A sticky strategy for the task.

:param default_priority: The priority of the task. Higher values will cause this task to have priority in scheduling. Default: 1
:type default_priority: int
:param default_priority: The priority of the task. Higher values will cause this task to have priority in scheduling.

:param concurrency: A concurrency object controlling the concurrency settings for this task.
:type concurrency: ConcurrencyExpression | None

:param schedule_timeout: The maximum time allowed for scheduling the task. Default: DEFAULT_SCHEDULE_TIMEOUT
:type schedule_timeout: Duration
:param schedule_timeout: The maximum time allowed for scheduling the task.

:param execution_timeout: The maximum time allowed for executing the task. Default: DEFAULT_EXECUTION_TIMEOUT
:type execution_timeout: Duration
:param execution_timeout: The maximum time allowed for executing the task.

:param retries: The number of times to retry the task before failing. Default: 0
:type retries: int
:param retries: The number of times to retry the task before failing.

:param rate_limits: A list of rate limit configurations for the task. Defaults to an empty list.
:type rate_limits: list[RateLimit]
:param rate_limits: A list of rate limit configurations for the task.

:param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned.
:type desired_worker_labels: dict[str, DesiredWorkerLabel]

:param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: None
:type backoff_factor: float | None
:param backoff_factor: The backoff factor for controlling exponential backoff in retries.

:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: None
:type backoff_max_seconds: int | None
:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue.

:returns: A decorator which creates a `Standalone` task object.
:rtype: Callable[[Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]]
"""

workflow = Workflow[TWorkflowInput](
@@ -589,6 +561,7 @@ class Hatchet:
concurrency=concurrency,
input_validator=input_validator
or cast(Type[TWorkflowInput], EmptyModel),
default_priority=default_priority,
),
self,
)

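The durable variant looks much the same apart from the context type it receives; a sketch (see the Hatchet docs on durable execution before reaching for this):

```python
from hatchet_sdk import DurableContext, Hatchet

hatchet = Hatchet()

@hatchet.durable_task(name="long-running", retries=1)
def long_running(input, ctx: DurableContext) -> dict[str, str]:
    # Durable tasks are intended for long waits and interruption-safe work.
    return {"status": "done"}
```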
@@ -59,7 +59,6 @@ def create_traceparent() -> str | None:
:returns: A W3C-formatted traceparent header value if successful, None if the context
injection fails or no active span exists.\n
Example: `00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01`
:rtype: str | None:
"""

carrier: dict[str, str] = {}
@@ -79,10 +78,9 @@ def parse_carrier_from_metadata(

:param metadata: A dictionary containing metadata key-value pairs,
potentially including the `traceparent` header. Can be None.
:type metadata: dict[str, str] | None

:returns: The extracted OpenTelemetry Context object if a valid `traceparent`
is found in the metadata, otherwise None.
:rtype: Context | None

:Example:

@@ -112,13 +110,12 @@ def inject_traceparent_into_metadata(
`OTEL_TRACEPARENT_KEY`. If no `traceparent` is provided, it attempts to create one.

:param metadata: The metadata dictionary to inject the `traceparent` into.
:type metadata: dict[str, str]

:param traceparent: The `traceparent` string to inject. If None, attempts to use
the current span.
:type traceparent: str | None, optional

:returns: A new metadata dictionary containing the original metadata plus
the injected `traceparent`, if one was available or could be created.
:rtype: dict[str, str]

:Example:

@@ -141,22 +138,23 @@ def inject_traceparent_into_metadata(


class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
"""
Hatchet OpenTelemetry instrumentor.

The instrumentor provides an OpenTelemetry integration for Hatchet by setting up
tracing and metrics collection.

:param tracer_provider: TracerProvider | None: The OpenTelemetry TracerProvider to use.
If not provided, the global tracer provider will be used.
:param meter_provider: MeterProvider | None: The OpenTelemetry MeterProvider to use.
If not provided, a no-op meter provider will be used.
"""

def __init__(
self,
tracer_provider: TracerProvider | None = None,
meter_provider: MeterProvider | None = None,
):
"""
Hatchet OpenTelemetry instrumentor.

The instrumentor provides an OpenTelemetry integration for Hatchet by setting up
tracing and metrics collection.

:param tracer_provider: TracerProvider | None: The OpenTelemetry TracerProvider to use.
If not provided, the global tracer provider will be used.
:param meter_provider: MeterProvider | None: The OpenTelemetry MeterProvider to use.
If not provided, a no-op meter provider will be used.
"""

self.tracer_provider = tracer_provider or get_tracer_provider()
self.meter_provider = meter_provider or NoOpMeterProvider()

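A sketch of wiring the instrumentor up; `instrument()` is inherited from OpenTelemetry's `BaseInstrumentor`, and the import path shown is an assumption:

```python
from opentelemetry.trace import get_tracer_provider

from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor  # assumed path

HatchetInstrumentor(
    tracer_provider=get_tracer_provider(),  # falls back to the global provider anyway
).instrument()
```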
@@ -75,6 +75,15 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
) -> R:
"""
Synchronously trigger a workflow run without waiting for it to complete.
This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs.

:param input: The input data for the workflow.
:param options: Additional options for workflow execution.

:returns: A `WorkflowRunRef` object representing the reference to the workflow run.
"""
return self._extract_result(self._workflow.run(input, options))

async def aio_run(
@@ -82,6 +91,16 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
) -> R:
"""
Run the workflow asynchronously and wait for it to complete.

This method triggers a workflow run, blocks until completion, and returns the final result.

:param input: The input data for the workflow, must match the workflow's input type.
:param options: Additional options for workflow execution like metadata and parent workflow ID.

:returns: The result of the workflow execution as a dictionary.
"""
result = await self._workflow.aio_run(input, options)
return self._extract_result(result)

@@ -90,6 +109,16 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
) -> TaskRunRef[TWorkflowInput, R]:
"""
Run the workflow synchronously and wait for it to complete.

This method triggers a workflow run, blocks until completion, and returns the final result.

:param input: The input data for the workflow, must match the workflow's input type.
:param options: Additional options for workflow execution like metadata and parent workflow ID.

:returns: The result of the workflow execution as a dictionary.
"""
ref = self._workflow.run_no_wait(input, options)

return TaskRunRef[TWorkflowInput, R](self, ref)
@@ -99,17 +128,40 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
) -> TaskRunRef[TWorkflowInput, R]:
"""
Asynchronously trigger a workflow run without waiting for it to complete.
This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs.

:param input: The input data for the workflow.
:param options: Additional options for workflow execution.

:returns: A `WorkflowRunRef` object representing the reference to the workflow run.
"""
ref = await self._workflow.aio_run_no_wait(input, options)

return TaskRunRef[TWorkflowInput, R](self, ref)

def run_many(self, workflows: list[WorkflowRunTriggerConfig]) -> list[R]:
"""
Run a workflow in bulk and wait for all runs to complete.
This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results.

:param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered.
:returns: A list of results for each workflow run.
"""
return [
self._extract_result(result)
for result in self._workflow.run_many(workflows)
]

async def aio_run_many(self, workflows: list[WorkflowRunTriggerConfig]) -> list[R]:
"""
Run a workflow in bulk and wait for all runs to complete.
This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results.

:param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered.
:returns: A list of results for each workflow run.
"""
return [
self._extract_result(result)
for result in await self._workflow.aio_run_many(workflows)
@@ -118,6 +170,14 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
def run_many_no_wait(
self, workflows: list[WorkflowRunTriggerConfig]
) -> list[TaskRunRef[TWorkflowInput, R]]:
"""
Run a workflow in bulk without waiting for all runs to complete.

This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.

:param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered.
:returns: A list of `WorkflowRunRef` objects, each representing a reference to a workflow run.
"""
refs = self._workflow.run_many_no_wait(workflows)

return [TaskRunRef[TWorkflowInput, R](self, ref) for ref in refs]
@@ -125,6 +185,15 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
async def aio_run_many_no_wait(
self, workflows: list[WorkflowRunTriggerConfig]
) -> list[TaskRunRef[TWorkflowInput, R]]:
"""
Run a workflow in bulk without waiting for all runs to complete.

This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.

:param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered.

:returns: A list of `WorkflowRunRef` objects, each representing a reference to a workflow run.
"""
refs = await self._workflow.aio_run_many_no_wait(workflows)

return [TaskRunRef[TWorkflowInput, R](self, ref) for ref in refs]
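The run-method matrix above in one sketch, reusing the hypothetical `greet` task from the earlier example:

```python
# Blocking: wait for the result.
result = greet.run(GreetInput(name="Ada"))

# Fire-and-forget: get a TaskRunRef back immediately.
ref = greet.run_no_wait(GreetInput(name="Grace"))

# Bulk: build trigger configs, then block on all of them.
items = [greet.create_bulk_run_item(input=GreetInput(name=n)) for n in ("Ada", "Grace")]
results = greet.run_many(items)
```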
@@ -135,6 +204,14 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
) -> WorkflowVersion:
"""
Schedule a workflow to run at a specific time.

:param run_at: The time at which to schedule the workflow.
:param input: The input data for the workflow.
:param options: Additional options for workflow execution.
:returns: A `WorkflowVersion` object representing the scheduled workflow.
"""
return self._workflow.schedule(
run_at=run_at,
input=input,
@@ -147,6 +224,14 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
) -> WorkflowVersion:
"""
Schedule a workflow to run at a specific time.

:param run_at: The time at which to schedule the workflow.
:param input: The input data for the workflow.
:param options: Additional options for workflow execution.
:returns: A `WorkflowVersion` object representing the scheduled workflow.
"""
return await self._workflow.aio_schedule(
run_at=run_at,
input=input,
@@ -159,12 +244,25 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
expression: str,
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
additional_metadata: JSONSerializableMapping = {},
priority: int | None = None,
) -> CronWorkflows:
"""
Create a cron job for the workflow.

:param cron_name: The name of the cron job.
:param expression: The cron expression that defines the schedule for the cron job.
:param input: The input data for the workflow.
:param additional_metadata: Additional metadata for the cron job.
:param priority: The priority of the cron job. Must be between 1 and 3, inclusive.

:returns: A `CronWorkflows` object representing the created cron job.
"""
return self._workflow.create_cron(
cron_name=cron_name,
expression=expression,
input=input,
additional_metadata=additional_metadata,
priority=priority,
)

async def aio_create_cron(
@@ -173,12 +271,25 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]):
expression: str,
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
additional_metadata: JSONSerializableMapping = {},
priority: int | None = None,
) -> CronWorkflows:
"""
Create a cron job for the workflow.

:param cron_name: The name of the cron job.
:param expression: The cron expression that defines the schedule for the cron job.
:param input: The input data for the workflow.
:param additional_metadata: Additional metadata for the cron job.
:param priority: The priority of the cron job. Must be between 1 and 3, inclusive.

:returns: A `CronWorkflows` object representing the created cron job.
"""
return await self._workflow.aio_create_cron(
cron_name=cron_name,
expression=expression,
input=input,
additional_metadata=additional_metadata,
priority=priority,
)

def to_task(self) -> Task[TWorkflowInput, R]:

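Scheduling and cron creation on the same hypothetical task:

```python
from datetime import datetime, timedelta, timezone

# One-off run an hour from now.
greet.schedule(
    run_at=datetime.now(timezone.utc) + timedelta(hours=1),
    input=GreetInput(name="Later"),
)

# Recurring run; priority must be between 1 and 3, inclusive.
greet.create_cron(
    cron_name="hourly-greet",
    expression="0 * * * *",
    input=GreetInput(name="Cron"),
    priority=1,
)
```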
@@ -195,6 +195,15 @@ class BaseWorkflow(Generic[TWorkflowInput]):
key: str | None = None,
options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
) -> WorkflowRunTriggerConfig:
"""
Create a bulk run item for the workflow. This is intended to be used in conjunction with the various `run_many` methods.

:param input: The input data for the workflow.
:param key: The key for the workflow run. This is used to identify the run in the bulk operation and for deduplication.
:param options: Additional options for the workflow run.

:returns: A `WorkflowRunTriggerConfig` object that can be used to trigger the workflow run, which you then pass into the `run_many` methods.
"""
return WorkflowRunTriggerConfig(
workflow_name=self.config.name,
input=self._serialize_input(input),
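A sketch of why `key` matters: items sharing a key are deduplicated within the bulk operation (reusing the hypothetical `order_workflow` from the earlier sketch):

```python
items = [
    order_workflow.create_bulk_run_item(
        input=OrderInput(order_id=oid),
        key=f"order-{oid}",  # deduplication key within the bulk trigger
    )
    for oid in ("a-1", "a-2")
]
```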
@@ -216,8 +225,39 @@ class BaseWorkflow(Generic[TWorkflowInput]):

class Workflow(BaseWorkflow[TWorkflowInput]):
"""
A Hatchet workflow, which allows you to define tasks to be run and perform actions on the workflow, such as
running / spawning children and scheduling future runs.
A Hatchet workflow, which allows you to define tasks to be run and perform actions on the workflow.

Workflows in Hatchet represent coordinated units of work that can be triggered, scheduled, or run on a cron schedule.
Each workflow can contain multiple tasks that can be arranged in dependencies (DAGs), have customized retry behavior,
timeouts, concurrency controls, and more.

Example:
```python
from pydantic import BaseModel
from hatchet_sdk import Hatchet

class MyInput(BaseModel):
name: str

hatchet = Hatchet()
workflow = hatchet.workflow("my-workflow", input_type=MyInput)

@workflow.task()
def greet(input, ctx):
return f"Hello, {input.name}!"

# Run the workflow
result = workflow.run(MyInput(name="World"))
```

Workflows support various execution patterns including:
- One-time execution with `run()` or `aio_run()`
- Scheduled execution with `schedule()`
- Cron-based recurring execution with `create_cron()`
- Bulk operations with `run_many()`

Tasks within workflows can be defined with `@workflow.task()` or `@workflow.durable_task()` decorators
and can be arranged into complex dependency patterns.
"""

def run_no_wait(
@@ -225,6 +265,15 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
) -> WorkflowRunRef:
"""
Synchronously trigger a workflow run without waiting for it to complete.
This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs.

:param input: The input data for the workflow.
:param options: Additional options for workflow execution.

:returns: A `WorkflowRunRef` object representing the reference to the workflow run.
"""
return self.client._client.admin.run_workflow(
workflow_name=self.config.name,
input=self._serialize_input(input),
@@ -236,6 +285,17 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
) -> dict[str, Any]:
"""
Run the workflow synchronously and wait for it to complete.

This method triggers a workflow run, blocks until completion, and returns the final result.

:param input: The input data for the workflow, must match the workflow's input type.
:param options: Additional options for workflow execution like metadata and parent workflow ID.

:returns: The result of the workflow execution as a dictionary.
"""

ref = self.client._client.admin.run_workflow(
workflow_name=self.config.name,
input=self._serialize_input(input),
@@ -249,6 +309,16 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
) -> WorkflowRunRef:
"""
Asynchronously trigger a workflow run without waiting for it to complete.
This method is useful for starting a workflow run and immediately returning a reference to the run without blocking while the workflow runs.

:param input: The input data for the workflow.
:param options: Additional options for workflow execution.

:returns: A `WorkflowRunRef` object representing the reference to the workflow run.
"""

return await self.client._client.admin.aio_run_workflow(
workflow_name=self.config.name,
input=self._serialize_input(input),
@@ -260,6 +330,16 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: TriggerWorkflowOptions = TriggerWorkflowOptions(),
) -> dict[str, Any]:
"""
Run the workflow asynchronously and wait for it to complete.

This method triggers a workflow run, blocks until completion, and returns the final result.

:param input: The input data for the workflow, must match the workflow's input type.
:param options: Additional options for workflow execution like metadata and parent workflow ID.

:returns: The result of the workflow execution as a dictionary.
"""
ref = await self.client._client.admin.aio_run_workflow(
workflow_name=self.config.name,
input=self._serialize_input(input),
@@ -272,6 +352,13 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
self,
workflows: list[WorkflowRunTriggerConfig],
) -> list[dict[str, Any]]:
"""
Run a workflow in bulk and wait for all runs to complete.
This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results.

:param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered.
:returns: A list of results for each workflow run.
"""
refs = self.client._client.admin.run_workflows(
workflows=workflows,
)
@@ -282,6 +369,13 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
self,
workflows: list[WorkflowRunTriggerConfig],
) -> list[dict[str, Any]]:
"""
Run a workflow in bulk and wait for all runs to complete.
This method triggers multiple workflow runs, blocks until all of them complete, and returns the final results.

:param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered.
:returns: A list of results for each workflow run.
"""
refs = await self.client._client.admin.aio_run_workflows(
workflows=workflows,
)
@@ -292,6 +386,14 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
self,
workflows: list[WorkflowRunTriggerConfig],
) -> list[WorkflowRunRef]:
"""
Run a workflow in bulk without waiting for all runs to complete.

This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.

:param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered.
:returns: A list of `WorkflowRunRef` objects, each representing a reference to a workflow run.
"""
return self.client._client.admin.run_workflows(
workflows=workflows,
)
@@ -300,6 +402,15 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
self,
workflows: list[WorkflowRunTriggerConfig],
) -> list[WorkflowRunRef]:
"""
Run a workflow in bulk without waiting for all runs to complete.

This method triggers multiple workflow runs and immediately returns a list of references to the runs without blocking while the workflows run.

:param workflows: A list of `WorkflowRunTriggerConfig` objects, each representing a workflow run to be triggered.

:returns: A list of `WorkflowRunRef` objects, each representing a reference to a workflow run.
"""
return await self.client._client.admin.aio_run_workflows(
workflows=workflows,
)
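The same run patterns exist on `Workflow` declarations, except blocking runs return a dictionary of task outputs rather than an extracted result; a sketch against the hypothetical `order_workflow`:

```python
# Blocking run: returns dict[str, Any] keyed by task.
outputs = order_workflow.run(OrderInput(order_id="a-1"))

# Non-blocking run: returns a WorkflowRunRef.
ref = order_workflow.run_no_wait(OrderInput(order_id="a-2"))

# Bulk, blocking.
results = order_workflow.run_many(
    [order_workflow.create_bulk_run_item(input=OrderInput(order_id="a-3"))]
)
```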
@@ -310,6 +421,14 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
) -> WorkflowVersion:
"""
Schedule a workflow to run at a specific time.

:param run_at: The time at which to schedule the workflow.
:param input: The input data for the workflow.
:param options: Additional options for workflow execution.
:returns: A `WorkflowVersion` object representing the scheduled workflow.
"""
return self.client._client.admin.schedule_workflow(
name=self.config.name,
schedules=cast(list[datetime | timestamp_pb2.Timestamp], [run_at]),
@@ -323,6 +442,14 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
input: TWorkflowInput = cast(TWorkflowInput, EmptyModel()),
options: ScheduleTriggerWorkflowOptions = ScheduleTriggerWorkflowOptions(),
) -> WorkflowVersion:
"""
Schedule a workflow to run at a specific time.

:param run_at: The time at which to schedule the workflow.
:param input: The input data for the workflow.
:param options: Additional options for workflow execution.
:returns: A `WorkflowVersion` object representing the scheduled workflow.
"""
return await self.client._client.admin.aio_schedule_workflow(
name=self.config.name,
schedules=cast(list[datetime | timestamp_pb2.Timestamp], [run_at]),
@@ -338,6 +465,17 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
additional_metadata: JSONSerializableMapping = {},
priority: int | None = None,
) -> CronWorkflows:
"""
Create a cron job for the workflow.

:param cron_name: The name of the cron job.
:param expression: The cron expression that defines the schedule for the cron job.
:param input: The input data for the workflow.
:param additional_metadata: Additional metadata for the cron job.
:param priority: The priority of the cron job. Must be between 1 and 3, inclusive.

:returns: A `CronWorkflows` object representing the created cron job.
"""
return self.client.cron.create(
workflow_name=self.config.name,
cron_name=cron_name,
@@ -355,6 +493,17 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
additional_metadata: JSONSerializableMapping = {},
priority: int | None = None,
) -> CronWorkflows:
"""
Create a cron job for the workflow.

:param cron_name: The name of the cron job.
:param expression: The cron expression that defines the schedule for the cron job.
:param input: The input data for the workflow.
:param additional_metadata: Additional metadata for the cron job.
:param priority: The priority of the cron job. Must be between 1 and 3, inclusive.

:returns: A `CronWorkflows` object representing the created cron job.
"""
return await self.client.cron.aio_create(
workflow_name=self.config.name,
cron_name=cron_name,
@@ -393,46 +542,35 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
cancel_if: list[Condition | OrGroup] = [],
) -> Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]:
"""
A decorator to transform a function into a Hatchet task that run as part of a workflow.
A decorator to transform a function into a Hatchet task that runs as part of a workflow.

:param name: The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.
:type name: str | None

:param timeout: The execution timeout of the task. Defaults to 60 minutes.
:type timeout: datetime.timedelta | str
:param schedule_timeout: The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time.

:param parents: A list of tasks that are parents of the task. Note: Parents must be defined before their children. Defaults to an empty list (no parents).
:type parents: list[Task]
:param execution_timeout: The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time.

:param retries: The number of times to retry the task before failing. Default: `0`
:type retries: int
:param parents: A list of tasks that are parents of the task. Note: Parents must be defined before their children.

:param rate_limits: A list of rate limit configurations for the task. Defaults to an empty list (no rate limits).
:type rate_limits: list[RateLimit]
:param retries: The number of times to retry the task before failing.

:param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned. See documentation and examples on affinity and worker labels for more details. Defaults to an empty dictionary (no desired worker labels).
:type desired_worker_labels: dict[str, DesiredWorkerLabel]
:param rate_limits: A list of rate limit configurations for the task.

:param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: `None`
:type backoff_factor: float | None
:param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned. See documentation and examples on affinity and worker labels for more details.

:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: `None`
:type backoff_max_seconds: int | None
:param backoff_factor: The backoff factor for controlling exponential backoff in retries.

:param concurrency: A list of concurrency expressions for the task. Defaults to an empty list (no concurrency).
:type concurrency: list[ConcurrencyExpression]
:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue.

:param wait_for: A list of conditions that must be met before the task can run. Defaults to an empty list (no conditions).
:type wait_for: list[Condition | OrGroup]
:param concurrency: A list of concurrency expressions for the task.

:param skip_if: A list of conditions that, if met, will cause the task to be skipped. Defaults to an empty list (no conditions).
:type skip_if: list[Condition | OrGroup]
:param wait_for: A list of conditions that must be met before the task can run.

:param cancel_if: A list of conditions that, if met, will cause the task to be canceled. Defaults to an empty list (no conditions).
:type cancel_if: list[Condition | OrGroup]
:param skip_if: A list of conditions that, if met, will cause the task to be skipped.

:param cancel_if: A list of conditions that, if met, will cause the task to be canceled.

:returns: A decorator which creates a `Task` object.
:rtype: Callable[[Callable[[Type[BaseModel], Context], R]], Task[Type[BaseModel], R]]
"""

def inner(
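A sketch of a two-task DAG built with this decorator; reading parent output via `ctx.task_output(...)` is an assumption about the `Context` API:

```python
@order_workflow.task()
def fetch(input: OrderInput, ctx: Context) -> dict[str, str]:
    return {"order": input.order_id}

@order_workflow.task(parents=[fetch], retries=3, backoff_factor=2.0)
def settle(input: OrderInput, ctx: Context) -> dict[str, bool]:
    upstream = ctx.task_output(fetch)  # assumed accessor for parent output
    return {"settled": upstream["order"] == input.order_id}
```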
@@ -493,43 +631,32 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
See the Hatchet docs for more information on durable execution to decide if this is right for you.

:param name: The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator.
:type name: str | None

:param timeout: The execution timeout of the task. Defaults to 60 minutes.
:type timeout: datetime.timedelta | str
:param schedule_timeout: The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time.

:param parents: A list of tasks that are parents of the task. Note: Parents must be defined before their children. Defaults to an empty list (no parents).
:type parents: list[Task]
:param execution_timeout: The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time.

:param retries: The number of times to retry the task before failing. Default: `0`
:type retries: int
:param parents: A list of tasks that are parents of the task. Note: Parents must be defined before their children.

:param rate_limits: A list of rate limit configurations for the task. Defaults to an empty list (no rate limits).
:type rate_limits: list[RateLimit]
:param retries: The number of times to retry the task before failing.

:param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned. See documentation and examples on affinity and worker labels for more details. Defaults to an empty dictionary (no desired worker labels).
:type desired_worker_labels: dict[str, DesiredWorkerLabel]
:param rate_limits: A list of rate limit configurations for the task.

:param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: `None`
:type backoff_factor: float | None
:param desired_worker_labels: A dictionary of desired worker labels that determine to which worker the task should be assigned. See documentation and examples on affinity and worker labels for more details.

:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: `None`
:type backoff_max_seconds: int | None
:param backoff_factor: The backoff factor for controlling exponential backoff in retries.

:param concurrency: A list of concurrency expressions for the task. Defaults to an empty list (no concurrency).
:type concurrency: list[ConcurrencyExpression]
:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue.

:param wait_for: A list of conditions that must be met before the task can run. Defaults to an empty list (no conditions).
:type wait_for: list[Condition | OrGroup]
:param concurrency: A list of concurrency expressions for the task.

:param skip_if: A list of conditions that, if met, will cause the task to be skipped. Defaults to an empty list (no conditions).
:type skip_if: list[Condition | OrGroup]
:param wait_for: A list of conditions that must be met before the task can run.

:param cancel_if: A list of conditions that, if met, will cause the task to be canceled. Defaults to an empty list (no conditions).
:type cancel_if: list[Condition | OrGroup]
:param skip_if: A list of conditions that, if met, will cause the task to be skipped.

:param cancel_if: A list of conditions that, if met, will cause the task to be canceled.

:returns: A decorator which creates a `Task` object.
:rtype: Callable[[Callable[[Type[BaseModel], Context], R]], Task[Type[BaseModel], R]]
"""

def inner(
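These parameters are shared with the plain `task` decorator documented just above, so a minimal sketch using it is shown for brevity; the workflow name, input model, and task body below are hypothetical, and only parameters named in the docstring are exercised.

```python
from datetime import timedelta

from pydantic import BaseModel

from hatchet_sdk import Context, Hatchet

hatchet = Hatchet()


class ExampleInput(BaseModel):  # hypothetical input model
    user_id: str


wf = hatchet.workflow(name="example-workflow", input_validator=ExampleInput)


@wf.task(
    execution_timeout=timedelta(minutes=5),  # cancel the run after 5 minutes of execution
    schedule_timeout=timedelta(minutes=1),   # cancel if not scheduled within 1 minute
    retries=3,                               # retry up to 3 times before failing
    backoff_factor=2.0,                      # exponential backoff between retries
    backoff_max_seconds=60,                  # cap backoff growth at 60 seconds
)
def fetch_user(input: ExampleInput, ctx: Context) -> dict[str, str]:
    return {"user_id": input.user_id}
```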
@@ -579,25 +706,22 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
A decorator to transform a function into a Hatchet on-failure task that runs as the last step in a workflow that had at least one task fail.

:param name: The name of the on-failure task. If not specified, defaults to the name of the function being wrapped by the `on_failure_task` decorator.
:type name: str | None

:param timeout: The execution timeout of the on-failure task. Defaults to 60 minutes.
:type timeout: datetime.timedelta | str
:param schedule_timeout: The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time.

:param retries: The number of times to retry the on-failure task before failing. Default: `0`
:type retries: int
:param execution_timeout: The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time.

:param rate_limits: A list of rate limit configurations for the on-failure task. Defaults to an empty list (no rate limits).
:type rate_limits: list[RateLimit]
:param retries: The number of times to retry the on-failure task before failing.

:param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: `None`
:type backoff_factor: float | None
:param rate_limits: A list of rate limit configurations for the on-failure task.

:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: `None`
:type backoff_max_seconds: int | None
:param backoff_factor: The backoff factor for controlling exponential backoff in retries.

:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue.

:param concurrency: A list of concurrency expressions for the on-failure task.

:returns: A decorator which creates a `Task` object.
:rtype: Callable[[Callable[[Type[BaseModel], Context], R]], Task[Type[BaseModel], R]]
"""

def inner(
@@ -642,25 +766,22 @@ class Workflow(BaseWorkflow[TWorkflowInput]):
A decorator to transform a function into a Hatchet on-success task that runs as the last step in a workflow that had all upstream tasks succeed.

:param name: The name of the on-success task. If not specified, defaults to the name of the function being wrapped by the `on_success_task` decorator.
:type name: str | None

:param timeout: The execution timeout of the on-success task. Defaults to 60 minutes.
:type timeout: datetime.timedelta | str
:param schedule_timeout: The maximum time to wait for the task to be scheduled. The run will be canceled if the task does not begin within this time.

:param retries: The number of times to retry the on-success task before failing. Default: `0`
:type retries: int
:param execution_timeout: The maximum time to wait for the task to complete. The run will be canceled if the task does not complete within this time.

:param rate_limits: A list of rate limit configurations for the on-success task. Defaults to an empty list (no rate limits).
:type rate_limits: list[RateLimit]
:param retries: The number of times to retry the on-success task before failing.

:param backoff_factor: The backoff factor for controlling exponential backoff in retries. Default: `None`
:type backoff_factor: float | None
:param rate_limits: A list of rate limit configurations for the on-success task.

:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue. Default: `None`
:type backoff_max_seconds: int | None
:param backoff_factor: The backoff factor for controlling exponential backoff in retries.

:returns: A decorator which creates a `Task` object.
:rtype: Callable[[Callable[[Type[BaseModel], Context], R]], Task[Type[BaseModel], R]]
:param backoff_max_seconds: The maximum number of seconds to allow retries with exponential backoff to continue.

:param concurrency: A list of concurrency expressions for the on-success task.

:returns: A decorator which creates a Task object.
"""

def inner(
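As a rough sketch of how the two hooks documented above attach to a workflow (reusing the hypothetical `wf` and `ExampleInput` from the earlier snippet):

```python
@wf.on_failure_task()
def handle_failure(input: ExampleInput, ctx: Context) -> None:
    # Runs as the last step only when at least one upstream task failed.
    print(f"workflow failed for {input.user_id}")


@wf.on_success_task()
def handle_success(input: ExampleInput, ctx: Context) -> None:
    # Runs as the last step only when every upstream task succeeded.
    print(f"workflow succeeded for {input.user_id}")
```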
@@ -20,7 +20,7 @@ class CreateCronTriggerInput(BaseModel):
Attributes:
expression (str): The cron expression defining the schedule.
input (dict): The input data for the cron workflow.
additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger (e.g. {"key1": "value1", "key2": "value2"}).
additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger.
"""

expression: str = None
@@ -97,7 +97,7 @@ class CronClient:
cron_name (str): The name of the cron trigger.
expression (str): The cron expression defining the schedule.
input (dict): The input data for the cron workflow.
additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger (e.g. {"key1": "value1", "key2": "value2"}).
additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger.

Returns:
CronWorkflows: The created cron workflow instance.
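A sketch of the corresponding call, assuming the client is exposed as `hatchet.cron`; the workflow name, cron name, and metadata values are illustrative:

```python
cron = hatchet.cron.create(
    workflow_name="example-workflow",
    cron_name="nightly-report",
    expression="0 2 * * *",  # every day at 02:00
    input={"user_id": "abc-123"},
    additional_metadata={"team": "growth"},
)
```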
@@ -209,7 +209,7 @@ class CronClientAsync:
cron_name (str): The name of the cron trigger.
expression (str): The cron expression defining the schedule.
input (dict): The input data for the cron workflow.
additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger (e.g. {"key1": "value1", "key2": "value2"}).
additional_metadata (dict[str, str]): Additional metadata associated with the cron trigger.

Returns:
CronWorkflows: The created cron workflow instance.
@@ -67,7 +67,7 @@ class ScheduledClient:
workflow_name (str): The name of the scheduled workflow.
trigger_at (datetime.datetime): The datetime when the run should be triggered.
input (Dict[str, Any]): The input data for the scheduled workflow.
additional_metadata (Dict[str, str]): Additional metadata associated with the future run as a key-value pair (e.g. {"key1": "value1", "key2": "value2"}).
additional_metadata (Dict[str, str]): Additional metadata associated with the future run as a key-value pair.

Returns:
ScheduledWorkflows: The created scheduled workflow instance.
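A sketch of scheduling a one-off run an hour from now, assuming the client is exposed as `hatchet.scheduled`; the values are illustrative:

```python
import datetime

scheduled = hatchet.scheduled.create(
    workflow_name="example-workflow",
    trigger_at=datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=1),
    input={"user_id": "abc-123"},
    additional_metadata={"source": "docs-example"},
)
```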
@@ -234,9 +234,9 @@ class Hatchet:
Initialize a new Hatchet instance.

Args:
debug (bool, optional): Enable debug logging. Defaults to False.
client (Optional[Client], optional): A pre-configured Client instance. Defaults to None.
config (ClientConfig, optional): Configuration for creating a new Client. Defaults to ClientConfig().
debug (bool, optional): Enable debug logging.
client (Optional[Client], optional): A pre-configured Client instance.
config (ClientConfig, optional): Configuration for creating a new Client.
"""
if client is not None:
self._client = client
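Both construction paths look roughly like this (a minimal sketch; the default path assumes connection settings such as the client token come from the environment):

```python
from hatchet_sdk import ClientConfig, Hatchet

# Default: build a new Client from ClientConfig(), reading settings
# (e.g. HATCHET_CLIENT_TOKEN) from the environment.
hatchet = Hatchet(debug=True)

# Alternatively, pass an explicit configuration.
hatchet = Hatchet(config=ClientConfig())
```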
@@ -57,7 +57,6 @@ def create_traceparent() -> str | None:
:returns: A W3C-formatted traceparent header value if successful, None if the context
injection fails or no active span exists.\n
Example: `00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01`
:rtype: str | None:
"""

carrier: dict[str, str] = {}
@@ -75,10 +74,9 @@ def parse_carrier_from_metadata(metadata: dict[str, str] | None) -> Context | No

:param metadata: A dictionary containing metadata key-value pairs,
potentially including the `traceparent` header. Can be None.
:type metadata: dict[str, str] | None

:returns: The extracted OpenTelemetry Context object if a valid `traceparent`
is found in the metadata, otherwise None.
:rtype: Context | None

:Example:
@@ -108,13 +106,12 @@ def inject_traceparent_into_metadata(
`OTEL_TRACEPARENT_KEY`. If no `traceparent` is provided, it attempts to create one.

:param metadata: The metadata dictionary to inject the `traceparent` into.
:type metadata: dict[str, str]

:param traceparent: The `traceparent` string to inject. If None, attempts to use
the current span.
:type traceparent: str | None, optional

:returns: A new metadata dictionary containing the original metadata plus
the injected `traceparent`, if one was available or could be created.
:rtype: dict[str, str]

:Example:
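Taken together, the three helpers round-trip a trace context through run metadata. A sketch of that flow follows; the import path is an assumption and should be checked against the SDK:

```python
# Import path is an assumption; these helpers live in the SDK's
# OpenTelemetry utilities.
from hatchet_sdk.utils.opentelemetry import (
    create_traceparent,
    inject_traceparent_into_metadata,
    parse_carrier_from_metadata,
)

metadata: dict[str, str] = {"team": "growth"}

# Producer side: stamp the current span's traceparent into the metadata.
traceparent = create_traceparent()  # e.g. "00-4bf92f35...-00f067aa...-01"
metadata = inject_traceparent_into_metadata(metadata, traceparent)

# Consumer side: recover the OpenTelemetry context from the metadata.
otel_context = parse_carrier_from_metadata(metadata)
```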
@@ -11,7 +11,7 @@ from enum import Enum
from multiprocessing import Queue
from multiprocessing.process import BaseProcess
from types import FrameType
from typing import Any, AsyncGenerator, Callable, TypeVar
from typing import Any, AsyncGenerator, Callable, TypeVar, Union
from warnings import warn

from aiohttp import web
@@ -60,7 +60,7 @@ class HealthCheckResponse(BaseModel):
name: str
slots: int
actions: list[str]
labels: dict[str, str | int]
labels: dict[str, Union[str, int]]
python_version: str
@@ -88,7 +88,7 @@ class Worker:
config: ClientConfig,
slots: int,
durable_slots: int,
labels: dict[str, str | int] = {},
labels: dict[str, Union[str, int]] = {},
debug: bool = False,
owned_loop: bool = True,
handle_kill: bool = True,
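With the widened type, a worker can mix string and integer label values, which is what the affinity and worker-label docs rely on. A sketch, reusing the hypothetical `hatchet` and `wf` from the earlier snippets (worker name and labels are illustrative):

```python
worker = hatchet.worker(
    "example-worker",
    slots=5,                                   # concurrent task slots
    labels={"model": "fancy-ai-model-v2", "memory": 512},  # str and int both allowed
    workflows=[wf],
)

worker.start()
```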
@@ -9,3 +9,6 @@ poetry run isort .

echo "\nType checking with mypy"
poetry run mypy --config-file=pyproject.toml

echo "\nLinting documentation with pydoclint"
poetry run pydoclint . --config pyproject.toml
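pydoclint cross-checks each docstring against its function signature. As a hypothetical illustration (not from this commit), the following would fail the new lint step because `b` is never documented:

```python
def add(a: int, b: int) -> int:
    """Add two integers.

    :param a: The first operand.

    :returns: The sum of the operands.
    """
    return a + b
```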
40
sdks/python/mkdocs.yml
Normal file
@@ -0,0 +1,40 @@
site_name: Hatchet Python SDK
site_description: Documentation for the Hatchet SDK
repo_url: https://github.com/hatchet-dev/hatchet
site_url: https://docs.hatchet.run/sdks/python/
use_directory_urls: false


plugins:
  - markdown-export
  - mkdocstrings:
      default_handler: python
      handlers:
        python:
          paths: [hatchet_sdk]
          options:
            show_source: false
            heading_level: 3
            show_category_heading: true
            show_if_no_docstring: true
            show_signature: false
            show_overloads: false
            docstring_style: sphinx
            show_docstring_functions: false
            inherited_members: false
            filters: ["!^_", "!^features$"]
            show_signature_annotations: false
            annotations_path: "brief"
            docstring_section_style: "table"
            show_docstring_attributes: false
            summary:
              attributes: false
              functions: true
              modules: false

markdown_extensions:
  - pymdownx.highlight
  - pymdownx.superfences
  - admonition
  - toc:
      permalink: true
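In practice these options mean mkdocstrings renders public members with their sphinx-style docstrings and hides anything matching the filters. A hypothetical client (not from the SDK) illustrates which members survive:

```python
class ExampleClient:
    """Rendered: public classes match no exclusion filter."""

    def list_items(self) -> list[str]:
        """Rendered with its sphinx-style docstring (docstring_style: sphinx).

        :returns: The item identifiers.
        """
        return []

    def _build_request(self) -> None:
        """Hidden: the "!^_" filter excludes underscore-prefixed members."""
```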
740
sdks/python/poetry.lock
generated
@@ -167,12 +167,35 @@ version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
groups = ["main"]
groups = ["main", "docs"]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]

[[package]]
name = "anyio"
version = "4.9.0"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"},
{file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"},
]

[package.dependencies]
exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
idna = ">=2.8"
sniffio = ">=1.1"
typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}

[package.extras]
doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"]
trio = ["trio (>=0.26.1)"]

[[package]]
name = "async-timeout"
version = "5.0.1"
@@ -206,6 +229,29 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi
tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]

[[package]]
name = "beautifulsoup4"
version = "4.13.4"
description = "Screen-scraping library"
optional = false
python-versions = ">=3.7.0"
groups = ["docs"]
files = [
{file = "beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b"},
{file = "beautifulsoup4-4.13.4.tar.gz", hash = "sha256:dbb3c4e1ceae6aefebdaf2423247260cd062430a410e38c66f2baa50a8437195"},
]

[package.dependencies]
soupsieve = ">1.2"
typing-extensions = ">=4.0.0"

[package.extras]
cchardet = ["cchardet"]
chardet = ["chardet"]
charset-normalizer = ["charset-normalizer"]
html5lib = ["html5lib"]
lxml = ["lxml"]

[[package]]
name = "black"
version = "24.10.0"
@@ -253,6 +299,21 @@ d = ["aiohttp (>=3.10)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]

[[package]]
name = "bs4"
version = "0.0.2"
description = "Dummy package for Beautiful Soup (beautifulsoup4)"
optional = false
python-versions = "*"
groups = ["docs"]
files = [
{file = "bs4-0.0.2-py2.py3-none-any.whl", hash = "sha256:abf8742c0805ef7f662dce4b51cca104cffe52b835238afc169142ab9b3fbccc"},
{file = "bs4-0.0.2.tar.gz", hash = "sha256:a48685c58f50fe127722417bae83fe6badf500d54b55f7e39ffe43b798653925"},
]

[package.dependencies]
beautifulsoup4 = "*"

[[package]]
name = "cel-python"
version = "0.2.0"
@@ -277,14 +338,14 @@ types-pyyaml = ">=6.0.12.20240311,<7.0.0.0"
name = "certifi"
version = "2025.1.31"
description = "Python package for providing Mozilla's CA Bundle."
optional = true
optional = false
python-versions = ">=3.6"
groups = ["main"]
markers = "extra == \"otel\""
groups = ["main", "docs"]
files = [
{file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
{file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
]
markers = {main = "extra == \"otel\""}

[[package]]
name = "charset-normalizer"
@@ -395,7 +456,7 @@ version = "8.1.8"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
groups = ["lint"]
groups = ["docs", "lint"]
files = [
{file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"},
{file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"},
@@ -410,7 +471,7 @@ version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["lint", "test"]
groups = ["docs", "lint", "test"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
@@ -436,13 +497,37 @@ wrapt = ">=1.10,<2"
[package.extras]
dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools", "tox"]

[[package]]
name = "distro"
version = "1.9.0"
description = "Distro - an OS platform information API"
optional = false
python-versions = ">=3.6"
groups = ["docs"]
files = [
{file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
{file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
]

[[package]]
name = "docstring-parser-fork"
version = "0.0.12"
description = "Parse Python docstrings in reST, Google and Numpydoc format"
optional = false
python-versions = "<4.0,>=3.7"
groups = ["docs"]
files = [
{file = "docstring_parser_fork-0.0.12-py3-none-any.whl", hash = "sha256:55d7cbbc8b367655efd64372b9a0b33a49bae930a8ddd5cdc4c6112312e28a87"},
{file = "docstring_parser_fork-0.0.12.tar.gz", hash = "sha256:b44c5e0be64ae80f395385f01497d381bd094a57221fd9ff020987d06857b2a0"},
]

[[package]]
name = "exceptiongroup"
version = "1.2.2"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["test"]
groups = ["docs", "test"]
markers = "python_version < \"3.11\""
files = [
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
@@ -554,6 +639,24 @@ files = [
{file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"},
]

[[package]]
name = "ghp-import"
version = "2.1.0"
description = "Copy your docs directly to the gh-pages branch."
optional = false
python-versions = "*"
groups = ["docs"]
files = [
{file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"},
{file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"},
]

[package.dependencies]
python-dateutil = ">=2.8.1"

[package.extras]
dev = ["flake8", "markdown", "twine", "wheel"]

[[package]]
name = "googleapis-common-protos"
version = "1.69.2"
@@ -573,6 +676,21 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4
[package.extras]
grpc = ["grpcio (>=1.44.0,<2.0.0)"]

[[package]]
name = "griffe"
version = "1.7.2"
description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "griffe-1.7.2-py3-none-any.whl", hash = "sha256:1ed9c2e338a75741fc82083fe5a1bc89cb6142efe126194cc313e34ee6af5423"},
{file = "griffe-1.7.2.tar.gz", hash = "sha256:98d396d803fab3b680c2608f300872fd57019ed82f0672f5b5323a9ad18c540c"},
]

[package.dependencies]
colorama = ">=0.4"

[[package]]
name = "grpc-stubs"
version = "1.53.0.5"
@@ -718,13 +836,72 @@ grpcio = ">=1.71.0"
protobuf = ">=5.26.1,<6.0dev"
setuptools = "*"

[[package]]
name = "h11"
version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.7"
groups = ["docs"]
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]

[[package]]
name = "httpcore"
version = "1.0.8"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be"},
{file = "httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad"},
]

[package.dependencies]
certifi = "*"
h11 = ">=0.13,<0.15"

[package.extras]
asyncio = ["anyio (>=4.0,<5.0)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
trio = ["trio (>=0.22.0,<1.0)"]

[[package]]
name = "httpx"
version = "0.28.1"
description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
{file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
]

[package.dependencies]
anyio = "*"
certifi = "*"
httpcore = "==1.*"
idna = "*"

[package.extras]
brotli = ["brotli", "brotlicffi"]
cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
zstd = ["zstandard (>=0.18.0)"]

[[package]]
name = "idna"
version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["main"]
groups = ["main", "docs"]
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
@@ -785,6 +962,110 @@ files = [
[package.extras]
colors = ["colorama (>=0.4.6)"]

[[package]]
name = "jinja2"
version = "3.1.6"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
groups = ["docs"]
files = [
{file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"},
{file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"},
]

[package.dependencies]
MarkupSafe = ">=2.0"

[package.extras]
i18n = ["Babel (>=2.7)"]

[[package]]
name = "jiter"
version = "0.9.0"
description = "Fast iterable JSON parser."
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "jiter-0.9.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:816ec9b60fdfd1fec87da1d7ed46c66c44ffec37ab2ef7de5b147b2fce3fd5ad"},
{file = "jiter-0.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b1d3086f8a3ee0194ecf2008cf81286a5c3e540d977fa038ff23576c023c0ea"},
{file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1339f839b91ae30b37c409bf16ccd3dc453e8b8c3ed4bd1d6a567193651a4a51"},
{file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ffba79584b3b670fefae66ceb3a28822365d25b7bf811e030609a3d5b876f538"},
{file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cfc7d0a8e899089d11f065e289cb5b2daf3d82fbe028f49b20d7b809193958d"},
{file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e00a1a2bbfaaf237e13c3d1592356eab3e9015d7efd59359ac8b51eb56390a12"},
{file = "jiter-0.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1d9870561eb26b11448854dce0ff27a9a27cb616b632468cafc938de25e9e51"},
{file = "jiter-0.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9872aeff3f21e437651df378cb75aeb7043e5297261222b6441a620218b58708"},
{file = "jiter-0.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1fd19112d1049bdd47f17bfbb44a2c0001061312dcf0e72765bfa8abd4aa30e5"},
{file = "jiter-0.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6ef5da104664e526836070e4a23b5f68dec1cc673b60bf1edb1bfbe8a55d0678"},
{file = "jiter-0.9.0-cp310-cp310-win32.whl", hash = "sha256:cb12e6d65ebbefe5518de819f3eda53b73187b7089040b2d17f5b39001ff31c4"},
{file = "jiter-0.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:c43ca669493626d8672be3b645dbb406ef25af3f4b6384cfd306da7eb2e70322"},
{file = "jiter-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6c4d99c71508912a7e556d631768dcdef43648a93660670986916b297f1c54af"},
{file = "jiter-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f60fb8ce7df529812bf6c625635a19d27f30806885139e367af93f6e734ef58"},
{file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51c4e1a4f8ea84d98b7b98912aa4290ac3d1eabfde8e3c34541fae30e9d1f08b"},
{file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f4c677c424dc76684fea3e7285a7a2a7493424bea89ac441045e6a1fb1d7b3b"},
{file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2221176dfec87f3470b21e6abca056e6b04ce9bff72315cb0b243ca9e835a4b5"},
{file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c7adb66f899ffa25e3c92bfcb593391ee1947dbdd6a9a970e0d7e713237d572"},
{file = "jiter-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c98d27330fdfb77913c1097a7aab07f38ff2259048949f499c9901700789ac15"},
{file = "jiter-0.9.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eda3f8cc74df66892b1d06b5d41a71670c22d95a1ca2cbab73654745ce9d0419"},
{file = "jiter-0.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:dd5ab5ddc11418dce28343123644a100f487eaccf1de27a459ab36d6cca31043"},
{file = "jiter-0.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42f8a68a69f047b310319ef8e2f52fdb2e7976fb3313ef27df495cf77bcad965"},
{file = "jiter-0.9.0-cp311-cp311-win32.whl", hash = "sha256:a25519efb78a42254d59326ee417d6f5161b06f5da827d94cf521fed961b1ff2"},
{file = "jiter-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:923b54afdd697dfd00d368b7ccad008cccfeb1efb4e621f32860c75e9f25edbd"},
{file = "jiter-0.9.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7b46249cfd6c48da28f89eb0be3f52d6fdb40ab88e2c66804f546674e539ec11"},
{file = "jiter-0.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:609cf3c78852f1189894383cf0b0b977665f54cb38788e3e6b941fa6d982c00e"},
{file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d726a3890a54561e55a9c5faea1f7655eda7f105bd165067575ace6e65f80bb2"},
{file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e89dc075c1fef8fa9be219e249f14040270dbc507df4215c324a1839522ea75"},
{file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04e8ffa3c353b1bc4134f96f167a2082494351e42888dfcf06e944f2729cbe1d"},
{file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:203f28a72a05ae0e129b3ed1f75f56bc419d5f91dfacd057519a8bd137b00c42"},
{file = "jiter-0.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fca1a02ad60ec30bb230f65bc01f611c8608b02d269f998bc29cca8619a919dc"},
{file = "jiter-0.9.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:237e5cee4d5d2659aaf91bbf8ec45052cc217d9446070699441a91b386ae27dc"},
{file = "jiter-0.9.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:528b6b71745e7326eed73c53d4aa57e2a522242320b6f7d65b9c5af83cf49b6e"},
{file = "jiter-0.9.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9f48e86b57bc711eb5acdfd12b6cb580a59cc9a993f6e7dcb6d8b50522dcd50d"},
{file = "jiter-0.9.0-cp312-cp312-win32.whl", hash = "sha256:699edfde481e191d81f9cf6d2211debbfe4bd92f06410e7637dffb8dd5dfde06"},
{file = "jiter-0.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:099500d07b43f61d8bd780466d429c45a7b25411b334c60ca875fa775f68ccb0"},
{file = "jiter-0.9.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2764891d3f3e8b18dce2cff24949153ee30c9239da7c00f032511091ba688ff7"},
{file = "jiter-0.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:387b22fbfd7a62418d5212b4638026d01723761c75c1c8232a8b8c37c2f1003b"},
{file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d8da8629ccae3606c61d9184970423655fb4e33d03330bcdfe52d234d32f69"},
{file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1be73d8982bdc278b7b9377426a4b44ceb5c7952073dd7488e4ae96b88e1103"},
{file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2228eaaaa111ec54b9e89f7481bffb3972e9059301a878d085b2b449fbbde635"},
{file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11509bfecbc319459647d4ac3fd391d26fdf530dad00c13c4dadabf5b81f01a4"},
{file = "jiter-0.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f22238da568be8bbd8e0650e12feeb2cfea15eda4f9fc271d3b362a4fa0604d"},
{file = "jiter-0.9.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17f5d55eb856597607562257c8e36c42bc87f16bef52ef7129b7da11afc779f3"},
{file = "jiter-0.9.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:6a99bed9fbb02f5bed416d137944419a69aa4c423e44189bc49718859ea83bc5"},
{file = "jiter-0.9.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e057adb0cd1bd39606100be0eafe742de2de88c79df632955b9ab53a086b3c8d"},
{file = "jiter-0.9.0-cp313-cp313-win32.whl", hash = "sha256:f7e6850991f3940f62d387ccfa54d1a92bd4bb9f89690b53aea36b4364bcab53"},
{file = "jiter-0.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:c8ae3bf27cd1ac5e6e8b7a27487bf3ab5f82318211ec2e1346a5b058756361f7"},
{file = "jiter-0.9.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f0b2827fb88dda2cbecbbc3e596ef08d69bda06c6f57930aec8e79505dc17001"},
{file = "jiter-0.9.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062b756ceb1d40b0b28f326cba26cfd575a4918415b036464a52f08632731e5a"},
{file = "jiter-0.9.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6f7838bc467ab7e8ef9f387bd6de195c43bad82a569c1699cb822f6609dd4cdf"},
{file = "jiter-0.9.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4a2d16360d0642cd68236f931b85fe50288834c383492e4279d9f1792e309571"},
{file = "jiter-0.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e84ed1c9c9ec10bbb8c37f450077cbe3c0d4e8c2b19f0a49a60ac7ace73c7452"},
{file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f3c848209ccd1bfa344a1240763975ca917de753c7875c77ec3034f4151d06c"},
{file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7825f46e50646bee937e0f849d14ef3a417910966136f59cd1eb848b8b5bb3e4"},
{file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d82a811928b26d1a6311a886b2566f68ccf2b23cf3bfed042e18686f1f22c2d7"},
{file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0c058ecb51763a67f019ae423b1cbe3fa90f7ee6280c31a1baa6ccc0c0e2d06e"},
{file = "jiter-0.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9897115ad716c48f0120c1f0c4efae348ec47037319a6c63b2d7838bb53aaef4"},
{file = "jiter-0.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:351f4c90a24c4fb8c87c6a73af2944c440494ed2bea2094feecacb75c50398ae"},
{file = "jiter-0.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d45807b0f236c485e1e525e2ce3a854807dfe28ccf0d013dd4a563395e28008a"},
{file = "jiter-0.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1537a890724ba00fdba21787010ac6f24dad47f763410e9e1093277913592784"},
{file = "jiter-0.9.0-cp38-cp38-win32.whl", hash = "sha256:e3630ec20cbeaddd4b65513fa3857e1b7c4190d4481ef07fb63d0fad59033321"},
{file = "jiter-0.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:2685f44bf80e95f8910553bf2d33b9c87bf25fceae6e9f0c1355f75d2922b0ee"},
{file = "jiter-0.9.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:9ef340fae98065071ccd5805fe81c99c8f80484e820e40043689cf97fb66b3e2"},
{file = "jiter-0.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:efb767d92c63b2cd9ec9f24feeb48f49574a713870ec87e9ba0c2c6e9329c3e2"},
{file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:113f30f87fb1f412510c6d7ed13e91422cfd329436364a690c34c8b8bd880c42"},
{file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8793b6df019b988526f5a633fdc7456ea75e4a79bd8396a3373c371fc59f5c9b"},
{file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a9aaa5102dba4e079bb728076fadd5a2dca94c05c04ce68004cfd96f128ea34"},
{file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d838650f6ebaf4ccadfb04522463e74a4c378d7e667e0eb1865cfe3990bfac49"},
{file = "jiter-0.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0194f813efdf4b8865ad5f5c5f50f8566df7d770a82c51ef593d09e0b347020"},
{file = "jiter-0.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a7954a401d0a8a0b8bc669199db78af435aae1e3569187c2939c477c53cb6a0a"},
{file = "jiter-0.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4feafe787eb8a8d98168ab15637ca2577f6ddf77ac6c8c66242c2d028aa5420e"},
{file = "jiter-0.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:27cd1f2e8bb377f31d3190b34e4328d280325ad7ef55c6ac9abde72f79e84d2e"},
{file = "jiter-0.9.0-cp39-cp39-win32.whl", hash = "sha256:161d461dcbe658cf0bd0aa375b30a968b087cdddc624fc585f3867c63c6eca95"},
{file = "jiter-0.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:e8b36d8a16a61993be33e75126ad3d8aa29cf450b09576f3c427d27647fcb4aa"},
{file = "jiter-0.9.0.tar.gz", hash = "sha256:aadba0964deb424daa24492abc3d229c60c4a31bfee205aedbf1acc7639d7893"},
]

[[package]]
name = "jmespath"
version = "1.0.1"
@@ -814,6 +1095,230 @@ atomic-cache = ["atomicwrites"]
nearley = ["js2py"]
regex = ["regex"]

[[package]]
name = "markdown"
version = "3.8"
description = "Python implementation of John Gruber's Markdown."
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "markdown-3.8-py3-none-any.whl", hash = "sha256:794a929b79c5af141ef5ab0f2f642d0f7b1872981250230e72682346f7cc90dc"},
{file = "markdown-3.8.tar.gz", hash = "sha256:7df81e63f0df5c4b24b7d156eb81e4690595239b7d70937d0409f1b0de319c6f"},
]

[package.extras]
docs = ["mdx_gh_links (>=0.2)", "mkdocs (>=1.6)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"]
testing = ["coverage", "pyyaml"]

[[package]]
name = "markdownify"
version = "1.1.0"
description = "Convert HTML to markdown."
optional = false
python-versions = "*"
groups = ["docs"]
files = [
{file = "markdownify-1.1.0-py3-none-any.whl", hash = "sha256:32a5a08e9af02c8a6528942224c91b933b4bd2c7d078f9012943776fc313eeef"},
{file = "markdownify-1.1.0.tar.gz", hash = "sha256:449c0bbbf1401c5112379619524f33b63490a8fa479456d41de9dc9e37560ebd"},
]

[package.dependencies]
beautifulsoup4 = ">=4.9,<5"
six = ">=1.15,<2"

[[package]]
name = "markupsafe"
version = "3.0.2"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
{file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"},
{file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"},
{file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"},
{file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"},
{file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"},
{file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"},
{file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"},
{file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"},
{file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"},
{file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"},
{file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"},
{file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"},
{file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"},
{file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"},
{file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"},
{file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"},
{file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"},
{file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"},
{file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"},
{file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"},
{file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"},
{file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"},
{file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"},
{file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"},
{file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"},
{file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"},
{file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"},
{file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"},
{file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"},
{file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"},
{file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"},
{file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"},
{file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"},
{file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"},
{file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"},
{file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"},
{file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"},
{file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"},
{file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"},
{file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"},
{file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"},
{file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"},
{file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"},
{file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"},
{file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"},
{file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"},
{file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"},
{file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"},
{file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"},
{file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"},
]

[[package]]
name = "mergedeep"
version = "1.3.4"
description = "A deep merge function for 🐍."
optional = false
python-versions = ">=3.6"
groups = ["docs"]
files = [
{file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"},
{file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"},
]

[[package]]
name = "mkdocs"
version = "1.6.1"
description = "Project documentation with Markdown."
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"},
{file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"},
]

[package.dependencies]
click = ">=7.0"
colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""}
ghp-import = ">=1.0"
jinja2 = ">=2.11.1"
markdown = ">=3.3.6"
markupsafe = ">=2.0.1"
mergedeep = ">=1.3.4"
mkdocs-get-deps = ">=0.2.0"
packaging = ">=20.5"
pathspec = ">=0.11.1"
pyyaml = ">=5.1"
pyyaml-env-tag = ">=0.1"
watchdog = ">=2.0"

[package.extras]
i18n = ["babel (>=2.9.0)"]
min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.4)", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"]

[[package]]
name = "mkdocs-autorefs"
version = "1.4.1"
description = "Automatically link across pages in MkDocs."
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "mkdocs_autorefs-1.4.1-py3-none-any.whl", hash = "sha256:9793c5ac06a6ebbe52ec0f8439256e66187badf4b5334b5fde0b128ec134df4f"},
{file = "mkdocs_autorefs-1.4.1.tar.gz", hash = "sha256:4b5b6235a4becb2b10425c2fa191737e415b37aa3418919db33e5d774c9db079"},
]

[package.dependencies]
Markdown = ">=3.3"
markupsafe = ">=2.0.1"
mkdocs = ">=1.1"

[[package]]
name = "mkdocs-get-deps"
version = "0.2.0"
description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file"
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"},
{file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"},
]

[package.dependencies]
mergedeep = ">=1.3.4"
platformdirs = ">=2.2.0"
pyyaml = ">=5.1"

[[package]]
name = "mkdocstrings"
version = "0.29.1"
description = "Automatic documentation from sources, for MkDocs."
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "mkdocstrings-0.29.1-py3-none-any.whl", hash = "sha256:37a9736134934eea89cbd055a513d40a020d87dfcae9e3052c2a6b8cd4af09b6"},
{file = "mkdocstrings-0.29.1.tar.gz", hash = "sha256:8722f8f8c5cd75da56671e0a0c1bbed1df9946c0cef74794d6141b34011abd42"},
]

[package.dependencies]
Jinja2 = ">=2.11.1"
Markdown = ">=3.6"
MarkupSafe = ">=1.1"
mkdocs = ">=1.6"
mkdocs-autorefs = ">=1.4"
mkdocstrings-python = {version = ">=1.16.2", optional = true, markers = "extra == \"python\""}
pymdown-extensions = ">=6.3"

[package.extras]
crystal = ["mkdocstrings-crystal (>=0.3.4)"]
python = ["mkdocstrings-python (>=1.16.2)"]
python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"]

[[package]]
name = "mkdocstrings-python"
version = "1.16.10"
description = "A Python handler for mkdocstrings."
optional = false
python-versions = ">=3.9"
groups = ["docs"]
files = [
{file = "mkdocstrings_python-1.16.10-py3-none-any.whl", hash = "sha256:63bb9f01f8848a644bdb6289e86dc38ceddeaa63ecc2e291e3b2ca52702a6643"},
{file = "mkdocstrings_python-1.16.10.tar.gz", hash = "sha256:f9eedfd98effb612ab4d0ed6dd2b73aff6eba5215e0a65cea6d877717f75502e"},
]

[package.dependencies]
griffe = ">=1.6.2"
mkdocs-autorefs = ">=1.4"
mkdocstrings = ">=0.28.3"
typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}

[[package]]
name = "multidict"
version = "6.2.0"
@@ -985,6 +1490,33 @@ files = [
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]

[[package]]
name = "openai"
version = "1.75.0"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.8"
groups = ["docs"]
files = [
{file = "openai-1.75.0-py3-none-any.whl", hash = "sha256:fe6f932d2ded3b429ff67cc9ad118c71327db32eb9d32dd723de3acfca337125"},
{file = "openai-1.75.0.tar.gz", hash = "sha256:fb3ea907efbdb1bcfd0c44507ad9c961afd7dce3147292b54505ecfd17be8fd1"},
]

[package.dependencies]
anyio = ">=3.5.0,<5"
distro = ">=1.7.0,<2"
httpx = ">=0.23.0,<1"
jiter = ">=0.4.0,<1"
pydantic = ">=1.9.0,<3"
sniffio = "*"
tqdm = ">4"
typing-extensions = ">=4.11,<5"

[package.extras]
datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
realtime = ["websockets (>=13,<16)"]
voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"]

[[package]]
name = "opentelemetry-api"
version = "1.31.1"
@@ -1179,7 +1711,7 @@ version = "24.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["main", "lint", "test"]
groups = ["main", "docs", "lint", "test"]
files = [
{file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
{file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
@@ -1192,7 +1724,7 @@ version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
groups = ["lint"]
groups = ["docs", "lint"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
@@ -1204,7 +1736,7 @@ version = "4.3.7"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.9"
groups = ["lint"]
groups = ["docs", "lint"]
files = [
{file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"},
{file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"},
@@ -1452,7 +1984,7 @@ version = "2.10.6"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
groups = ["main"]
groups = ["main", "docs"]
files = [
{file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"},
{file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"},
@@ -1473,7 +2005,7 @@ version = "2.27.2"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.8"
groups = ["main"]
groups = ["main", "docs"]
files = [
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"},
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"},
@@ -1601,6 +2133,45 @@ azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0
|
||||
toml = ["tomli (>=2.0.1)"]
|
||||
yaml = ["pyyaml (>=6.0.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "pydoclint"
|
||||
version = "0.6.6"
|
||||
description = "A Python docstring linter that checks arguments, returns, yields, and raises sections"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["docs"]
|
||||
files = [
|
||||
{file = "pydoclint-0.6.6-py2.py3-none-any.whl", hash = "sha256:7ce8ed36f60f9201bf1c1edacb32c55eb051af80fdd7304480c6419ee0ced43c"},
|
||||
{file = "pydoclint-0.6.6.tar.gz", hash = "sha256:22862a8494d05cdf22574d6533f4c47933c0ae1674b0f8b961d6ef42536eaa69"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
click = ">=8.1.0"
|
||||
docstring_parser_fork = ">=0.0.12"
|
||||
tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
|
||||
|
||||
[package.extras]
|
||||
flake8 = ["flake8 (>=4)"]
|
||||
|
||||
[[package]]
|
||||
name = "pymdown-extensions"
|
||||
version = "10.14.3"
|
||||
description = "Extension pack for Python Markdown."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["docs"]
|
||||
files = [
|
||||
{file = "pymdown_extensions-10.14.3-py3-none-any.whl", hash = "sha256:05e0bee73d64b9c71a4ae17c72abc2f700e8bc8403755a00580b49a4e9f189e9"},
|
||||
{file = "pymdown_extensions-10.14.3.tar.gz", hash = "sha256:41e576ce3f5d650be59e900e4ceff231e0aed2a88cf30acaee41e02f063a061b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
markdown = ">=3.6"
|
||||
pyyaml = "*"
|
||||
|
||||
[package.extras]
|
||||
extra = ["pygments (>=2.19.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "8.3.5"
|
||||
@@ -1686,7 +2257,7 @@ version = "2.9.0.post0"
|
||||
description = "Extensions to the standard Python datetime module"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
|
||||
groups = ["main"]
|
||||
groups = ["main", "docs"]
|
||||
files = [
|
||||
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
|
||||
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
|
||||
@@ -1716,7 +2287,7 @@ version = "6.0.2"
|
||||
description = "YAML parser and emitter for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
groups = ["main", "docs"]
|
||||
files = [
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
|
||||
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
|
||||
@@ -1773,6 +2344,21 @@ files = [
|
||||
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml-env-tag"
|
||||
version = "0.1"
|
||||
description = "A custom YAML tag for referencing environment variables in YAML files. "
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
groups = ["docs"]
|
||||
files = [
|
||||
{file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"},
|
||||
{file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pyyaml = "*"
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.32.3"
|
||||
@@ -1851,12 +2437,36 @@ version = "1.17.0"
|
||||
description = "Python 2 and 3 compatibility utilities"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
|
||||
groups = ["main"]
|
||||
groups = ["main", "docs"]
|
||||
files = [
|
||||
{file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
|
||||
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.1"
|
||||
description = "Sniff out which async library your code is running under"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["docs"]
|
||||
files = [
|
||||
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
|
||||
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "soupsieve"
|
||||
version = "2.6"
|
||||
description = "A modern CSS selector implementation for Beautiful Soup."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["docs"]
|
||||
files = [
|
||||
{file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"},
|
||||
{file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tenacity"
|
||||
version = "9.0.0"
|
||||
@@ -1879,7 +2489,7 @@ version = "2.2.1"
|
||||
description = "A lil' TOML parser"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["lint", "test"]
|
||||
groups = ["docs", "lint", "test"]
|
||||
markers = "python_version < \"3.11\""
|
||||
files = [
|
||||
{file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
|
||||
@@ -1916,6 +2526,55 @@ files = [
|
||||
{file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tqdm"
|
||||
version = "4.67.1"
|
||||
description = "Fast, Extensible Progress Meter"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["docs"]
|
||||
files = [
|
||||
{file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"},
|
||||
{file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
||||
|
||||
[package.extras]
|
||||
dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"]
|
||||
discord = ["requests"]
|
||||
notebook = ["ipywidgets (>=6)"]
|
||||
slack = ["slack-sdk"]
|
||||
telegram = ["requests"]
|
||||
|
||||
[[package]]
|
||||
name = "types-beautifulsoup4"
|
||||
version = "4.12.0.20250204"
|
||||
description = "Typing stubs for beautifulsoup4"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["docs"]
|
||||
files = [
|
||||
{file = "types_beautifulsoup4-4.12.0.20250204-py3-none-any.whl", hash = "sha256:57ce9e75717b63c390fd789c787d267a67eb01fa6d800a03b9bdde2e877ed1eb"},
|
||||
{file = "types_beautifulsoup4-4.12.0.20250204.tar.gz", hash = "sha256:f083d8edcbd01279f8c3995b56cfff2d01f1bb894c3b502ba118d36fbbc495bf"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
types-html5lib = "*"
|
||||
|
||||
[[package]]
|
||||
name = "types-html5lib"
|
||||
version = "1.1.11.20241018"
|
||||
description = "Typing stubs for html5lib"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["docs"]
|
||||
files = [
|
||||
{file = "types-html5lib-1.1.11.20241018.tar.gz", hash = "sha256:98042555ff78d9e3a51c77c918b1041acbb7eb6c405408d8a9e150ff5beccafa"},
|
||||
{file = "types_html5lib-1.1.11.20241018-py3-none-any.whl", hash = "sha256:3f1e064d9ed2c289001ae6392c84c93833abb0816165c6ff0abfc304a779f403"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-protobuf"
|
||||
version = "5.29.1.20250315"
|
||||
@@ -1985,7 +2644,7 @@ version = "4.12.2"
|
||||
description = "Backported and Experimental Type Hints for Python 3.8+"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main", "lint", "test"]
|
||||
groups = ["main", "docs", "lint", "test"]
|
||||
files = [
|
||||
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
|
||||
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
|
||||
@@ -2022,6 +2681,49 @@ h2 = ["h2 (>=4,<5)"]
|
||||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "watchdog"
|
||||
version = "6.0.0"
|
||||
description = "Filesystem events monitoring"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["docs"]
|
||||
files = [
|
||||
{file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"},
|
||||
{file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"},
|
||||
{file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"},
|
||||
{file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"},
|
||||
{file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"},
|
||||
{file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"},
|
||||
{file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"},
|
||||
{file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"},
|
||||
{file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"},
|
||||
{file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"},
|
||||
{file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"},
|
||||
{file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"},
|
||||
{file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"},
|
||||
{file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"},
|
||||
{file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"},
|
||||
{file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"},
|
||||
{file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"},
|
||||
{file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"},
|
||||
{file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"},
|
||||
{file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"},
|
||||
{file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"},
|
||||
{file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"},
|
||||
{file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"},
|
||||
{file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"},
|
||||
{file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"},
|
||||
{file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"},
|
||||
{file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"},
|
||||
{file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"},
|
||||
{file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"},
|
||||
{file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
watchmedo = ["PyYAML (>=3.10)"]
|
||||
|
||||
[[package]]
|
||||
name = "wrapt"
|
||||
version = "1.17.2"
|
||||
@@ -2236,4 +2938,4 @@ otel = ["opentelemetry-api", "opentelemetry-distro", "opentelemetry-exporter-otl
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = "^3.10"
|
||||
content-hash = "4b0f40679afd769c78fcf643a82dfe88cf4600760ae116d81586353f0f82d4aa"
|
||||
content-hash = "7214497e355346abbc18b0e54901b8278c0856be9d8d26c34eb0514d4236eb31"
|
||||
|
||||
@@ -1,6 +1,6 @@
[tool.poetry]
name = "hatchet-sdk"
-version = "1.6.2"
+version = "1.6.3"
description = ""
authors = ["Alexander Belanger <alexander@hatchet.run>"]
readme = "README.md"
@@ -53,6 +53,16 @@ pytest-env = "^1.1.5"
pytest-retry = "^1.7.0"
psycopg = { extras = ["pool"], version = "^3.2.6" }


[tool.poetry.group.docs.dependencies]
mkdocs = "^1.6.1"
mkdocstrings = { extras = ["python"], version = "^0.29.1" }
pydoclint = "^0.6.6"
markdownify = "^1.1.0"
openai = "^1.75.0"
bs4 = "^0.0.2"
types-beautifulsoup4 = "^4.12.0.20250204"

[tool.poetry.extras]
otel = [
    "opentelemetry-api",
@@ -89,7 +99,7 @@ known_third_party = [
extend_skip = ["hatchet_sdk/contracts/", "hatchet_sdk/v0/contracts/"]

[tool.black]
-extend_exclude = "(hatchet_sdk/contracts/|hatchet_sdk/v0/contracts/)"
+extend_exclude = "(hatchet_sdk/contracts/|hatchet_sdk/v0/contracts/|site)"

[tool.mypy]
files = ["."]
@@ -102,6 +112,7 @@ exclude = [
    "hatchet_sdk/clients/rest/exceptions.py",
    "hatchet_sdk/clients/rest/rest.py",
    "hatchet_sdk/v0/*",
    "site/*",
]
strict = true
enable_error_code = [
@@ -121,6 +132,7 @@ exclude = [
    "hatchet_sdk/clients/rest/exceptions.py",
    "hatchet_sdk/clients/rest/rest.py",
    "hatchet_sdk/v0/*",
    "site/*",
]
target-version = "py310"

@@ -155,3 +167,13 @@ waits = "examples.waits.worker:main"
durable = "examples.durable.worker:main"
streaming = "examples.streaming.worker:main"
lifespans = "examples.lifespans.worker:main"
docs-gen = "docs.generator.generate:main"

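The new `docs-gen` entry maps a Poetry console script to `docs.generator.generate:main`, so after installing the `docs` group (`poetry install --with docs`) the docs pipeline runs with `poetry run docs-gen`. As a minimal sketch of the shape such an entry point takes, assuming the generator wraps an mkdocs build (the real module's internals are not shown in this diff):

```python
# Illustrative only: the real docs.generator.generate module is not part of
# this diff. A Poetry script of the form "pkg.module:main" just needs a
# zero-argument callable at that import path.
import subprocess


def main() -> None:
    """Build the mkdocs site so its output can be exported as markdown."""
    # A real generator would also post-process the rendered pages; this
    # only demonstrates the entry-point shape Poetry expects.
    subprocess.run(["mkdocs", "build"], check=True)


if __name__ == "__main__":
    main()
```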
[tool.pydoclint]
style = 'sphinx'
exclude = 'v0|clients/rest/*|contracts/*|.venv|site/*'
arg-type-hints-in-docstring = false # Automatically checked by mypy and mkdocs
check-return-types = false # Automatically checked by mypy and mkdocs

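With `style = 'sphinx'`, pydoclint checks that each docstring's `:param:` names match the function signature and that `:raises:`/`:return:` sections are consistent with the body; because `arg-type-hints-in-docstring` and `check-return-types` are disabled, `:type:`/`:rtype:` lines are not required, since types come from annotations. An illustrative function (not from the SDK) that passes under these settings:

```python
def cancel_run(run_id: str) -> bool:
    """Cancel a workflow run by its external ID.

    :param run_id: The external ID of the run to cancel.
    :raises ValueError: If ``run_id`` is empty.
    :return: Whether the cancellation request was accepted.
    """
    if not run_id:
        raise ValueError("run_id must be non-empty")
    return True
```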
[tool.poetry.plugins."mkdocs.plugins"]
"markdown-export" = "docs.generator.markdown_export:MarkdownExportPlugin"
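Registering an entry point in the `mkdocs.plugins` group is how MkDocs discovers third-party plugins, so `markdown-export` can be enabled from `mkdocs.yml` like any built-in plugin. The plugin's implementation is not part of this diff; a minimal sketch of the class shape MkDocs expects, assuming it post-processes rendered pages:

```python
# Hypothetical sketch; the actual MarkdownExportPlugin internals are not
# shown in this diff. MkDocs instantiates the class named by the entry
# point and calls its event hooks during the build.
from mkdocs.plugins import BasePlugin


class MarkdownExportPlugin(BasePlugin):
    def on_post_page(self, output, *, page, config):
        # `output` is the final rendered HTML for `page`. A real exporter
        # might parse it (e.g. with bs4 + markdownify, both in the docs
        # dependency group above) and write a markdown copy to disk.
        return output
```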