Mirror of https://github.com/hatchet-dev/hatchet.git, synced 2026-02-06 16:18:45 -06:00
Feat: Expand OpenTelemetry Integration (#1792)
* feat: initial work expanding instrumentation
* feat: automatically inject traceparent into metadata in instrumentor
* feat: deprecate old attrs
* feat: add new namespaced attrs
* chore: version
* fix: type bug
* feat: tracing scheduled workflows
* fix: don't need duplication
* feat: convert timestamps to ISO
* fix: warn on use of old methods
* feat: changelog
* fix: enum breakages
* fix: docs
* feat: add a couple of additional attrs to bulk events
* cleanup: types
* fix: comment
* fix: example
* feat: langfuse example
* tweak: edge cases
* feat: example cleanup
* feat: examples
* chore: gen
* feat: langfuse docs
* feat: extend docs
* fix: lint
* fix: disclaimer
* fix: start and end whitespace
* fix: rm langfuse for now
* fix: rm langfuse trace pic
* fix: ci config
.github/workflows/build.yml (vendored): 1 line changed
@@ -4,6 +4,7 @@ on:
   paths-ignore:
     - 'sdks/**'
     - 'frontend/docs/**'
+    - 'examples/**'

 jobs:
   frontend:
.github/workflows/test.yml (vendored): 1 line changed
@@ -4,6 +4,7 @@ on:
   paths-ignore:
     - "sdks/**"
     - "frontend/docs/**"
+    - "examples/**"

 jobs:
   generate:
@@ -5,21 +5,17 @@ from examples.opentelemetry_instrumentation.tracer import trace_provider
 from examples.opentelemetry_instrumentation.worker import otel_workflow
 from hatchet_sdk.clients.admin import TriggerWorkflowOptions
 from hatchet_sdk.clients.events import BulkPushEventWithMetadata, PushEventOptions
-from hatchet_sdk.opentelemetry.instrumentor import (
-    HatchetInstrumentor,
-    inject_traceparent_into_metadata,
-)
+from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor

 instrumentor = HatchetInstrumentor(tracer_provider=trace_provider)
 tracer = trace_provider.get_tracer(__name__)


-def create_additional_metadata() -> dict[str, str]:
-    return inject_traceparent_into_metadata({"hello": "world"})
+ADDITIONAL_METADATA = {"hello": "world"}


 def create_push_options() -> PushEventOptions:
-    return PushEventOptions(additional_metadata=create_additional_metadata())
+    return PushEventOptions(additional_metadata=ADDITIONAL_METADATA)


 def push_event() -> None:
@@ -48,12 +44,12 @@ def bulk_push_event() -> None:
             BulkPushEventWithMetadata(
                 key="otel:event",
                 payload={"test": "test 1"},
-                additional_metadata=create_additional_metadata(),
+                additional_metadata=ADDITIONAL_METADATA,
             ),
             BulkPushEventWithMetadata(
                 key="otel:event",
                 payload={"test": "test 2"},
-                additional_metadata=create_additional_metadata(),
+                additional_metadata=ADDITIONAL_METADATA,
             ),
         ],
     )
@@ -67,12 +63,12 @@ async def async_bulk_push_event() -> None:
             BulkPushEventWithMetadata(
                 key="otel:event",
                 payload={"test": "test 1"},
-                additional_metadata=create_additional_metadata(),
+                additional_metadata=ADDITIONAL_METADATA,
             ),
             BulkPushEventWithMetadata(
                 key="otel:event",
                 payload={"test": "test 2"},
-                additional_metadata=create_additional_metadata(),
+                additional_metadata=ADDITIONAL_METADATA,
             ),
         ],
     )
@@ -82,9 +78,7 @@ def run_workflow() -> None:
     print("\nrun_workflow")
     with tracer.start_as_current_span("run_workflow"):
         otel_workflow.run(
-            options=TriggerWorkflowOptions(
-                additional_metadata=create_additional_metadata()
-            ),
+            options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),
         )


@@ -92,9 +86,7 @@ async def async_run_workflow() -> None:
     print("\nasync_run_workflow")
     with tracer.start_as_current_span("async_run_workflow"):
         await otel_workflow.aio_run(
-            options=TriggerWorkflowOptions(
-                additional_metadata=create_additional_metadata()
-            ),
+            options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),
         )


@@ -105,12 +97,12 @@ def run_workflows() -> None:
         [
             otel_workflow.create_bulk_run_item(
                 options=TriggerWorkflowOptions(
-                    additional_metadata=create_additional_metadata()
+                    additional_metadata=ADDITIONAL_METADATA
                 )
             ),
             otel_workflow.create_bulk_run_item(
                 options=TriggerWorkflowOptions(
-                    additional_metadata=create_additional_metadata()
+                    additional_metadata=ADDITIONAL_METADATA
                 )
             ),
         ],
@@ -124,12 +116,12 @@ async def async_run_workflows() -> None:
         [
             otel_workflow.create_bulk_run_item(
                 options=TriggerWorkflowOptions(
-                    additional_metadata=create_additional_metadata()
+                    additional_metadata=ADDITIONAL_METADATA
                 )
             ),
             otel_workflow.create_bulk_run_item(
                 options=TriggerWorkflowOptions(
-                    additional_metadata=create_additional_metadata()
+                    additional_metadata=ADDITIONAL_METADATA
                 )
             ),
         ],
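The simplified example above leans on the instrumentor's automatic context propagation. As a minimal sketch (names taken from the diff), any span you open around a trigger now becomes the parent of the Hatchet-created spans, with no manual `traceparent` plumbing:

```python
# Sketch: with HatchetInstrumentor active, an enclosing client-side span is
# propagated into the workflow run automatically via `additional_metadata`.
with tracer.start_as_current_span("checkout"):
    otel_workflow.run(
        options=TriggerWorkflowOptions(additional_metadata={"hello": "world"})
    )
```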
examples/python/quickstart/poetry.lock (generated): 48 lines changed
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand.

 [[package]]
 name = "aiohappyeyeballs"
@@ -114,7 +114,7 @@ propcache = ">=0.2.0"
 yarl = ">=1.17.0,<2.0"

 [package.extras]
-speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
+speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]

 [[package]]
 name = "aiohttp-retry"
@@ -199,12 +199,12 @@ files = [
 ]

 [package.extras]
-benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
 docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
-tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
-tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
+tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]

 [[package]]
 name = "cel-python"
@@ -460,14 +460,14 @@ setuptools = "*"

 [[package]]
 name = "hatchet-sdk"
-version = "1.10.3"
+version = "1.0.0a1"
 description = ""
 optional = false
 python-versions = "<4.0,>=3.10"
 groups = ["main"]
 files = [
-    {file = "hatchet_sdk-1.10.3-py3-none-any.whl", hash = "sha256:62aa93e31fe286f03e761dd73997144afd49f4ab9fcab9f4301a74587aea3213"},
-    {file = "hatchet_sdk-1.10.3.tar.gz", hash = "sha256:6f335dea328c5981c2bac9d19d5cc581b294f45863286710bacd111c806fa972"},
+    {file = "hatchet_sdk-1.0.0a1-py3-none-any.whl", hash = "sha256:bfc84358c8842cecd0d95b30645109733b7292dff0db1a776ca862785ee93d7f"},
+    {file = "hatchet_sdk-1.0.0a1.tar.gz", hash = "sha256:f0272bbaac6faed75ff727826e9f7b1ac42ae597f9b590e14d392aada9c9692f"},
 ]

 [package.dependencies]
@@ -483,11 +483,13 @@ grpcio-tools = [
     {version = ">=1.64.1,<1.68.dev0 || >=1.69.dev0", markers = "python_version < \"3.13\""},
     {version = ">=1.69.0", markers = "python_version >= \"3.13\""},
 ]
+nest-asyncio = ">=1.6.0,<2.0.0"
+prometheus-client = ">=0.21.1,<0.22.0"
 protobuf = ">=5.29.1,<6.0.0"
 pydantic = ">=2.6.3,<3.0.0"
 pydantic-settings = ">=2.7.1,<3.0.0"
 python-dateutil = ">=2.9.0.post0,<3.0.0"
 pyyaml = ">=6.0.1,<7.0.0"
 tenacity = ">=8.4.1"
 urllib3 = ">=1.26.20"

@@ -643,6 +645,18 @@ files = [
 [package.dependencies]
 typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}

+[[package]]
+name = "nest-asyncio"
+version = "1.6.0"
+description = "Patch asyncio to allow nested event loops"
+optional = false
+python-versions = ">=3.5"
+groups = ["main"]
+files = [
+    {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
+    {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
+]
+
 [[package]]
 name = "prometheus-client"
 version = "0.21.1"
@@ -806,7 +820,7 @@ typing-extensions = ">=4.12.2"

 [package.extras]
 email = ["email-validator (>=2.0.0)"]
-timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""]
+timezone = ["tzdata"]

 [[package]]
 name = "pydantic-core"
@@ -1048,13 +1062,13 @@ files = [
 ]

 [package.extras]
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
-core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]
+core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
 cover = ["pytest-cov"]
 doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
 enabler = ["pytest-enabler (>=2.2)"]
-test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
-type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]

 [[package]]
 name = "six"
@@ -1133,7 +1147,7 @@ files = [
 ]

 [package.extras]
-brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
 h2 = ["h2 (>=4,<5)"]
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]
@@ -1238,4 +1252,4 @@ propcache = ">=0.2.0"
 [metadata]
 lock-version = "2.1"
 python-versions = "^3.10"
-content-hash = "4cad75148c66a0c96b36f629dfd9faacd45f8c8fb63574c3dc9a3033ceab7533"
+content-hash = "74c12e499aa797ca5c8559af579f1212b0e4e3a77f068f9385db39d70ba304e0"
@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
 const snippet: Snippet = {
   language: 'python',
   content:
'import asyncio\n\nfrom examples.opentelemetry_instrumentation.client import hatchet\nfrom examples.opentelemetry_instrumentation.tracer import trace_provider\nfrom examples.opentelemetry_instrumentation.worker import otel_workflow\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\nfrom hatchet_sdk.clients.events import BulkPushEventWithMetadata, PushEventOptions\nfrom hatchet_sdk.opentelemetry.instrumentor import (\n HatchetInstrumentor,\n inject_traceparent_into_metadata,\n)\n\ninstrumentor = HatchetInstrumentor(tracer_provider=trace_provider)\ntracer = trace_provider.get_tracer(__name__)\n\n\ndef create_additional_metadata() -> dict[str, str]:\n return inject_traceparent_into_metadata({"hello": "world"})\n\n\ndef create_push_options() -> PushEventOptions:\n return PushEventOptions(additional_metadata=create_additional_metadata())\n\n\ndef push_event() -> None:\n print("\\npush_event")\n with tracer.start_as_current_span("push_event"):\n hatchet.event.push(\n "otel:event",\n {"test": "test"},\n options=create_push_options(),\n )\n\n\nasync def async_push_event() -> None:\n print("\\nasync_push_event")\n with tracer.start_as_current_span("async_push_event"):\n await hatchet.event.aio_push(\n "otel:event", {"test": "test"}, options=create_push_options()\n )\n\n\ndef bulk_push_event() -> None:\n print("\\nbulk_push_event")\n with tracer.start_as_current_span("bulk_push_event"):\n hatchet.event.bulk_push(\n [\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 1"},\n additional_metadata=create_additional_metadata(),\n ),\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 2"},\n additional_metadata=create_additional_metadata(),\n ),\n ],\n )\n\n\nasync def async_bulk_push_event() -> None:\n print("\\nasync_bulk_push_event")\n with tracer.start_as_current_span("bulk_push_event"):\n await hatchet.event.aio_bulk_push(\n [\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 1"},\n additional_metadata=create_additional_metadata(),\n ),\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 2"},\n additional_metadata=create_additional_metadata(),\n ),\n ],\n )\n\n\ndef run_workflow() -> None:\n print("\\nrun_workflow")\n with tracer.start_as_current_span("run_workflow"):\n otel_workflow.run(\n options=TriggerWorkflowOptions(\n additional_metadata=create_additional_metadata()\n ),\n )\n\n\nasync def async_run_workflow() -> None:\n print("\\nasync_run_workflow")\n with tracer.start_as_current_span("async_run_workflow"):\n await otel_workflow.aio_run(\n options=TriggerWorkflowOptions(\n additional_metadata=create_additional_metadata()\n ),\n )\n\n\ndef run_workflows() -> None:\n print("\\nrun_workflows")\n with tracer.start_as_current_span("run_workflows"):\n otel_workflow.run_many(\n [\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=create_additional_metadata()\n )\n ),\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=create_additional_metadata()\n )\n ),\n ],\n )\n\n\nasync def async_run_workflows() -> None:\n print("\\nasync_run_workflows")\n with tracer.start_as_current_span("async_run_workflows"):\n await otel_workflow.aio_run_many(\n [\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=create_additional_metadata()\n )\n ),\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=create_additional_metadata()\n )\n ),\n ],\n 
)\n\n\nasync def main() -> None:\n push_event()\n await async_push_event()\n bulk_push_event()\n await async_bulk_push_event()\n run_workflow()\n # await async_run_workflow()\n run_workflows()\n # await async_run_workflows()\n\n\nif __name__ == "__main__":\n asyncio.run(main())\n',
'import asyncio\n\nfrom examples.opentelemetry_instrumentation.client import hatchet\nfrom examples.opentelemetry_instrumentation.tracer import trace_provider\nfrom examples.opentelemetry_instrumentation.worker import otel_workflow\nfrom hatchet_sdk.clients.admin import TriggerWorkflowOptions\nfrom hatchet_sdk.clients.events import BulkPushEventWithMetadata, PushEventOptions\nfrom hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor\n\ninstrumentor = HatchetInstrumentor(tracer_provider=trace_provider)\ntracer = trace_provider.get_tracer(__name__)\n\n\nADDITIONAL_METADATA = {"hello": "world"}\n\n\ndef create_push_options() -> PushEventOptions:\n return PushEventOptions(additional_metadata=ADDITIONAL_METADATA)\n\n\ndef push_event() -> None:\n print("\\npush_event")\n with tracer.start_as_current_span("push_event"):\n hatchet.event.push(\n "otel:event",\n {"test": "test"},\n options=create_push_options(),\n )\n\n\nasync def async_push_event() -> None:\n print("\\nasync_push_event")\n with tracer.start_as_current_span("async_push_event"):\n await hatchet.event.aio_push(\n "otel:event", {"test": "test"}, options=create_push_options()\n )\n\n\ndef bulk_push_event() -> None:\n print("\\nbulk_push_event")\n with tracer.start_as_current_span("bulk_push_event"):\n hatchet.event.bulk_push(\n [\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 1"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 2"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n ],\n )\n\n\nasync def async_bulk_push_event() -> None:\n print("\\nasync_bulk_push_event")\n with tracer.start_as_current_span("bulk_push_event"):\n await hatchet.event.aio_bulk_push(\n [\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 1"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n BulkPushEventWithMetadata(\n key="otel:event",\n payload={"test": "test 2"},\n additional_metadata=ADDITIONAL_METADATA,\n ),\n ],\n )\n\n\ndef run_workflow() -> None:\n print("\\nrun_workflow")\n with tracer.start_as_current_span("run_workflow"):\n otel_workflow.run(\n options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),\n )\n\n\nasync def async_run_workflow() -> None:\n print("\\nasync_run_workflow")\n with tracer.start_as_current_span("async_run_workflow"):\n await otel_workflow.aio_run(\n options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),\n )\n\n\ndef run_workflows() -> None:\n print("\\nrun_workflows")\n with tracer.start_as_current_span("run_workflows"):\n otel_workflow.run_many(\n [\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n ],\n )\n\n\nasync def async_run_workflows() -> None:\n print("\\nasync_run_workflows")\n with tracer.start_as_current_span("async_run_workflows"):\n await otel_workflow.aio_run_many(\n [\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n otel_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n additional_metadata=ADDITIONAL_METADATA\n )\n ),\n ],\n )\n\n\nasync def main() -> None:\n push_event()\n await async_push_event()\n bulk_push_event()\n await async_bulk_push_event()\n run_workflow()\n # await async_run_workflow()\n run_workflows()\n # await async_run_workflows()\n\n\nif 
__name__ == "__main__":\n asyncio.run(main())\n',
   source: 'out/python/opentelemetry_instrumentation/triggers.py',
   blocks: {},
   highlights: {},
@@ -41,11 +41,13 @@ const CodeStyleRender = ({ parsed, language }: CodeStyleRenderProps) => {
 export default CodeStyleRender;

 function dedent(code: string) {
-  const lines = code.split("\n");
+  const trimmedCode = code.replace(/^\n+|\n+$/g, "");
+
+  const lines = trimmedCode.split("\n");
   const nonEmptyLines = lines.filter((line) => line.trim().length > 0);

   if (nonEmptyLines.length === 0) {
-    return code;
+    return trimmedCode;
   }

   const minIndent = Math.min(
@@ -67,5 +69,5 @@ function dedent(code: string) {
       .join("\n");
   }

-  return code;
+  return trimmedCode;
 }
File diff suppressed because one or more lines are too long
@@ -1,4 +1,6 @@
+import { Callout } from "nextra/components";
 import snips from "@/lib/snips";
 import { Snippet } from "@/components/code";

 # OpenTelemetry

@@ -12,7 +14,7 @@ Hatchet supports exporting traces from your tasks to an [OpenTelemetry Collector
 ### Setup

-Hatchet's SDK provides an instrumentor that auto-instruments Hatchet code, if you opt in. Setup is straightforward:
+Hatchet's SDK provides an instrumentor that auto-instruments Hatchet code if you opt in. Setup is straightforward:

 First, install the `otel` extra with (e.g.) `pip install hatchet-sdk[otel]`. Then, import the instrumentor:

@@ -31,39 +33,16 @@ You bring your own trace provider and plug it into the `HatchetInstrumentor`, ca
 for more information on how to set up a trace provider.
 </Callout>

-### Providing a `traceparent`
-
-In some cases, you might also want to provide a `traceparent` so any spans created in Hatchet are children of a parent that was created elsewhere in your application. You can do that by providing a `traceparent` key in the `additional_metadata` field of the corresponding options field of the following methods:
-
-- `hatchet.event.push` via the `PushEventOptions`
-- `hatchet.event.bulk_push` via the `BulkPushEventOptions`
-- `Workflow.run` via the `TriggerWorkflowOptions` (and similarly for all other flavors of `run`, like `aio_run`, `run_nowait`, etc.)
-- `Workflow.run_many` via the `TriggerWorkflowOptions` (and similarly for all other flavors of `run_many`, like `aio_run_many`, etc.)
-
-For example:
-
-```python
-hatchet.event.push(
-    "user:create",
-    {'userId': '1234'},
-    options=PushEventOptions(
-        additional_metadata={
-            "traceparent": "00-f1aff5c5ea45185eff2a06fd5c0ed6c5-6f4116aff54d54d1-01" ## example traceparent
-        }
-    )
-)
-```
-
-The `HatchetInstrumentor` also has some methods for generating traceparents that might be helpful:
-
-1. `create_traceparent` creates a `traceparent`
-2. `inject_traceparent_into_metadata` injects a traceparent into the `additional_metadata` field
-
 ### Spans

 By default, Hatchet creates spans at the following points in the lifecycle of a task run:

 1. When a trigger is run on the client side, e.g. `run()` or `push()` is called.
-2. When a worker handles a task event, like starting running the task or cancelling the task
+2. When a worker handles a task event, such as starting to run the task or cancelling the task

-In addition, you'll get a handful of attributes set (prefixed by `hatchet.`) on the task run events, such as the task name and the worker id, as well as success / failure states, and so on.
+In addition, you'll get a handful of attributes set (prefixed by `hatchet.`) on the task run events, such as the task name and the worker ID, as well as success/failure states, and so on.

 Some other important notes:

 1. The instrumentor will automatically propagate the trace context between task runs, so if you spawn a task from another task, the child will correctly show up as a child of its parent in the trace waterfall.
 2. You can exclude specific attributes from being attached to spans by providing the `otel` configuration option on the `ClientConfig` and passing a list of `excluded_attributes`, which come from [this list](https://github.com/hatchet-dev/hatchet/blob/main/sdks/python/hatchet_sdk/utils/opentelemetry.py).
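For context, a hedged sketch of the trace provider the docs tell you to bring: the exporter endpoint and service name below are illustrative assumptions, not values from this commit.

```python
# Minimal tracer-provider wiring for the instrumentor (assumes an OTLP/HTTP
# collector at localhost:4318; swap in your own exporter).
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor

trace_provider = TracerProvider(
    resource=Resource.create({"service.name": "my-hatchet-app"})
)
trace_provider.add_span_processor(
    BatchSpanProcessor(OTLPSpanExporter(endpoint="http://localhost:4318/v1/traces"))
)

# Opt in to auto-instrumentation of Hatchet client and worker code.
HatchetInstrumentor(tracer_provider=trace_provider).instrument()
```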
@@ -5,6 +5,15 @@ All notable changes to Hatchet's Python SDK will be documented in this changelog
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

+## [1.11.0] - 2025-05-29
+
+### Changed
+
+- Significant improvements to the OpenTelemetry instrumentor, including:
+  - Traceparents are now propagated automatically through the metadata, so the client does not need to provide them manually.
+  - Added a handful of attributes to the `run_workflow`, `push_event`, etc. spans, such as the workflow being run / event being pushed, the metadata, and so on.
+  - Added tracing for workflow scheduling.
+
 ## [1.10.2] - 2025-05-19

 ### Changed
@@ -5,21 +5,17 @@ from examples.opentelemetry_instrumentation.tracer import trace_provider
 from examples.opentelemetry_instrumentation.worker import otel_workflow
 from hatchet_sdk.clients.admin import TriggerWorkflowOptions
 from hatchet_sdk.clients.events import BulkPushEventWithMetadata, PushEventOptions
-from hatchet_sdk.opentelemetry.instrumentor import (
-    HatchetInstrumentor,
-    inject_traceparent_into_metadata,
-)
+from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor

 instrumentor = HatchetInstrumentor(tracer_provider=trace_provider)
 tracer = trace_provider.get_tracer(__name__)


-def create_additional_metadata() -> dict[str, str]:
-    return inject_traceparent_into_metadata({"hello": "world"})
+ADDITIONAL_METADATA = {"hello": "world"}


 def create_push_options() -> PushEventOptions:
-    return PushEventOptions(additional_metadata=create_additional_metadata())
+    return PushEventOptions(additional_metadata=ADDITIONAL_METADATA)


 def push_event() -> None:
@@ -48,12 +44,12 @@ def bulk_push_event() -> None:
             BulkPushEventWithMetadata(
                 key="otel:event",
                 payload={"test": "test 1"},
-                additional_metadata=create_additional_metadata(),
+                additional_metadata=ADDITIONAL_METADATA,
             ),
             BulkPushEventWithMetadata(
                 key="otel:event",
                 payload={"test": "test 2"},
-                additional_metadata=create_additional_metadata(),
+                additional_metadata=ADDITIONAL_METADATA,
             ),
         ],
     )
@@ -67,12 +63,12 @@ async def async_bulk_push_event() -> None:
             BulkPushEventWithMetadata(
                 key="otel:event",
                 payload={"test": "test 1"},
-                additional_metadata=create_additional_metadata(),
+                additional_metadata=ADDITIONAL_METADATA,
             ),
             BulkPushEventWithMetadata(
                 key="otel:event",
                 payload={"test": "test 2"},
-                additional_metadata=create_additional_metadata(),
+                additional_metadata=ADDITIONAL_METADATA,
             ),
         ],
     )
@@ -82,9 +78,7 @@ def run_workflow() -> None:
     print("\nrun_workflow")
     with tracer.start_as_current_span("run_workflow"):
         otel_workflow.run(
-            options=TriggerWorkflowOptions(
-                additional_metadata=create_additional_metadata()
-            ),
+            options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),
         )


@@ -92,9 +86,7 @@ async def async_run_workflow() -> None:
     print("\nasync_run_workflow")
     with tracer.start_as_current_span("async_run_workflow"):
         await otel_workflow.aio_run(
-            options=TriggerWorkflowOptions(
-                additional_metadata=create_additional_metadata()
-            ),
+            options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),
         )


@@ -105,12 +97,12 @@ def run_workflows() -> None:
         [
             otel_workflow.create_bulk_run_item(
                 options=TriggerWorkflowOptions(
-                    additional_metadata=create_additional_metadata()
+                    additional_metadata=ADDITIONAL_METADATA
                 )
             ),
             otel_workflow.create_bulk_run_item(
                 options=TriggerWorkflowOptions(
-                    additional_metadata=create_additional_metadata()
+                    additional_metadata=ADDITIONAL_METADATA
                 )
             ),
         ],
@@ -124,12 +116,12 @@ async def async_run_workflows() -> None:
         [
             otel_workflow.create_bulk_run_item(
                 options=TriggerWorkflowOptions(
-                    additional_metadata=create_additional_metadata()
+                    additional_metadata=ADDITIONAL_METADATA
                )
             ),
             otel_workflow.create_bulk_run_item(
                 options=TriggerWorkflowOptions(
-                    additional_metadata=create_additional_metadata()
+                    additional_metadata=ADDITIONAL_METADATA
                 )
             ),
         ],
@@ -1,6 +1,8 @@
 import json
+from importlib.metadata import version
-from typing import Any, Callable, Collection, Coroutine
+from typing import Any, Callable, Collection, Coroutine, Union, cast

+from hatchet_sdk.contracts import workflows_pb2 as v0_workflow_protos
 from hatchet_sdk.utils.typing import JSONSerializableMapping

 try:
@@ -26,20 +28,29 @@ except (RuntimeError, ImportError, ModuleNotFoundError):
         "To use the HatchetInstrumentor, you must install Hatchet's `otel` extra using (e.g.) `pip install hatchet-sdk[otel]`"
     )

+import inspect
+from datetime import datetime
+
+from google.protobuf import timestamp_pb2
+
 import hatchet_sdk
 from hatchet_sdk import ClientConfig
 from hatchet_sdk.clients.admin import (
     AdminClient,
+    ScheduleTriggerWorkflowOptions,
     TriggerWorkflowOptions,
     WorkflowRunTriggerConfig,
 )
 from hatchet_sdk.clients.events import (
+    BulkPushEventOptions,
     BulkPushEventWithMetadata,
     EventClient,
     PushEventOptions,
 )
 from hatchet_sdk.contracts.events_pb2 import Event
+from hatchet_sdk.logger import logger
 from hatchet_sdk.runnables.action import Action
 from hatchet_sdk.utils.opentelemetry import OTelAttribute
 from hatchet_sdk.worker.runner.runner import Runner
 from hatchet_sdk.workflow_run import WorkflowRunRef
@@ -51,6 +62,13 @@ OTEL_TRACEPARENT_KEY = "traceparent"


 def create_traceparent() -> str | None:
+    logger.warning(
+        "As of SDK version 1.11.0, you no longer need to call `create_traceparent` manually. The traceparent will be automatically created by the instrumentor and injected into the metadata of actions and events when appropriate. This method will be removed in a future version.",
+    )
+    return _create_traceparent()
+
+
+def _create_traceparent() -> str | None:
     """
     Creates and returns a W3C traceparent header value using OpenTelemetry's context propagation.
@@ -70,6 +88,16 @@ def create_traceparent() -> str | None:

 def parse_carrier_from_metadata(
     metadata: JSONSerializableMapping | None,
 ) -> Context | None:
+    logger.warning(
+        "As of SDK version 1.11.0, you no longer need to call `parse_carrier_from_metadata` manually. This method will be removed in a future version.",
+    )
+
+    return _parse_carrier_from_metadata(metadata)
+
+
+def _parse_carrier_from_metadata(
+    metadata: JSONSerializableMapping | None,
+) -> Context | None:
     """
     Parses OpenTelemetry trace context from a metadata dictionary.
@@ -86,7 +114,7 @@ def parse_carrier_from_metadata(
     :Example:

     >>> metadata = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"}
-    >>> context = parse_carrier_from_metadata(metadata)
+    >>> context = _parse_carrier_from_metadata(metadata)
     """

     if not metadata:
@@ -102,6 +130,16 @@ def parse_carrier_from_metadata(

 def inject_traceparent_into_metadata(
     metadata: dict[str, str], traceparent: str | None = None
 ) -> dict[str, str]:
+    logger.warning(
+        "As of SDK version 1.11.0, you no longer need to call `inject_traceparent_into_metadata` manually. The traceparent will automatically be injected by the instrumentor. This method will be removed in a future version.",
+    )
+
+    return _inject_traceparent_into_metadata(metadata, traceparent)
+
+
+def _inject_traceparent_into_metadata(
+    metadata: dict[str, str], traceparent: str | None = None
+) -> dict[str, str]:
     """
     Injects OpenTelemetry `traceparent` into a metadata dictionary.
@@ -125,9 +163,8 @@ def inject_traceparent_into_metadata(
     >>> print(new_metadata)
     {"key": "value", "traceparent": "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01"}
     """
-
     if not traceparent:
-        traceparent = create_traceparent()
+        traceparent = _create_traceparent()

     if not traceparent:
         return metadata
@@ -213,6 +250,14 @@ class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
             self._wrap_async_run_workflow,
         )

+        ## IMPORTANT: We don't need to instrument the async version of `schedule_workflow`
+        ## because it just calls the sync version internally.
+        wrap_function_wrapper(
+            hatchet_sdk,
+            "clients.admin.AdminClient.schedule_workflow",
+            self._wrap_schedule_workflow,
+        )
+
         wrap_function_wrapper(
             hatchet_sdk,
             "clients.admin.AdminClient.run_workflows",
@@ -225,6 +270,19 @@ class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
             self._wrap_async_run_workflows,
         )

+    def extract_bound_args(
+        self,
+        wrapped_func: Callable[..., Any],
+        args: tuple[Any, ...],
+        kwargs: dict[str, Any],
+    ) -> list[Any]:
+        sig = inspect.signature(wrapped_func)
+
+        bound_args = sig.bind(*args, **kwargs)
+        bound_args.apply_defaults()
+
+        return list(bound_args.arguments.values())
+
     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     async def _wrap_handle_start_step_run(
         self,
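`extract_bound_args` normalizes positional and keyword call styles into one ordered parameter list. A standalone stdlib sketch of the same mechanics (the `push` signature here is illustrative):

```python
import inspect

def push(key: str, payload: dict, options: object | None = None) -> None: ...

sig = inspect.signature(push)
bound = sig.bind("otel:event", {"test": "test"})  # positional call
bound.apply_defaults()  # fills in options=None

# Keyword calls bind to the same ordered values, so downstream code can index
# params[0], params[1], ... regardless of how the wrapped method was invoked.
print(list(bound.arguments.values()))  # ['otel:event', {'test': 'test'}, None]
```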
@@ -233,8 +291,11 @@ class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
         args: tuple[Action],
         kwargs: Any,
     ) -> Exception | None:
-        action = args[0]
-        traceparent = parse_carrier_from_metadata(action.additional_metadata)
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        action = cast(Action, params[0])
+
+        traceparent = _parse_carrier_from_metadata(action.additional_metadata)

         with self._tracer.start_as_current_span(
             "hatchet.start_step_run",
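Under the hood, parsing the carrier amounts to running the W3C propagator over the metadata dict; a small sketch using the standard OpenTelemetry propagator (assumed here to be what this module wraps):

```python
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator

metadata = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"}
# extract() returns a Context that can parent the "hatchet.start_step_run" span
ctx = TraceContextTextMapPropagator().extract(carrier=metadata)
```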
@@ -290,66 +351,293 @@ class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     def _wrap_push_event(
         self,
-        wrapped: Callable[[str, dict[str, Any], PushEventOptions | None], Event],
+        wrapped: Callable[[str, dict[str, Any], PushEventOptions], Event],
         instance: EventClient,
         args: tuple[
             str,
             dict[str, Any],
-            PushEventOptions | None,
+            PushEventOptions,
         ],
-        kwargs: dict[str, str | dict[str, Any] | PushEventOptions | None],
+        kwargs: dict[str, str | dict[str, Any] | PushEventOptions],
     ) -> Event:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        event_key = cast(str, params[0])
+        payload = cast(JSONSerializableMapping, params[1])
+        options = cast(
+            PushEventOptions,
+            params[2] if len(params) > 2 else PushEventOptions(),
+        )
+
+        attributes = {
+            OTelAttribute.EVENT_KEY: event_key,
+            OTelAttribute.ACTION_PAYLOAD: json.dumps(payload, default=str),
+            OTelAttribute.ADDITIONAL_METADATA: json.dumps(
+                options.additional_metadata, default=str
+            ),
+            OTelAttribute.NAMESPACE: options.namespace,
+            OTelAttribute.PRIORITY: options.priority,
+            OTelAttribute.FILTER_SCOPE: options.scope,
+        }
+
         with self._tracer.start_as_current_span(
             "hatchet.push_event",
+            attributes={
+                f"hatchet.{k.value}": v
+                for k, v in attributes.items()
+                if v
+                and k not in self.config.otel.excluded_attributes
+                and v != "{}"
+                and v != "[]"
+            },
         ):
-            return wrapped(*args, **kwargs)
+            options = PushEventOptions(
+                **options.model_dump(exclude={"additional_metadata"}),
+                additional_metadata=_inject_traceparent_into_metadata(
+                    dict(options.additional_metadata),
+                ),
+            )
+
+            return wrapped(event_key, dict(payload), options)

     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     def _wrap_bulk_push_event(
         self,
         wrapped: Callable[
-            [list[BulkPushEventWithMetadata], PushEventOptions | None], list[Event]
+            [list[BulkPushEventWithMetadata], BulkPushEventOptions], list[Event]
         ],
         instance: EventClient,
         args: tuple[
             list[BulkPushEventWithMetadata],
-            PushEventOptions | None,
+            BulkPushEventOptions,
         ],
-        kwargs: dict[str, list[BulkPushEventWithMetadata] | PushEventOptions | None],
+        kwargs: dict[str, list[BulkPushEventWithMetadata] | BulkPushEventOptions],
     ) -> list[Event]:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        bulk_events = cast(list[BulkPushEventWithMetadata], params[0])
+        options = cast(BulkPushEventOptions, params[1])
+
+        num_bulk_events = len(bulk_events)
+        unique_event_keys = {event.key for event in bulk_events}
+
         with self._tracer.start_as_current_span(
             "hatchet.bulk_push_event",
+            attributes={
+                "hatchet.num_events": num_bulk_events,
+                "hatchet.unique_event_keys": json.dumps(unique_event_keys, default=str),
+            },
         ):
-            return wrapped(*args, **kwargs)
+            bulk_events_with_meta = [
+                BulkPushEventWithMetadata(
+                    **event.model_dump(exclude={"additional_metadata"}),
+                    additional_metadata=_inject_traceparent_into_metadata(
+                        dict(event.additional_metadata),
+                    ),
+                )
+                for event in bulk_events
+            ]
+
+            return wrapped(
+                bulk_events_with_meta,
+                options,
+            )

     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     def _wrap_run_workflow(
         self,
-        wrapped: Callable[[str, Any, TriggerWorkflowOptions | None], WorkflowRunRef],
+        wrapped: Callable[
+            [str, JSONSerializableMapping, TriggerWorkflowOptions],
+            WorkflowRunRef,
+        ],
         instance: AdminClient,
-        args: tuple[str, Any, TriggerWorkflowOptions | None],
-        kwargs: dict[str, str | Any | TriggerWorkflowOptions | None],
+        args: tuple[str, JSONSerializableMapping, TriggerWorkflowOptions],
+        kwargs: dict[str, str | JSONSerializableMapping | TriggerWorkflowOptions],
     ) -> WorkflowRunRef:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        workflow_name = cast(str, params[0])
+        payload = cast(JSONSerializableMapping, params[1])
+        options = cast(
+            TriggerWorkflowOptions,
+            params[2] if len(params) > 2 else TriggerWorkflowOptions(),
+        )
+
+        attributes = {
+            OTelAttribute.WORKFLOW_NAME: workflow_name,
+            OTelAttribute.ACTION_PAYLOAD: json.dumps(payload, default=str),
+            OTelAttribute.PARENT_ID: options.parent_id,
+            OTelAttribute.PARENT_STEP_RUN_ID: options.parent_step_run_id,
+            OTelAttribute.CHILD_INDEX: options.child_index,
+            OTelAttribute.CHILD_KEY: options.child_key,
+            OTelAttribute.NAMESPACE: options.namespace,
+            OTelAttribute.ADDITIONAL_METADATA: json.dumps(
+                options.additional_metadata, default=str
+            ),
+            OTelAttribute.PRIORITY: options.priority,
+            OTelAttribute.DESIRED_WORKER_ID: options.desired_worker_id,
+            OTelAttribute.STICKY: options.sticky,
+            OTelAttribute.KEY: options.key,
+        }
+
         with self._tracer.start_as_current_span(
             "hatchet.run_workflow",
+            attributes={
+                f"hatchet.{k.value}": v
+                for k, v in attributes.items()
+                if v
+                and k not in self.config.otel.excluded_attributes
+                and v != "{}"
+                and v != "[]"
+            },
         ):
-            return wrapped(*args, **kwargs)
+            options = TriggerWorkflowOptions(
+                **options.model_dump(exclude={"additional_metadata"}),
+                additional_metadata=_inject_traceparent_into_metadata(
+                    dict(options.additional_metadata),
+                ),
+            )
+
+            return wrapped(workflow_name, payload, options)

     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     async def _wrap_async_run_workflow(
         self,
         wrapped: Callable[
-            [str, Any, TriggerWorkflowOptions | None],
+            [str, JSONSerializableMapping, TriggerWorkflowOptions],
             Coroutine[None, None, WorkflowRunRef],
         ],
         instance: AdminClient,
-        args: tuple[str, Any, TriggerWorkflowOptions | None],
-        kwargs: dict[str, str | Any | TriggerWorkflowOptions | None],
+        args: tuple[str, JSONSerializableMapping, TriggerWorkflowOptions],
+        kwargs: dict[str, str | JSONSerializableMapping | TriggerWorkflowOptions],
     ) -> WorkflowRunRef:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        workflow_name = cast(str, params[0])
+        payload = cast(JSONSerializableMapping, params[1])
+        options = cast(
+            TriggerWorkflowOptions,
+            params[2] if len(params) > 2 else TriggerWorkflowOptions(),
+        )
+
+        attributes = {
+            OTelAttribute.WORKFLOW_NAME: workflow_name,
+            OTelAttribute.ACTION_PAYLOAD: json.dumps(payload, default=str),
+            OTelAttribute.PARENT_ID: options.parent_id,
+            OTelAttribute.PARENT_STEP_RUN_ID: options.parent_step_run_id,
+            OTelAttribute.CHILD_INDEX: options.child_index,
+            OTelAttribute.CHILD_KEY: options.child_key,
+            OTelAttribute.NAMESPACE: options.namespace,
+            OTelAttribute.ADDITIONAL_METADATA: json.dumps(
+                options.additional_metadata, default=str
+            ),
+            OTelAttribute.PRIORITY: options.priority,
+            OTelAttribute.DESIRED_WORKER_ID: options.desired_worker_id,
+            OTelAttribute.STICKY: options.sticky,
+            OTelAttribute.KEY: options.key,
+        }
+
         with self._tracer.start_as_current_span(
             "hatchet.run_workflow",
+            attributes={
+                f"hatchet.{k.value}": v
+                for k, v in attributes.items()
+                if v
+                and k not in self.config.otel.excluded_attributes
+                and v != "{}"
+                and v != "[]"
+            },
         ):
-            return await wrapped(*args, **kwargs)
+            options = TriggerWorkflowOptions(
+                **options.model_dump(exclude={"additional_metadata"}),
+                additional_metadata=_inject_traceparent_into_metadata(
+                    dict(options.additional_metadata),
+                ),
+            )
+
+            return await wrapped(workflow_name, payload, options)

+    def _ts_to_iso(self, ts: Union[datetime, timestamp_pb2.Timestamp]) -> str:
+        if isinstance(ts, datetime):
+            return ts.isoformat()
+        elif isinstance(ts, timestamp_pb2.Timestamp):
+            return ts.ToJsonString()
+        else:
+            raise TypeError(f"Unsupported type for timestamp conversion: {type(ts)}")
+
+    ## IMPORTANT: Keep these types in sync with the wrapped method's signature
+    def _wrap_schedule_workflow(
+        self,
+        wrapped: Callable[
+            [
+                str,
+                list[Union[datetime, timestamp_pb2.Timestamp]],
+                JSONSerializableMapping,
+                ScheduleTriggerWorkflowOptions,
+            ],
+            v0_workflow_protos.WorkflowVersion,
+        ],
+        instance: AdminClient,
+        args: tuple[
+            str,
+            list[Union[datetime, timestamp_pb2.Timestamp]],
+            JSONSerializableMapping,
+            ScheduleTriggerWorkflowOptions,
+        ],
+        kwargs: dict[
+            str,
+            str
+            | list[Union[datetime, timestamp_pb2.Timestamp]]
+            | JSONSerializableMapping
+            | ScheduleTriggerWorkflowOptions,
+        ],
+    ) -> v0_workflow_protos.WorkflowVersion:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+
+        workflow_name = cast(str, params[0])
+        schedules = cast(list[Union[datetime, timestamp_pb2.Timestamp]], params[1])
+        input = cast(JSONSerializableMapping, params[2])
+        options = cast(
+            ScheduleTriggerWorkflowOptions,
+            params[3] if len(params) > 3 else ScheduleTriggerWorkflowOptions(),
+        )
+
+        attributes = {
+            OTelAttribute.WORKFLOW_NAME: workflow_name,
+            OTelAttribute.RUN_AT_TIMESTAMPS: json.dumps(
+                [self._ts_to_iso(ts) for ts in schedules]
+            ),
+            OTelAttribute.ACTION_PAYLOAD: json.dumps(input, default=str),
+            OTelAttribute.PARENT_ID: options.parent_id,
+            OTelAttribute.PARENT_STEP_RUN_ID: options.parent_step_run_id,
+            OTelAttribute.CHILD_INDEX: options.child_index,
+            OTelAttribute.CHILD_KEY: options.child_key,
+            OTelAttribute.NAMESPACE: options.namespace,
+            OTelAttribute.ADDITIONAL_METADATA: json.dumps(
+                options.additional_metadata, default=str
+            ),
+            OTelAttribute.PRIORITY: options.priority,
+        }
+
+        with self._tracer.start_as_current_span(
+            "hatchet.schedule_workflow",
+            attributes={
+                f"hatchet.{k.value}": v
+                for k, v in attributes.items()
+                if v
+                and k not in self.config.otel.excluded_attributes
+                and v != "{}"
+                and v != "[]"
+            },
+        ):
+            options = ScheduleTriggerWorkflowOptions(
+                **options.model_dump(exclude={"additional_metadata"}),
+                additional_metadata=_inject_traceparent_into_metadata(
+                    dict(options.additional_metadata),
+                ),
+            )
+
+            return wrapped(workflow_name, schedules, input, options)
+
     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     def _wrap_run_workflows(
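The new `_ts_to_iso` helper accepts either timestamp flavor; a quick check of both branches:

```python
from datetime import datetime, timezone

from google.protobuf import timestamp_pb2

dt = datetime(2025, 5, 29, 12, 0, tzinfo=timezone.utc)
print(dt.isoformat())  # 2025-05-29T12:00:00+00:00

ts = timestamp_pb2.Timestamp()
ts.FromDatetime(dt)
print(ts.ToJsonString())  # 2025-05-29T12:00:00Z
```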
@@ -362,10 +650,37 @@ class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
         args: tuple[list[WorkflowRunTriggerConfig],],
         kwargs: dict[str, list[WorkflowRunTriggerConfig]],
     ) -> list[WorkflowRunRef]:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+        workflow_run_configs = cast(list[WorkflowRunTriggerConfig], params[0])
+
+        num_workflows = len(workflow_run_configs)
+        unique_workflow_names = {
+            config.workflow_name for config in workflow_run_configs
+        }
+
         with self._tracer.start_as_current_span(
             "hatchet.run_workflows",
+            attributes={
+                "hatchet.num_workflows": num_workflows,
+                "hatchet.unique_workflow_names": json.dumps(
+                    unique_workflow_names, default=str
+                ),
+            },
         ):
-            return wrapped(*args, **kwargs)
+            workflow_run_configs_with_meta = [
+                WorkflowRunTriggerConfig(
+                    **config.model_dump(exclude={"options"}),
+                    options=TriggerWorkflowOptions(
+                        **config.options.model_dump(exclude={"additional_metadata"}),
+                        additional_metadata=_inject_traceparent_into_metadata(
+                            dict(config.options.additional_metadata),
+                        ),
+                    ),
+                )
+                for config in workflow_run_configs
+            ]
+
+            return wrapped(workflow_run_configs_with_meta)

     ## IMPORTANT: Keep these types in sync with the wrapped method's signature
     async def _wrap_async_run_workflows(
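Each wrapper rebuilds its options object with the traceparent merged in, using the same copy-with-override idiom; a self-contained sketch with a stand-in model (the `Options` class and the traceparent value are illustrative):

```python
from pydantic import BaseModel

class Options(BaseModel):
    namespace: str | None = None
    additional_metadata: dict[str, str] = {}

opts = Options(namespace="ns", additional_metadata={"hello": "world"})

# Dump everything except the field being replaced, then rebuild with the
# injected traceparent; the caller's original options object is untouched.
new_opts = Options(
    **opts.model_dump(exclude={"additional_metadata"}),
    additional_metadata={
        **opts.additional_metadata,
        "traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01",
    },
)
```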
@@ -378,10 +693,26 @@ class HatchetInstrumentor(BaseInstrumentor):  # type: ignore[misc]
         args: tuple[list[WorkflowRunTriggerConfig],],
         kwargs: dict[str, list[WorkflowRunTriggerConfig]],
     ) -> list[WorkflowRunRef]:
+        params = self.extract_bound_args(wrapped, args, kwargs)
+        workflow_run_configs = cast(list[WorkflowRunTriggerConfig], params[0])
+
         with self._tracer.start_as_current_span(
             "hatchet.run_workflows",
         ):
-            return await wrapped(*args, **kwargs)
+            workflow_run_configs_with_meta = [
+                WorkflowRunTriggerConfig(
+                    **config.model_dump(exclude={"options"}),
+                    options=TriggerWorkflowOptions(
+                        **config.options.model_dump(exclude={"additional_metadata"}),
+                        additional_metadata=_inject_traceparent_into_metadata(
+                            dict(config.options.additional_metadata),
+                        ),
+                    ),
+                )
+                for config in workflow_run_configs
+            ]
+
+            return await wrapped(workflow_run_configs_with_meta)

     def _uninstrument(self, **kwargs: InstrumentKwargs) -> None:
         self.tracer_provider = NoOpTracerProvider()
@@ -2,8 +2,18 @@ from enum import Enum


 class OTelAttribute(str, Enum):
+    ## Shared
+    NAMESPACE = "namespace"
+    ADDITIONAL_METADATA = "additional_metadata"
+    WORKFLOW_NAME = "workflow_name"
+
+    PRIORITY = "priority"
+
+    ## Unfortunately named - this corresponds to all types of payloads, not just actions
+    ACTION_PAYLOAD = "payload"
+
+    ## Action
     ACTION_NAME = "action_name"
-    ACTION_PAYLOAD = "action_payload"
     CHILD_WORKFLOW_INDEX = "child_workflow_index"
     CHILD_WORKFLOW_KEY = "child_workflow_key"
     GET_GROUP_KEY_RUN_ID = "get_group_key_run_id"
@@ -14,6 +24,21 @@ class OTelAttribute(str, Enum):
     TENANT_ID = "tenant_id"
     WORKER_ID = "worker_id"
     WORKFLOW_ID = "workflow_id"
-    WORKFLOW_NAME = "workflow_name"
     WORKFLOW_RUN_ID = "workflow_run_id"
     WORKFLOW_VERSION_ID = "workflow_version_id"
+
+    ## Push Event
+    EVENT_KEY = "event_key"
+    FILTER_SCOPE = "scope"
+
+    ## Trigger Workflow
+    PARENT_ID = "parent_id"
+    PARENT_STEP_RUN_ID = "parent_step_run_id"
+    CHILD_INDEX = "child_index"
+    CHILD_KEY = "child_key"
+    DESIRED_WORKER_ID = "desired_worker_id"
+    STICKY = "sticky"
+    KEY = "key"
+
+    ## Schedule Workflow
+    RUN_AT_TIMESTAMPS = "run_at_timestamps"
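These enum values feed the span-attribute filter shown in the instrumentor diff; pulled out for illustration, the rule drops falsy values, empty JSON blobs, and anything the user excluded:

```python
from hatchet_sdk.utils.opentelemetry import OTelAttribute

excluded = {OTelAttribute.ACTION_PAYLOAD}  # e.g. supplied via ClientConfig's otel settings
attributes = {
    OTelAttribute.EVENT_KEY: "otel:event",
    OTelAttribute.ACTION_PAYLOAD: '{"test": "test"}',  # excluded above
    OTelAttribute.ADDITIONAL_METADATA: "{}",  # empty blob: filtered out
}

span_attributes = {
    f"hatchet.{k.value}": v
    for k, v in attributes.items()
    if v and k not in excluded and v != "{}" and v != "[]"
}
print(span_attributes)  # {'hatchet.event_key': 'otel:event'}
```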
sdks/python/poetry.lock (generated): 90 lines changed
@@ -1519,15 +1519,15 @@ voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"]

 [[package]]
 name = "opentelemetry-api"
-version = "1.31.1"
+version = "1.33.1"
 description = "OpenTelemetry Python API"
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
 markers = "extra == \"otel\""
 files = [
-    {file = "opentelemetry_api-1.31.1-py3-none-any.whl", hash = "sha256:1511a3f470c9c8a32eeea68d4ea37835880c0eed09dd1a0187acc8b1301da0a1"},
-    {file = "opentelemetry_api-1.31.1.tar.gz", hash = "sha256:137ad4b64215f02b3000a0292e077641c8611aab636414632a9b9068593b7e91"},
+    {file = "opentelemetry_api-1.33.1-py3-none-any.whl", hash = "sha256:4db83ebcf7ea93e64637ec6ee6fabee45c5cbe4abd9cf3da95c43828ddb50b83"},
+    {file = "opentelemetry_api-1.33.1.tar.gz", hash = "sha256:1c6055fc0a2d3f23a50c7e17e16ef75ad489345fd3df1f8b8af7c0bbf8a109e8"},
 ]

 [package.dependencies]
@@ -1536,69 +1536,69 @@ importlib-metadata = ">=6.0,<8.7.0"

 [[package]]
 name = "opentelemetry-distro"
-version = "0.52b1"
+version = "0.54b1"
 description = "OpenTelemetry Python Distro"
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
 markers = "extra == \"otel\""
 files = [
-    {file = "opentelemetry_distro-0.52b1-py3-none-any.whl", hash = "sha256:5562a039e4c36524d0dbb45a0857f8acfda3afbef7e8462513c7946309eb5c8c"},
-    {file = "opentelemetry_distro-0.52b1.tar.gz", hash = "sha256:cb8df34a95034c7d038fd245556fb732853dc66473746d652bee6c5c2fb7dfc6"},
+    {file = "opentelemetry_distro-0.54b1-py3-none-any.whl", hash = "sha256:009486513b32b703e275bb2f9ccaf5791676bbf5e2dcfdd90201ddc8f56f122b"},
+    {file = "opentelemetry_distro-0.54b1.tar.gz", hash = "sha256:61d6b97bb7a245fddbb829345bb4ad18be39eb52f770fab89a127107fca3149f"},
 ]

 [package.dependencies]
 opentelemetry-api = ">=1.12,<2.0"
-opentelemetry-instrumentation = "0.52b1"
+opentelemetry-instrumentation = "0.54b1"
 opentelemetry-sdk = ">=1.13,<2.0"

 [package.extras]
-otlp = ["opentelemetry-exporter-otlp (==1.31.1)"]
+otlp = ["opentelemetry-exporter-otlp (==1.33.1)"]

 [[package]]
 name = "opentelemetry-exporter-otlp"
-version = "1.31.1"
+version = "1.33.1"
 description = "OpenTelemetry Collector Exporters"
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
 markers = "extra == \"otel\""
 files = [
-    {file = "opentelemetry_exporter_otlp-1.31.1-py3-none-any.whl", hash = "sha256:36286c28709cbfba5177129ec30bfe4de67bdec8f375c1703014e0eea44322c6"},
-    {file = "opentelemetry_exporter_otlp-1.31.1.tar.gz", hash = "sha256:004db12bfafb9e07b79936783d91db214b1e208a152b5c36b1f2ef2264940692"},
+    {file = "opentelemetry_exporter_otlp-1.33.1-py3-none-any.whl", hash = "sha256:9bcf1def35b880b55a49e31ebd63910edac14b294fd2ab884953c4deaff5b300"},
+    {file = "opentelemetry_exporter_otlp-1.33.1.tar.gz", hash = "sha256:4d050311ea9486e3994575aa237e32932aad58330a31fba24fdba5c0d531cf04"},
 ]

 [package.dependencies]
-opentelemetry-exporter-otlp-proto-grpc = "1.31.1"
-opentelemetry-exporter-otlp-proto-http = "1.31.1"
+opentelemetry-exporter-otlp-proto-grpc = "1.33.1"
+opentelemetry-exporter-otlp-proto-http = "1.33.1"

 [[package]]
 name = "opentelemetry-exporter-otlp-proto-common"
-version = "1.31.1"
+version = "1.33.1"
 description = "OpenTelemetry Protobuf encoding"
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
 markers = "extra == \"otel\""
 files = [
-    {file = "opentelemetry_exporter_otlp_proto_common-1.31.1-py3-none-any.whl", hash = "sha256:7cadf89dbab12e217a33c5d757e67c76dd20ce173f8203e7370c4996f2e9efd8"},
-    {file = "opentelemetry_exporter_otlp_proto_common-1.31.1.tar.gz", hash = "sha256:c748e224c01f13073a2205397ba0e415dcd3be9a0f95101ba4aace5fc730e0da"},
+    {file = "opentelemetry_exporter_otlp_proto_common-1.33.1-py3-none-any.whl", hash = "sha256:b81c1de1ad349785e601d02715b2d29d6818aed2c809c20219f3d1f20b038c36"},
+    {file = "opentelemetry_exporter_otlp_proto_common-1.33.1.tar.gz", hash = "sha256:c57b3fa2d0595a21c4ed586f74f948d259d9949b58258f11edb398f246bec131"},
 ]

 [package.dependencies]
-opentelemetry-proto = "1.31.1"
+opentelemetry-proto = "1.33.1"

 [[package]]
 name = "opentelemetry-exporter-otlp-proto-grpc"
-version = "1.31.1"
+version = "1.33.1"
 description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
 markers = "extra == \"otel\""
 files = [
-    {file = "opentelemetry_exporter_otlp_proto_grpc-1.31.1-py3-none-any.whl", hash = "sha256:f4055ad2c9a2ea3ae00cbb927d6253233478b3b87888e197d34d095a62305fae"},
-    {file = "opentelemetry_exporter_otlp_proto_grpc-1.31.1.tar.gz", hash = "sha256:c7f66b4b333c52248dc89a6583506222c896c74824d5d2060b818ae55510939a"},
+    {file = "opentelemetry_exporter_otlp_proto_grpc-1.33.1-py3-none-any.whl", hash = "sha256:7e8da32c7552b756e75b4f9e9c768a61eb47dee60b6550b37af541858d669ce1"},
+    {file = "opentelemetry_exporter_otlp_proto_grpc-1.33.1.tar.gz", hash = "sha256:345696af8dc19785fac268c8063f3dc3d5e274c774b308c634f39d9c21955728"},
 ]

 [package.dependencies]
@@ -1609,62 +1609,62 @@ grpcio = [
     {version = ">=1.66.2,<2.0.0", markers = "python_version >= \"3.13\""},
 ]
 opentelemetry-api = ">=1.15,<2.0"
-opentelemetry-exporter-otlp-proto-common = "1.31.1"
-opentelemetry-proto = "1.31.1"
-opentelemetry-sdk = ">=1.31.1,<1.32.0"
+opentelemetry-exporter-otlp-proto-common = "1.33.1"
+opentelemetry-proto = "1.33.1"
+opentelemetry-sdk = ">=1.33.1,<1.34.0"

 [[package]]
 name = "opentelemetry-exporter-otlp-proto-http"
-version = "1.31.1"
+version = "1.33.1"
 description = "OpenTelemetry Collector Protobuf over HTTP Exporter"
 optional = true
 python-versions = ">=3.8"
 groups = ["main"]
 markers = "extra == \"otel\""
 files = [
-    {file = "opentelemetry_exporter_otlp_proto_http-1.31.1-py3-none-any.whl", hash = "sha256:5dee1f051f096b13d99706a050c39b08e3f395905f29088bfe59e54218bd1cf4"},
-    {file = "opentelemetry_exporter_otlp_proto_http-1.31.1.tar.gz", hash = "sha256:723bd90eb12cfb9ae24598641cb0c92ca5ba9f1762103902f6ffee3341ba048e"},
+    {file = "opentelemetry_exporter_otlp_proto_http-1.33.1-py3-none-any.whl", hash = "sha256:ebd6c523b89a2ecba0549adb92537cc2bf647b4ee61afbbd5a4c6535aa3da7cf"},
+    {file = "opentelemetry_exporter_otlp_proto_http-1.33.1.tar.gz", hash = "sha256:46622d964a441acb46f463ebdc26929d9dec9efb2e54ef06acdc7305e8593c38"},
 ]

 [package.dependencies]
 deprecated = ">=1.2.6"
 googleapis-common-protos = ">=1.52,<2.0"
 opentelemetry-api = ">=1.15,<2.0"
-opentelemetry-exporter-otlp-proto-common = "1.31.1"
-opentelemetry-proto = "1.31.1"
-opentelemetry-sdk = ">=1.31.1,<1.32.0"
+opentelemetry-exporter-otlp-proto-common = "1.33.1"
+opentelemetry-proto = "1.33.1"
+opentelemetry-sdk = ">=1.33.1,<1.34.0"
 requests = ">=2.7,<3.0"
||||
[[package]]
|
||||
name = "opentelemetry-instrumentation"
|
||||
version = "0.52b1"
|
||||
version = "0.54b1"
|
||||
description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"otel\""
|
||||
files = [
|
||||
{file = "opentelemetry_instrumentation-0.52b1-py3-none-any.whl", hash = "sha256:8c0059c4379d77bbd8015c8d8476020efe873c123047ec069bb335e4b8717477"},
|
||||
{file = "opentelemetry_instrumentation-0.52b1.tar.gz", hash = "sha256:739f3bfadbbeec04dd59297479e15660a53df93c131d907bb61052e3d3c1406f"},
|
||||
{file = "opentelemetry_instrumentation-0.54b1-py3-none-any.whl", hash = "sha256:a4ae45f4a90c78d7006c51524f57cd5aa1231aef031eae905ee34d5423f5b198"},
|
||||
{file = "opentelemetry_instrumentation-0.54b1.tar.gz", hash = "sha256:7658bf2ff914b02f246ec14779b66671508125c0e4227361e56b5ebf6cef0aec"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
opentelemetry-api = ">=1.4,<2.0"
|
||||
opentelemetry-semantic-conventions = "0.52b1"
|
||||
opentelemetry-semantic-conventions = "0.54b1"
|
||||
packaging = ">=18.0"
|
||||
wrapt = ">=1.0.0,<2.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry-proto"
|
||||
version = "1.31.1"
|
||||
version = "1.33.1"
|
||||
description = "OpenTelemetry Python Proto"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"otel\""
|
||||
files = [
|
||||
{file = "opentelemetry_proto-1.31.1-py3-none-any.whl", hash = "sha256:1398ffc6d850c2f1549ce355744e574c8cd7c1dba3eea900d630d52c41d07178"},
|
||||
{file = "opentelemetry_proto-1.31.1.tar.gz", hash = "sha256:d93e9c2b444e63d1064fb50ae035bcb09e5822274f1683886970d2734208e790"},
|
||||
{file = "opentelemetry_proto-1.33.1-py3-none-any.whl", hash = "sha256:243d285d9f29663fc7ea91a7171fcc1ccbbfff43b48df0774fd64a37d98eda70"},
|
||||
{file = "opentelemetry_proto-1.33.1.tar.gz", hash = "sha256:9627b0a5c90753bf3920c398908307063e4458b287bb890e5c1d6fa11ad50b68"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1672,38 +1672,38 @@ protobuf = ">=5.0,<6.0"
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry-sdk"
|
||||
version = "1.31.1"
|
||||
version = "1.33.1"
|
||||
description = "OpenTelemetry Python SDK"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"otel\""
|
||||
files = [
|
||||
{file = "opentelemetry_sdk-1.31.1-py3-none-any.whl", hash = "sha256:882d021321f223e37afaca7b4e06c1d8bbc013f9e17ff48a7aa017460a8e7dae"},
|
||||
{file = "opentelemetry_sdk-1.31.1.tar.gz", hash = "sha256:c95f61e74b60769f8ff01ec6ffd3d29684743404603df34b20aa16a49dc8d903"},
|
||||
{file = "opentelemetry_sdk-1.33.1-py3-none-any.whl", hash = "sha256:19ea73d9a01be29cacaa5d6c8ce0adc0b7f7b4d58cc52f923e4413609f670112"},
|
||||
{file = "opentelemetry_sdk-1.33.1.tar.gz", hash = "sha256:85b9fcf7c3d23506fbc9692fd210b8b025a1920535feec50bd54ce203d57a531"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
opentelemetry-api = "1.31.1"
|
||||
opentelemetry-semantic-conventions = "0.52b1"
|
||||
opentelemetry-api = "1.33.1"
|
||||
opentelemetry-semantic-conventions = "0.54b1"
|
||||
typing-extensions = ">=3.7.4"
|
||||
|
||||
[[package]]
|
||||
name = "opentelemetry-semantic-conventions"
|
||||
version = "0.52b1"
|
||||
version = "0.54b1"
|
||||
description = "OpenTelemetry Semantic Conventions"
|
||||
optional = true
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
markers = "extra == \"otel\""
|
||||
files = [
|
||||
{file = "opentelemetry_semantic_conventions-0.52b1-py3-none-any.whl", hash = "sha256:72b42db327e29ca8bb1b91e8082514ddf3bbf33f32ec088feb09526ade4bc77e"},
|
||||
{file = "opentelemetry_semantic_conventions-0.52b1.tar.gz", hash = "sha256:7b3d226ecf7523c27499758a58b542b48a0ac8d12be03c0488ff8ec60c5bae5d"},
|
||||
{file = "opentelemetry_semantic_conventions-0.54b1-py3-none-any.whl", hash = "sha256:29dab644a7e435b58d3a3918b58c333c92686236b30f7891d5e51f02933ca60d"},
|
||||
{file = "opentelemetry_semantic_conventions-0.54b1.tar.gz", hash = "sha256:d1cecedae15d19bdaafca1e56b29a66aa286f50b5d08f036a145c7f3e9ef9cee"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
deprecated = ">=1.2.6"
|
||||
opentelemetry-api = "1.31.1"
|
||||
opentelemetry-api = "1.33.1"
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[tool.poetry]
|
||||
name = "hatchet-sdk"
|
||||
version = "1.10.3"
|
||||
version = "1.11.0"
|
||||
description = ""
|
||||
authors = ["Alexander Belanger <alexander@hatchet.run>"]
|
||||
readme = "README.md"