Feat: Expand OpenTelemetry Integration (#1792)

* feat: initial work expanding instrumentation

* feat: automatically inject traceparent into metadata in instrumentor

* feat: deprecate old attrs

* feat: add new namespaced attrs

* chore: version

* fix: type bug

* feat: tracing scheduled workflows

* fix: don't need duplication

* feat: convert timestamps to ISO

* fix: warn on use of old methods

* feat: changelog

* fix: enum breakages

* fix: docs

* feat: add a couple of additional attrs to bulk events

* cleanup: types

* fix: comment

* fix: example

* feat: langfuse example

* tweak: edge cases

* feat: example cleanup

* feat: examples

* chore: gen

* feat: langfuse docs

* feat: extend docs

* fix: lint

* fix: disclaimer

* fix: start and end whitespace

* fix: rm langfuse for now

* fix: rm langfuse trace pic

* fix: ci config
This commit is contained in:
Matt Kaye
2025-05-30 11:20:00 -04:00
committed by GitHub
parent 15fb9f8ac0
commit bd1452482e
14 changed files with 513 additions and 167 deletions
+9
View File
@@ -5,6 +5,15 @@ All notable changes to Hatchet's Python SDK will be documented in this changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.11.0] - 2025-05-29
### Changed
- Significant improvements to the OpenTelemetry instrumentor, including:
- Traceparents are automatically propagated through the metadata now so the client does not need to provide them manually.
- Added a handful of attributes to the `run_workflow`, `push_event`, etc. spans, such as the workflow being run / event being pushed, the metadata, and so on.
- Added tracing for workflow scheduling
## [1.10.2] - 2025-05-19
### Changed
@@ -5,21 +5,17 @@ from examples.opentelemetry_instrumentation.tracer import trace_provider
from examples.opentelemetry_instrumentation.worker import otel_workflow
from hatchet_sdk.clients.admin import TriggerWorkflowOptions
from hatchet_sdk.clients.events import BulkPushEventWithMetadata, PushEventOptions
from hatchet_sdk.opentelemetry.instrumentor import (
HatchetInstrumentor,
inject_traceparent_into_metadata,
)
from hatchet_sdk.opentelemetry.instrumentor import HatchetInstrumentor
instrumentor = HatchetInstrumentor(tracer_provider=trace_provider)
tracer = trace_provider.get_tracer(__name__)
def create_additional_metadata() -> dict[str, str]:
return inject_traceparent_into_metadata({"hello": "world"})
ADDITIONAL_METADATA = {"hello": "world"}
def create_push_options() -> PushEventOptions:
return PushEventOptions(additional_metadata=create_additional_metadata())
return PushEventOptions(additional_metadata=ADDITIONAL_METADATA)
def push_event() -> None:
@@ -48,12 +44,12 @@ def bulk_push_event() -> None:
BulkPushEventWithMetadata(
key="otel:event",
payload={"test": "test 1"},
additional_metadata=create_additional_metadata(),
additional_metadata=ADDITIONAL_METADATA,
),
BulkPushEventWithMetadata(
key="otel:event",
payload={"test": "test 2"},
additional_metadata=create_additional_metadata(),
additional_metadata=ADDITIONAL_METADATA,
),
],
)
@@ -67,12 +63,12 @@ async def async_bulk_push_event() -> None:
BulkPushEventWithMetadata(
key="otel:event",
payload={"test": "test 1"},
additional_metadata=create_additional_metadata(),
additional_metadata=ADDITIONAL_METADATA,
),
BulkPushEventWithMetadata(
key="otel:event",
payload={"test": "test 2"},
additional_metadata=create_additional_metadata(),
additional_metadata=ADDITIONAL_METADATA,
),
],
)
@@ -82,9 +78,7 @@ def run_workflow() -> None:
print("\nrun_workflow")
with tracer.start_as_current_span("run_workflow"):
otel_workflow.run(
options=TriggerWorkflowOptions(
additional_metadata=create_additional_metadata()
),
options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),
)
@@ -92,9 +86,7 @@ async def async_run_workflow() -> None:
print("\nasync_run_workflow")
with tracer.start_as_current_span("async_run_workflow"):
await otel_workflow.aio_run(
options=TriggerWorkflowOptions(
additional_metadata=create_additional_metadata()
),
options=TriggerWorkflowOptions(additional_metadata=ADDITIONAL_METADATA),
)
@@ -105,12 +97,12 @@ def run_workflows() -> None:
[
otel_workflow.create_bulk_run_item(
options=TriggerWorkflowOptions(
additional_metadata=create_additional_metadata()
additional_metadata=ADDITIONAL_METADATA
)
),
otel_workflow.create_bulk_run_item(
options=TriggerWorkflowOptions(
additional_metadata=create_additional_metadata()
additional_metadata=ADDITIONAL_METADATA
)
),
],
@@ -124,12 +116,12 @@ async def async_run_workflows() -> None:
[
otel_workflow.create_bulk_run_item(
options=TriggerWorkflowOptions(
additional_metadata=create_additional_metadata()
additional_metadata=ADDITIONAL_METADATA
)
),
otel_workflow.create_bulk_run_item(
options=TriggerWorkflowOptions(
additional_metadata=create_additional_metadata()
additional_metadata=ADDITIONAL_METADATA
)
),
],
@@ -1,6 +1,8 @@
import json
from importlib.metadata import version
from typing import Any, Callable, Collection, Coroutine
from typing import Any, Callable, Collection, Coroutine, Union, cast
from hatchet_sdk.contracts import workflows_pb2 as v0_workflow_protos
from hatchet_sdk.utils.typing import JSONSerializableMapping
try:
@@ -26,20 +28,29 @@ except (RuntimeError, ImportError, ModuleNotFoundError):
"To use the HatchetInstrumentor, you must install Hatchet's `otel` extra using (e.g.) `pip install hatchet-sdk[otel]`"
)
import inspect
from datetime import datetime
from google.protobuf import timestamp_pb2
import hatchet_sdk
from hatchet_sdk import ClientConfig
from hatchet_sdk.clients.admin import (
AdminClient,
ScheduleTriggerWorkflowOptions,
TriggerWorkflowOptions,
WorkflowRunTriggerConfig,
)
from hatchet_sdk.clients.events import (
BulkPushEventOptions,
BulkPushEventWithMetadata,
EventClient,
PushEventOptions,
)
from hatchet_sdk.contracts.events_pb2 import Event
from hatchet_sdk.logger import logger
from hatchet_sdk.runnables.action import Action
from hatchet_sdk.utils.opentelemetry import OTelAttribute
from hatchet_sdk.worker.runner.runner import Runner
from hatchet_sdk.workflow_run import WorkflowRunRef
@@ -51,6 +62,13 @@ OTEL_TRACEPARENT_KEY = "traceparent"
def create_traceparent() -> str | None:
logger.warning(
"As of SDK version 1.11.0, you no longer need to call `create_traceparent` manually. The traceparent will be automatically created by the instrumentor and injected into the metadata of actions and events when appropriate. This method will be removed in a future version.",
)
return _create_traceparent()
def _create_traceparent() -> str | None:
"""
Creates and returns a W3C traceparent header value using OpenTelemetry's context propagation.
@@ -70,6 +88,16 @@ def create_traceparent() -> str | None:
def parse_carrier_from_metadata(
metadata: JSONSerializableMapping | None,
) -> Context | None:
logger.warning(
"As of SDK version 1.11.0, you no longer need to call `parse_carrier_from_metadata` manually. This method will be removed in a future version.",
)
return _parse_carrier_from_metadata(metadata)
def _parse_carrier_from_metadata(
metadata: JSONSerializableMapping | None,
) -> Context | None:
"""
Parses OpenTelemetry trace context from a metadata dictionary.
@@ -86,7 +114,7 @@ def parse_carrier_from_metadata(
:Example:
>>> metadata = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"}
>>> context = parse_carrier_from_metadata(metadata)
>>> context = _parse_carrier_from_metadata(metadata)
"""
if not metadata:
@@ -102,6 +130,16 @@ def parse_carrier_from_metadata(
def inject_traceparent_into_metadata(
metadata: dict[str, str], traceparent: str | None = None
) -> dict[str, str]:
logger.warning(
"As of SDK version 1.11.0, you no longer need to call `inject_traceparent_into_metadata` manually. The traceparent will automatically be injected by the instrumentor. This method will be removed in a future version.",
)
return _inject_traceparent_into_metadata(metadata, traceparent)
def _inject_traceparent_into_metadata(
metadata: dict[str, str], traceparent: str | None = None
) -> dict[str, str]:
"""
Injects OpenTelemetry `traceparent` into a metadata dictionary.
@@ -125,9 +163,8 @@ def inject_traceparent_into_metadata(
>>> print(new_metadata)
{"key": "value", "traceparent": "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01"}
"""
if not traceparent:
traceparent = create_traceparent()
traceparent = _create_traceparent()
if not traceparent:
return metadata
@@ -213,6 +250,14 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
self._wrap_async_run_workflow,
)
## IMPORTANT: We don't need to instrument the async version of `schedule_workflow`
## because it just calls the sync version internally.
wrap_function_wrapper(
hatchet_sdk,
"clients.admin.AdminClient.schedule_workflow",
self._wrap_schedule_workflow,
)
wrap_function_wrapper(
hatchet_sdk,
"clients.admin.AdminClient.run_workflows",
@@ -225,6 +270,19 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
self._wrap_async_run_workflows,
)
def extract_bound_args(
self,
wrapped_func: Callable[..., Any],
args: tuple[Any, ...],
kwargs: dict[str, Any],
) -> list[Any]:
sig = inspect.signature(wrapped_func)
bound_args = sig.bind(*args, **kwargs)
bound_args.apply_defaults()
return list(bound_args.arguments.values())
## IMPORTANT: Keep these types in sync with the wrapped method's signature
async def _wrap_handle_start_step_run(
self,
@@ -233,8 +291,11 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
args: tuple[Action],
kwargs: Any,
) -> Exception | None:
action = args[0]
traceparent = parse_carrier_from_metadata(action.additional_metadata)
params = self.extract_bound_args(wrapped, args, kwargs)
action = cast(Action, params[0])
traceparent = _parse_carrier_from_metadata(action.additional_metadata)
with self._tracer.start_as_current_span(
"hatchet.start_step_run",
@@ -290,66 +351,293 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
## IMPORTANT: Keep these types in sync with the wrapped method's signature
def _wrap_push_event(
self,
wrapped: Callable[[str, dict[str, Any], PushEventOptions | None], Event],
wrapped: Callable[[str, dict[str, Any], PushEventOptions], Event],
instance: EventClient,
args: tuple[
str,
dict[str, Any],
PushEventOptions | None,
PushEventOptions,
],
kwargs: dict[str, str | dict[str, Any] | PushEventOptions | None],
kwargs: dict[str, str | dict[str, Any] | PushEventOptions],
) -> Event:
params = self.extract_bound_args(wrapped, args, kwargs)
event_key = cast(str, params[0])
payload = cast(JSONSerializableMapping, params[1])
options = cast(
PushEventOptions,
params[2] if len(params) > 2 else PushEventOptions(),
)
attributes = {
OTelAttribute.EVENT_KEY: event_key,
OTelAttribute.ACTION_PAYLOAD: json.dumps(payload, default=str),
OTelAttribute.ADDITIONAL_METADATA: json.dumps(
options.additional_metadata, default=str
),
OTelAttribute.NAMESPACE: options.namespace,
OTelAttribute.PRIORITY: options.priority,
OTelAttribute.FILTER_SCOPE: options.scope,
}
with self._tracer.start_as_current_span(
"hatchet.push_event",
attributes={
f"hatchet.{k.value}": v
for k, v in attributes.items()
if v
and k not in self.config.otel.excluded_attributes
and v != "{}"
and v != "[]"
},
):
return wrapped(*args, **kwargs)
options = PushEventOptions(
**options.model_dump(exclude={"additional_metadata"}),
additional_metadata=_inject_traceparent_into_metadata(
dict(options.additional_metadata),
),
)
return wrapped(event_key, dict(payload), options)
## IMPORTANT: Keep these types in sync with the wrapped method's signature
def _wrap_bulk_push_event(
self,
wrapped: Callable[
[list[BulkPushEventWithMetadata], PushEventOptions | None], list[Event]
[list[BulkPushEventWithMetadata], BulkPushEventOptions], list[Event]
],
instance: EventClient,
args: tuple[
list[BulkPushEventWithMetadata],
PushEventOptions | None,
BulkPushEventOptions,
],
kwargs: dict[str, list[BulkPushEventWithMetadata] | PushEventOptions | None],
kwargs: dict[str, list[BulkPushEventWithMetadata] | BulkPushEventOptions],
) -> list[Event]:
params = self.extract_bound_args(wrapped, args, kwargs)
bulk_events = cast(list[BulkPushEventWithMetadata], params[0])
options = cast(BulkPushEventOptions, params[1])
num_bulk_events = len(bulk_events)
unique_event_keys = {event.key for event in bulk_events}
with self._tracer.start_as_current_span(
"hatchet.bulk_push_event",
attributes={
"hatchet.num_events": num_bulk_events,
"hatchet.unique_event_keys": json.dumps(unique_event_keys, default=str),
},
):
return wrapped(*args, **kwargs)
bulk_events_with_meta = [
BulkPushEventWithMetadata(
**event.model_dump(exclude={"additional_metadata"}),
additional_metadata=_inject_traceparent_into_metadata(
dict(event.additional_metadata),
),
)
for event in bulk_events
]
return wrapped(
bulk_events_with_meta,
options,
)
## IMPORTANT: Keep these types in sync with the wrapped method's signature
def _wrap_run_workflow(
self,
wrapped: Callable[[str, Any, TriggerWorkflowOptions | None], WorkflowRunRef],
wrapped: Callable[
[str, JSONSerializableMapping, TriggerWorkflowOptions],
WorkflowRunRef,
],
instance: AdminClient,
args: tuple[str, Any, TriggerWorkflowOptions | None],
kwargs: dict[str, str | Any | TriggerWorkflowOptions | None],
args: tuple[str, JSONSerializableMapping, TriggerWorkflowOptions],
kwargs: dict[str, str | JSONSerializableMapping | TriggerWorkflowOptions],
) -> WorkflowRunRef:
params = self.extract_bound_args(wrapped, args, kwargs)
workflow_name = cast(str, params[0])
payload = cast(JSONSerializableMapping, params[1])
options = cast(
TriggerWorkflowOptions,
params[2] if len(params) > 2 else TriggerWorkflowOptions(),
)
attributes = {
OTelAttribute.WORKFLOW_NAME: workflow_name,
OTelAttribute.ACTION_PAYLOAD: json.dumps(payload, default=str),
OTelAttribute.PARENT_ID: options.parent_id,
OTelAttribute.PARENT_STEP_RUN_ID: options.parent_step_run_id,
OTelAttribute.CHILD_INDEX: options.child_index,
OTelAttribute.CHILD_KEY: options.child_key,
OTelAttribute.NAMESPACE: options.namespace,
OTelAttribute.ADDITIONAL_METADATA: json.dumps(
options.additional_metadata, default=str
),
OTelAttribute.PRIORITY: options.priority,
OTelAttribute.DESIRED_WORKER_ID: options.desired_worker_id,
OTelAttribute.STICKY: options.sticky,
OTelAttribute.KEY: options.key,
}
with self._tracer.start_as_current_span(
"hatchet.run_workflow",
attributes={
f"hatchet.{k.value}": v
for k, v in attributes.items()
if v
and k not in self.config.otel.excluded_attributes
and v != "{}"
and v != "[]"
},
):
return wrapped(*args, **kwargs)
options = TriggerWorkflowOptions(
**options.model_dump(exclude={"additional_metadata"}),
additional_metadata=_inject_traceparent_into_metadata(
dict(options.additional_metadata),
),
)
return wrapped(workflow_name, payload, options)
## IMPORTANT: Keep these types in sync with the wrapped method's signature
async def _wrap_async_run_workflow(
self,
wrapped: Callable[
[str, Any, TriggerWorkflowOptions | None],
[str, JSONSerializableMapping, TriggerWorkflowOptions],
Coroutine[None, None, WorkflowRunRef],
],
instance: AdminClient,
args: tuple[str, Any, TriggerWorkflowOptions | None],
kwargs: dict[str, str | Any | TriggerWorkflowOptions | None],
args: tuple[str, JSONSerializableMapping, TriggerWorkflowOptions],
kwargs: dict[str, str | JSONSerializableMapping | TriggerWorkflowOptions],
) -> WorkflowRunRef:
params = self.extract_bound_args(wrapped, args, kwargs)
workflow_name = cast(str, params[0])
payload = cast(JSONSerializableMapping, params[1])
options = cast(
TriggerWorkflowOptions,
params[2] if len(params) > 2 else TriggerWorkflowOptions(),
)
attributes = {
OTelAttribute.WORKFLOW_NAME: workflow_name,
OTelAttribute.ACTION_PAYLOAD: json.dumps(payload, default=str),
OTelAttribute.PARENT_ID: options.parent_id,
OTelAttribute.PARENT_STEP_RUN_ID: options.parent_step_run_id,
OTelAttribute.CHILD_INDEX: options.child_index,
OTelAttribute.CHILD_KEY: options.child_key,
OTelAttribute.NAMESPACE: options.namespace,
OTelAttribute.ADDITIONAL_METADATA: json.dumps(
options.additional_metadata, default=str
),
OTelAttribute.PRIORITY: options.priority,
OTelAttribute.DESIRED_WORKER_ID: options.desired_worker_id,
OTelAttribute.STICKY: options.sticky,
OTelAttribute.KEY: options.key,
}
with self._tracer.start_as_current_span(
"hatchet.run_workflow",
attributes={
f"hatchet.{k.value}": v
for k, v in attributes.items()
if v
and k not in self.config.otel.excluded_attributes
and v != "{}"
and v != "[]"
},
):
return await wrapped(*args, **kwargs)
options = TriggerWorkflowOptions(
**options.model_dump(exclude={"additional_metadata"}),
additional_metadata=_inject_traceparent_into_metadata(
dict(options.additional_metadata),
),
)
return await wrapped(workflow_name, payload, options)
def _ts_to_iso(self, ts: Union[datetime, timestamp_pb2.Timestamp]) -> str:
if isinstance(ts, datetime):
return ts.isoformat()
elif isinstance(ts, timestamp_pb2.Timestamp):
return ts.ToJsonString()
else:
raise TypeError(f"Unsupported type for timestamp conversion: {type(ts)}")
## IMPORTANT: Keep these types in sync with the wrapped method's signature
def _wrap_schedule_workflow(
self,
wrapped: Callable[
[
str,
list[Union[datetime, timestamp_pb2.Timestamp]],
JSONSerializableMapping,
ScheduleTriggerWorkflowOptions,
],
v0_workflow_protos.WorkflowVersion,
],
instance: AdminClient,
args: tuple[
str,
list[Union[datetime, timestamp_pb2.Timestamp]],
JSONSerializableMapping,
ScheduleTriggerWorkflowOptions,
],
kwargs: dict[
str,
str
| list[Union[datetime, timestamp_pb2.Timestamp]]
| JSONSerializableMapping
| ScheduleTriggerWorkflowOptions,
],
) -> v0_workflow_protos.WorkflowVersion:
params = self.extract_bound_args(wrapped, args, kwargs)
workflow_name = cast(str, params[0])
schedules = cast(list[Union[datetime, timestamp_pb2.Timestamp]], params[1])
input = cast(JSONSerializableMapping, params[2])
options = cast(
ScheduleTriggerWorkflowOptions,
params[3] if len(params) > 3 else ScheduleTriggerWorkflowOptions(),
)
attributes = {
OTelAttribute.WORKFLOW_NAME: workflow_name,
OTelAttribute.RUN_AT_TIMESTAMPS: json.dumps(
[self._ts_to_iso(ts) for ts in schedules]
),
OTelAttribute.ACTION_PAYLOAD: json.dumps(input, default=str),
OTelAttribute.PARENT_ID: options.parent_id,
OTelAttribute.PARENT_STEP_RUN_ID: options.parent_step_run_id,
OTelAttribute.CHILD_INDEX: options.child_index,
OTelAttribute.CHILD_KEY: options.child_key,
OTelAttribute.NAMESPACE: options.namespace,
OTelAttribute.ADDITIONAL_METADATA: json.dumps(
options.additional_metadata, default=str
),
OTelAttribute.PRIORITY: options.priority,
}
with self._tracer.start_as_current_span(
"hatchet.schedule_workflow",
attributes={
f"hatchet.{k.value}": v
for k, v in attributes.items()
if v
and k not in self.config.otel.excluded_attributes
and v != "{}"
and v != "[]"
},
):
options = ScheduleTriggerWorkflowOptions(
**options.model_dump(exclude={"additional_metadata"}),
additional_metadata=_inject_traceparent_into_metadata(
dict(options.additional_metadata),
),
)
return wrapped(workflow_name, schedules, input, options)
## IMPORTANT: Keep these types in sync with the wrapped method's signature
def _wrap_run_workflows(
@@ -362,10 +650,37 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
args: tuple[list[WorkflowRunTriggerConfig],],
kwargs: dict[str, list[WorkflowRunTriggerConfig]],
) -> list[WorkflowRunRef]:
params = self.extract_bound_args(wrapped, args, kwargs)
workflow_run_configs = cast(list[WorkflowRunTriggerConfig], params[0])
num_workflows = len(workflow_run_configs)
unique_workflow_names = {
config.workflow_name for config in workflow_run_configs
}
with self._tracer.start_as_current_span(
"hatchet.run_workflows",
attributes={
"hatchet.num_workflows": num_workflows,
"hatchet.unique_workflow_names": json.dumps(
unique_workflow_names, default=str
),
},
):
return wrapped(*args, **kwargs)
workflow_run_configs_with_meta = [
WorkflowRunTriggerConfig(
**config.model_dump(exclude={"options"}),
options=TriggerWorkflowOptions(
**config.options.model_dump(exclude={"additional_metadata"}),
additional_metadata=_inject_traceparent_into_metadata(
dict(config.options.additional_metadata),
),
),
)
for config in workflow_run_configs
]
return wrapped(workflow_run_configs_with_meta)
## IMPORTANT: Keep these types in sync with the wrapped method's signature
async def _wrap_async_run_workflows(
@@ -378,10 +693,26 @@ class HatchetInstrumentor(BaseInstrumentor): # type: ignore[misc]
args: tuple[list[WorkflowRunTriggerConfig],],
kwargs: dict[str, list[WorkflowRunTriggerConfig]],
) -> list[WorkflowRunRef]:
params = self.extract_bound_args(wrapped, args, kwargs)
workflow_run_configs = cast(list[WorkflowRunTriggerConfig], params[0])
with self._tracer.start_as_current_span(
"hatchet.run_workflows",
):
return await wrapped(*args, **kwargs)
workflow_run_configs_with_meta = [
WorkflowRunTriggerConfig(
**config.model_dump(exclude={"options"}),
options=TriggerWorkflowOptions(
**config.options.model_dump(exclude={"additional_metadata"}),
additional_metadata=_inject_traceparent_into_metadata(
dict(config.options.additional_metadata),
),
),
)
for config in workflow_run_configs
]
return await wrapped(workflow_run_configs_with_meta)
def _uninstrument(self, **kwargs: InstrumentKwargs) -> None:
self.tracer_provider = NoOpTracerProvider()
+27 -2
View File
@@ -2,8 +2,18 @@ from enum import Enum
class OTelAttribute(str, Enum):
## Shared
NAMESPACE = "namespace"
ADDITIONAL_METADATA = "additional_metadata"
WORKFLOW_NAME = "workflow_name"
PRIORITY = "priority"
## Unfortunately named - this corresponds to all types of payloads, not just actions
ACTION_PAYLOAD = "payload"
## Action
ACTION_NAME = "action_name"
ACTION_PAYLOAD = "action_payload"
CHILD_WORKFLOW_INDEX = "child_workflow_index"
CHILD_WORKFLOW_KEY = "child_workflow_key"
GET_GROUP_KEY_RUN_ID = "get_group_key_run_id"
@@ -14,6 +24,21 @@ class OTelAttribute(str, Enum):
TENANT_ID = "tenant_id"
WORKER_ID = "worker_id"
WORKFLOW_ID = "workflow_id"
WORKFLOW_NAME = "workflow_name"
WORKFLOW_RUN_ID = "workflow_run_id"
WORKFLOW_VERSION_ID = "workflow_version_id"
## Push Event
EVENT_KEY = "event_key"
FILTER_SCOPE = "scope"
## Trigger Workflow
PARENT_ID = "parent_id"
PARENT_STEP_RUN_ID = "parent_step_run_id"
CHILD_INDEX = "child_index"
CHILD_KEY = "child_key"
DESIRED_WORKER_ID = "desired_worker_id"
STICKY = "sticky"
KEY = "key"
## Schedule Workflow
RUN_AT_TIMESTAMPS = "run_at_timestamps"
+45 -45
View File
@@ -1519,15 +1519,15 @@ voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"]
[[package]]
name = "opentelemetry-api"
version = "1.31.1"
version = "1.33.1"
description = "OpenTelemetry Python API"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_api-1.31.1-py3-none-any.whl", hash = "sha256:1511a3f470c9c8a32eeea68d4ea37835880c0eed09dd1a0187acc8b1301da0a1"},
{file = "opentelemetry_api-1.31.1.tar.gz", hash = "sha256:137ad4b64215f02b3000a0292e077641c8611aab636414632a9b9068593b7e91"},
{file = "opentelemetry_api-1.33.1-py3-none-any.whl", hash = "sha256:4db83ebcf7ea93e64637ec6ee6fabee45c5cbe4abd9cf3da95c43828ddb50b83"},
{file = "opentelemetry_api-1.33.1.tar.gz", hash = "sha256:1c6055fc0a2d3f23a50c7e17e16ef75ad489345fd3df1f8b8af7c0bbf8a109e8"},
]
[package.dependencies]
@@ -1536,69 +1536,69 @@ importlib-metadata = ">=6.0,<8.7.0"
[[package]]
name = "opentelemetry-distro"
version = "0.52b1"
version = "0.54b1"
description = "OpenTelemetry Python Distro"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_distro-0.52b1-py3-none-any.whl", hash = "sha256:5562a039e4c36524d0dbb45a0857f8acfda3afbef7e8462513c7946309eb5c8c"},
{file = "opentelemetry_distro-0.52b1.tar.gz", hash = "sha256:cb8df34a95034c7d038fd245556fb732853dc66473746d652bee6c5c2fb7dfc6"},
{file = "opentelemetry_distro-0.54b1-py3-none-any.whl", hash = "sha256:009486513b32b703e275bb2f9ccaf5791676bbf5e2dcfdd90201ddc8f56f122b"},
{file = "opentelemetry_distro-0.54b1.tar.gz", hash = "sha256:61d6b97bb7a245fddbb829345bb4ad18be39eb52f770fab89a127107fca3149f"},
]
[package.dependencies]
opentelemetry-api = ">=1.12,<2.0"
opentelemetry-instrumentation = "0.52b1"
opentelemetry-instrumentation = "0.54b1"
opentelemetry-sdk = ">=1.13,<2.0"
[package.extras]
otlp = ["opentelemetry-exporter-otlp (==1.31.1)"]
otlp = ["opentelemetry-exporter-otlp (==1.33.1)"]
[[package]]
name = "opentelemetry-exporter-otlp"
version = "1.31.1"
version = "1.33.1"
description = "OpenTelemetry Collector Exporters"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_exporter_otlp-1.31.1-py3-none-any.whl", hash = "sha256:36286c28709cbfba5177129ec30bfe4de67bdec8f375c1703014e0eea44322c6"},
{file = "opentelemetry_exporter_otlp-1.31.1.tar.gz", hash = "sha256:004db12bfafb9e07b79936783d91db214b1e208a152b5c36b1f2ef2264940692"},
{file = "opentelemetry_exporter_otlp-1.33.1-py3-none-any.whl", hash = "sha256:9bcf1def35b880b55a49e31ebd63910edac14b294fd2ab884953c4deaff5b300"},
{file = "opentelemetry_exporter_otlp-1.33.1.tar.gz", hash = "sha256:4d050311ea9486e3994575aa237e32932aad58330a31fba24fdba5c0d531cf04"},
]
[package.dependencies]
opentelemetry-exporter-otlp-proto-grpc = "1.31.1"
opentelemetry-exporter-otlp-proto-http = "1.31.1"
opentelemetry-exporter-otlp-proto-grpc = "1.33.1"
opentelemetry-exporter-otlp-proto-http = "1.33.1"
[[package]]
name = "opentelemetry-exporter-otlp-proto-common"
version = "1.31.1"
version = "1.33.1"
description = "OpenTelemetry Protobuf encoding"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_exporter_otlp_proto_common-1.31.1-py3-none-any.whl", hash = "sha256:7cadf89dbab12e217a33c5d757e67c76dd20ce173f8203e7370c4996f2e9efd8"},
{file = "opentelemetry_exporter_otlp_proto_common-1.31.1.tar.gz", hash = "sha256:c748e224c01f13073a2205397ba0e415dcd3be9a0f95101ba4aace5fc730e0da"},
{file = "opentelemetry_exporter_otlp_proto_common-1.33.1-py3-none-any.whl", hash = "sha256:b81c1de1ad349785e601d02715b2d29d6818aed2c809c20219f3d1f20b038c36"},
{file = "opentelemetry_exporter_otlp_proto_common-1.33.1.tar.gz", hash = "sha256:c57b3fa2d0595a21c4ed586f74f948d259d9949b58258f11edb398f246bec131"},
]
[package.dependencies]
opentelemetry-proto = "1.31.1"
opentelemetry-proto = "1.33.1"
[[package]]
name = "opentelemetry-exporter-otlp-proto-grpc"
version = "1.31.1"
version = "1.33.1"
description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_exporter_otlp_proto_grpc-1.31.1-py3-none-any.whl", hash = "sha256:f4055ad2c9a2ea3ae00cbb927d6253233478b3b87888e197d34d095a62305fae"},
{file = "opentelemetry_exporter_otlp_proto_grpc-1.31.1.tar.gz", hash = "sha256:c7f66b4b333c52248dc89a6583506222c896c74824d5d2060b818ae55510939a"},
{file = "opentelemetry_exporter_otlp_proto_grpc-1.33.1-py3-none-any.whl", hash = "sha256:7e8da32c7552b756e75b4f9e9c768a61eb47dee60b6550b37af541858d669ce1"},
{file = "opentelemetry_exporter_otlp_proto_grpc-1.33.1.tar.gz", hash = "sha256:345696af8dc19785fac268c8063f3dc3d5e274c774b308c634f39d9c21955728"},
]
[package.dependencies]
@@ -1609,62 +1609,62 @@ grpcio = [
{version = ">=1.66.2,<2.0.0", markers = "python_version >= \"3.13\""},
]
opentelemetry-api = ">=1.15,<2.0"
opentelemetry-exporter-otlp-proto-common = "1.31.1"
opentelemetry-proto = "1.31.1"
opentelemetry-sdk = ">=1.31.1,<1.32.0"
opentelemetry-exporter-otlp-proto-common = "1.33.1"
opentelemetry-proto = "1.33.1"
opentelemetry-sdk = ">=1.33.1,<1.34.0"
[[package]]
name = "opentelemetry-exporter-otlp-proto-http"
version = "1.31.1"
version = "1.33.1"
description = "OpenTelemetry Collector Protobuf over HTTP Exporter"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_exporter_otlp_proto_http-1.31.1-py3-none-any.whl", hash = "sha256:5dee1f051f096b13d99706a050c39b08e3f395905f29088bfe59e54218bd1cf4"},
{file = "opentelemetry_exporter_otlp_proto_http-1.31.1.tar.gz", hash = "sha256:723bd90eb12cfb9ae24598641cb0c92ca5ba9f1762103902f6ffee3341ba048e"},
{file = "opentelemetry_exporter_otlp_proto_http-1.33.1-py3-none-any.whl", hash = "sha256:ebd6c523b89a2ecba0549adb92537cc2bf647b4ee61afbbd5a4c6535aa3da7cf"},
{file = "opentelemetry_exporter_otlp_proto_http-1.33.1.tar.gz", hash = "sha256:46622d964a441acb46f463ebdc26929d9dec9efb2e54ef06acdc7305e8593c38"},
]
[package.dependencies]
deprecated = ">=1.2.6"
googleapis-common-protos = ">=1.52,<2.0"
opentelemetry-api = ">=1.15,<2.0"
opentelemetry-exporter-otlp-proto-common = "1.31.1"
opentelemetry-proto = "1.31.1"
opentelemetry-sdk = ">=1.31.1,<1.32.0"
opentelemetry-exporter-otlp-proto-common = "1.33.1"
opentelemetry-proto = "1.33.1"
opentelemetry-sdk = ">=1.33.1,<1.34.0"
requests = ">=2.7,<3.0"
[[package]]
name = "opentelemetry-instrumentation"
version = "0.52b1"
version = "0.54b1"
description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_instrumentation-0.52b1-py3-none-any.whl", hash = "sha256:8c0059c4379d77bbd8015c8d8476020efe873c123047ec069bb335e4b8717477"},
{file = "opentelemetry_instrumentation-0.52b1.tar.gz", hash = "sha256:739f3bfadbbeec04dd59297479e15660a53df93c131d907bb61052e3d3c1406f"},
{file = "opentelemetry_instrumentation-0.54b1-py3-none-any.whl", hash = "sha256:a4ae45f4a90c78d7006c51524f57cd5aa1231aef031eae905ee34d5423f5b198"},
{file = "opentelemetry_instrumentation-0.54b1.tar.gz", hash = "sha256:7658bf2ff914b02f246ec14779b66671508125c0e4227361e56b5ebf6cef0aec"},
]
[package.dependencies]
opentelemetry-api = ">=1.4,<2.0"
opentelemetry-semantic-conventions = "0.52b1"
opentelemetry-semantic-conventions = "0.54b1"
packaging = ">=18.0"
wrapt = ">=1.0.0,<2.0.0"
[[package]]
name = "opentelemetry-proto"
version = "1.31.1"
version = "1.33.1"
description = "OpenTelemetry Python Proto"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_proto-1.31.1-py3-none-any.whl", hash = "sha256:1398ffc6d850c2f1549ce355744e574c8cd7c1dba3eea900d630d52c41d07178"},
{file = "opentelemetry_proto-1.31.1.tar.gz", hash = "sha256:d93e9c2b444e63d1064fb50ae035bcb09e5822274f1683886970d2734208e790"},
{file = "opentelemetry_proto-1.33.1-py3-none-any.whl", hash = "sha256:243d285d9f29663fc7ea91a7171fcc1ccbbfff43b48df0774fd64a37d98eda70"},
{file = "opentelemetry_proto-1.33.1.tar.gz", hash = "sha256:9627b0a5c90753bf3920c398908307063e4458b287bb890e5c1d6fa11ad50b68"},
]
[package.dependencies]
@@ -1672,38 +1672,38 @@ protobuf = ">=5.0,<6.0"
[[package]]
name = "opentelemetry-sdk"
version = "1.31.1"
version = "1.33.1"
description = "OpenTelemetry Python SDK"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_sdk-1.31.1-py3-none-any.whl", hash = "sha256:882d021321f223e37afaca7b4e06c1d8bbc013f9e17ff48a7aa017460a8e7dae"},
{file = "opentelemetry_sdk-1.31.1.tar.gz", hash = "sha256:c95f61e74b60769f8ff01ec6ffd3d29684743404603df34b20aa16a49dc8d903"},
{file = "opentelemetry_sdk-1.33.1-py3-none-any.whl", hash = "sha256:19ea73d9a01be29cacaa5d6c8ce0adc0b7f7b4d58cc52f923e4413609f670112"},
{file = "opentelemetry_sdk-1.33.1.tar.gz", hash = "sha256:85b9fcf7c3d23506fbc9692fd210b8b025a1920535feec50bd54ce203d57a531"},
]
[package.dependencies]
opentelemetry-api = "1.31.1"
opentelemetry-semantic-conventions = "0.52b1"
opentelemetry-api = "1.33.1"
opentelemetry-semantic-conventions = "0.54b1"
typing-extensions = ">=3.7.4"
[[package]]
name = "opentelemetry-semantic-conventions"
version = "0.52b1"
version = "0.54b1"
description = "OpenTelemetry Semantic Conventions"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"otel\""
files = [
{file = "opentelemetry_semantic_conventions-0.52b1-py3-none-any.whl", hash = "sha256:72b42db327e29ca8bb1b91e8082514ddf3bbf33f32ec088feb09526ade4bc77e"},
{file = "opentelemetry_semantic_conventions-0.52b1.tar.gz", hash = "sha256:7b3d226ecf7523c27499758a58b542b48a0ac8d12be03c0488ff8ec60c5bae5d"},
{file = "opentelemetry_semantic_conventions-0.54b1-py3-none-any.whl", hash = "sha256:29dab644a7e435b58d3a3918b58c333c92686236b30f7891d5e51f02933ca60d"},
{file = "opentelemetry_semantic_conventions-0.54b1.tar.gz", hash = "sha256:d1cecedae15d19bdaafca1e56b29a66aa286f50b5d08f036a145c7f3e9ef9cee"},
]
[package.dependencies]
deprecated = ">=1.2.6"
opentelemetry-api = "1.31.1"
opentelemetry-api = "1.33.1"
[[package]]
name = "packaging"
+1 -1
View File
@@ -1,6 +1,6 @@
[tool.poetry]
name = "hatchet-sdk"
version = "1.10.3"
version = "1.11.0"
description = ""
authors = ["Alexander Belanger <alexander@hatchet.run>"]
readme = "README.md"