mirror of https://github.com/hatchet-dev/hatchet.git (synced 2026-01-06 08:49:53 -06:00)
feat(ts): improved custom logger (#1652)
* wip improved signatures
* feat: retry count and improved logger
* task pre
* update example
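The file diffs below are regenerated documentation snippets carried along by the logger change; the logger work itself lands in the TypeScript SDK. For orientation, here is a rough sketch of the idea in the Python SDK that these snippets document. ClientConfig(logger=...) and ctx.retry_count are assumptions about the SDK surface, not anything shown in this diff:

import logging

from hatchet_sdk import ClientConfig, Context, EmptyModel, Hatchet

# Assumption: ClientConfig accepts a standard logging.Logger, which the
# SDK then routes its own output through instead of creating its own.
custom_logger = logging.getLogger('my-app')
custom_logger.setLevel(logging.INFO)

hatchet = Hatchet(debug=True, config=ClientConfig(logger=custom_logger))

logger_workflow = hatchet.workflow(name='LoggerWorkflow')


@logger_workflow.task()
def log_retry_count(input: EmptyModel, ctx: Context) -> None:
    # Assumption: the task context exposes the current attempt as
    # ctx.retry_count, mirroring the "retry count" in the commit message.
    custom_logger.info('attempt %s', ctx.retry_count)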
@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';
 
 const snippet: Snippet = {
 'language': 'python',
-'content': 'import asyncio\nfrom collections import Counter\nfrom datetime import datetime\nfrom random import choice\nfrom typing import Literal\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.concurrency_workflow_level.worker import (\n DIGIT_MAX_RUNS,\n NAME_MAX_RUNS,\n WorkflowInput,\n concurrency_workflow_level_workflow,\n)\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\n\nCharacter = Literal[\'Anna\', \'Vronsky\', \'Stiva\', \'Dolly\', \'Levin\', \'Karenin\']\ncharacters: list[Character] = [\n \'Anna\',\n \'Vronsky\',\n \'Stiva\',\n \'Dolly\',\n \'Levin\',\n \'Karenin\',\n]\n\n\nclass RunMetadata(BaseModel):\n test_run_id: str\n key: str\n name: Character\n digit: str\n started_at: datetime\n finished_at: datetime\n\n @staticmethod\n def parse(task: V1TaskSummary) -> \'RunMetadata\':\n return RunMetadata(\n test_run_id=task.additional_metadata[\'test_run_id\'], # type: ignore\n key=task.additional_metadata[\'key\'], # type: ignore\n name=task.additional_metadata[\'name\'], # type: ignore\n digit=task.additional_metadata[\'digit\'], # type: ignore\n started_at=task.started_at or datetime.max,\n finished_at=task.finished_at or datetime.min,\n )\n\n def __str__(self) -> str:\n return self.key\n\n\n@pytest.mark.asyncio()\nasync def test_workflow_level_concurrency(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n\n run_refs = await concurrency_workflow_level_workflow.aio_run_many_no_wait(\n [\n concurrency_workflow_level_workflow.create_bulk_run_item(\n WorkflowInput(\n name=(name := choice(characters)),\n digit=(digit := choice([str(i) for i in range(6)])),\n ),\n options=TriggerWorkflowOptions(\n additional_metadata={\n \'test_run_id\': test_run_id,\n \'key\': f\'{name}-{digit}\',\n \'name\': name,\n \'digit\': digit,\n },\n ),\n )\n for _ in range(100)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(\n workflow_name=concurrency_workflow_level_workflow.name,\n limit=1_000,\n )\n ).rows\n\n assert workflows\n\n workflow = next(\n (w for w in workflows if w.name == concurrency_workflow_level_workflow.name),\n None,\n )\n\n assert workflow\n\n assert workflow.name == concurrency_workflow_level_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n \'test_run_id\': test_run_id,\n },\n limit=1_000,\n )\n\n sorted_runs = sorted(\n [RunMetadata.parse(r) for r in runs.rows], key=lambda r: r.started_at\n )\n\n overlapping_groups: dict[int, list[RunMetadata]] = {}\n\n for run in sorted_runs:\n has_group_membership = False\n\n if not overlapping_groups:\n overlapping_groups[1] = [run]\n continue\n\n if has_group_membership:\n continue\n\n for id, group in overlapping_groups.items():\n if all(are_overlapping(run, task) for task in group):\n overlapping_groups[id].append(run)\n has_group_membership = True\n break\n\n if not has_group_membership:\n overlapping_groups[len(overlapping_groups) + 1] = [run]\n\n for id, group in overlapping_groups.items():\n assert is_valid_group(group), f\'Group {id} is not valid\'\n\n\ndef are_overlapping(x: RunMetadata, y: RunMetadata) -> bool:\n return (x.started_at < y.finished_at and x.finished_at > y.started_at) or (\n x.finished_at > y.started_at and x.started_at < y.finished_at\n )\n\n\ndef is_valid_group(group: list[RunMetadata]) -> bool:\n digits = Counter[str]()\n names = Counter[str]()\n\n for task in group:\n digits[task.digit] += 1\n names[task.name] += 1\n\n if any(v > DIGIT_MAX_RUNS for v in digits.values()):\n return False\n\n if any(v > NAME_MAX_RUNS for v in names.values()):\n return False\n\n return True\n',
+'content': 'import asyncio\nfrom collections import Counter\nfrom datetime import datetime\nfrom random import choice\nfrom typing import Literal\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.concurrency_workflow_level.worker import (\n DIGIT_MAX_RUNS,\n NAME_MAX_RUNS,\n WorkflowInput,\n concurrency_workflow_level_workflow,\n)\nfrom hatchet_sdk import Hatchet, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\n\nCharacter = Literal[\'Anna\', \'Vronsky\', \'Stiva\', \'Dolly\', \'Levin\', \'Karenin\']\ncharacters: list[Character] = [\n \'Anna\',\n \'Vronsky\',\n \'Stiva\',\n \'Dolly\',\n \'Levin\',\n \'Karenin\',\n]\n\n\nclass RunMetadata(BaseModel):\n test_run_id: str\n key: str\n name: Character\n digit: str\n started_at: datetime\n finished_at: datetime\n\n @staticmethod\n def parse(task: V1TaskSummary) -> \'RunMetadata\':\n return RunMetadata(\n test_run_id=task.additional_metadata[\'test_run_id\'], # type: ignore\n key=task.additional_metadata[\'key\'], # type: ignore\n name=task.additional_metadata[\'name\'], # type: ignore\n digit=task.additional_metadata[\'digit\'], # type: ignore\n started_at=task.started_at or datetime.max,\n finished_at=task.finished_at or datetime.min,\n )\n\n def __str__(self) -> str:\n return self.key\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_workflow_level_concurrency(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n\n run_refs = await concurrency_workflow_level_workflow.aio_run_many_no_wait(\n [\n concurrency_workflow_level_workflow.create_bulk_run_item(\n WorkflowInput(\n name=(name := choice(characters)),\n digit=(digit := choice([str(i) for i in range(6)])),\n ),\n options=TriggerWorkflowOptions(\n additional_metadata={\n \'test_run_id\': test_run_id,\n \'key\': f\'{name}-{digit}\',\n \'name\': name,\n \'digit\': digit,\n },\n ),\n )\n for _ in range(100)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(\n workflow_name=concurrency_workflow_level_workflow.name,\n limit=1_000,\n )\n ).rows\n\n assert workflows\n\n workflow = next(\n (w for w in workflows if w.name == concurrency_workflow_level_workflow.name),\n None,\n )\n\n assert workflow\n\n assert workflow.name == concurrency_workflow_level_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n \'test_run_id\': test_run_id,\n },\n limit=1_000,\n )\n\n sorted_runs = sorted(\n [RunMetadata.parse(r) for r in runs.rows], key=lambda r: r.started_at\n )\n\n overlapping_groups: dict[int, list[RunMetadata]] = {}\n\n for run in sorted_runs:\n has_group_membership = False\n\n if not overlapping_groups:\n overlapping_groups[1] = [run]\n continue\n\n if has_group_membership:\n continue\n\n for id, group in overlapping_groups.items():\n if all(are_overlapping(run, task) for task in group):\n overlapping_groups[id].append(run)\n has_group_membership = True\n break\n\n if not has_group_membership:\n overlapping_groups[len(overlapping_groups) + 1] = [run]\n\n for id, group in overlapping_groups.items():\n assert is_valid_group(group), f\'Group {id} is not valid\'\n\n\ndef are_overlapping(x: RunMetadata, y: RunMetadata) -> bool:\n return (x.started_at < y.finished_at and x.finished_at > y.started_at) or (\n x.finished_at > y.started_at and x.started_at < y.finished_at\n )\n\n\ndef is_valid_group(group: list[RunMetadata]) -> bool:\n digits = Counter[str]()\n names = Counter[str]()\n\n for task in group:\n digits[task.digit] += 1\n names[task.name] += 1\n\n if any(v > DIGIT_MAX_RUNS for v in digits.values()):\n return False\n\n if any(v > NAME_MAX_RUNS for v in names.values()):\n return False\n\n return True\n',
 'source': 'out/python/concurrency_workflow_level/test_workflow_level_concurrency.py',
 'blocks': {},
 'highlights': {}
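The only change to this snippet is the test marker: @pytest.mark.asyncio() becomes @pytest.mark.asyncio(loop_scope='session'). A minimal sketch of what that option changes, assuming pytest-asyncio 0.24 or newer, where the loop_scope argument exists:

import asyncio

import pytest

loops: set[int] = set()


# With loop_scope='session', all tests carrying this marker share one
# event loop for the whole test session, so loop-bound resources survive
# across tests. The default, loop_scope='function', gives each test a
# fresh loop.
@pytest.mark.asyncio(loop_scope='session')
async def test_first() -> None:
    loops.add(id(asyncio.get_running_loop()))


@pytest.mark.asyncio(loop_scope='session')
async def test_second() -> None:
    loops.add(id(asyncio.get_running_loop()))
    assert len(loops) == 1  # same loop as test_first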
File diff suppressed because one or more lines are too long
@@ -2,12 +2,12 @@ import { Snippet } from '@/lib/generated/snips/types';
 
 const snippet: Snippet = {
 'language': 'python',
-'content': 'import time\n\nfrom hatchet_sdk import (\n ConcurrencyExpression,\n ConcurrencyLimitStrategy,\n Context,\n EmptyModel,\n Hatchet,\n)\n\nhatchet = Hatchet(debug=True)\n\n# > Default priority\nDEFAULT_PRIORITY = 1\nSLEEP_TIME = 0.25\n\npriority_workflow = hatchet.workflow(\n name=\'PriorityWorkflow\',\n default_priority=DEFAULT_PRIORITY,\n concurrency=ConcurrencyExpression(\n max_runs=1,\n expression=\'"true"\',\n limit_strategy=ConcurrencyLimitStrategy.GROUP_ROUND_ROBIN,\n ),\n)\n\n\n@priority_workflow.task()\ndef priority_task(input: EmptyModel, ctx: Context) -> None:\n print(\'Priority:\', ctx.priority)\n time.sleep(SLEEP_TIME)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \'priority-worker\',\n slots=1,\n workflows=[priority_workflow],\n )\n\n worker.start()\n\n\nif __name__ == \'__main__\':\n main()\n',
+'content': 'import time\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n# > Default priority\nDEFAULT_PRIORITY = 1\nSLEEP_TIME = 0.25\n\npriority_workflow = hatchet.workflow(\n name=\'PriorityWorkflow\',\n default_priority=DEFAULT_PRIORITY,\n)\n\n\n@priority_workflow.task()\ndef priority_task(input: EmptyModel, ctx: Context) -> None:\n print(\'Priority:\', ctx.priority)\n time.sleep(SLEEP_TIME)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \'priority-worker\',\n slots=1,\n workflows=[priority_workflow],\n )\n\n worker.start()\n\n\nif __name__ == \'__main__\':\n main()\n',
 'source': 'out/python/priority/worker.py',
 'blocks': {
 'default_priority': {
-'start': 14,
-'stop': 25
+'start': 8,
+'stop': 14
 }
 },
 'highlights': {}
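This regenerated snippet drops the ConcurrencyExpression block from the priority worker, so the default_priority block markers move from lines 14-25 up to 8-14. Below is a sketch of how a per-run priority could override the workflow default; treating priority as a field on TriggerWorkflowOptions is an assumption here, since only additional_metadata appears in this diff:

from hatchet_sdk import Hatchet, TriggerWorkflowOptions

from examples.priority.worker import priority_workflow  # the snippet above

hatchet = Hatchet(debug=True)

# The workflow declares default_priority=1, so a plain run queues at
# priority 1.
priority_workflow.run_no_wait()

# Assumption: TriggerWorkflowOptions carries a priority override, letting
# a single run queue ahead of default-priority runs.
priority_workflow.run_no_wait(options=TriggerWorkflowOptions(priority=3))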
@@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types';
 
 const snippet: Snippet = {
 'language': 'python',
-'content': 'from examples.affinity_workers.worker import affinity_worker_workflow\nfrom examples.bulk_fanout.worker import bulk_child_wf, bulk_parent_wf\nfrom examples.cancellation.worker import cancellation_workflow\nfrom examples.concurrency_limit.worker import concurrency_limit_workflow\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow\nfrom examples.concurrency_workflow_level.worker import (\n concurrency_workflow_level_workflow,\n)\nfrom examples.dag.worker import dag_workflow\nfrom examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf\nfrom examples.durable.worker import durable_workflow\nfrom examples.fanout.worker import child_wf, parent_wf\nfrom examples.fanout_sync.worker import sync_fanout_child, sync_fanout_parent\nfrom examples.lifespans.simple import lifespan, lifespan_task\nfrom examples.logger.workflow import logging_workflow\nfrom examples.non_retryable.worker import non_retryable_workflow\nfrom examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details\nfrom examples.priority.worker import priority_workflow\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \'e2e-test-worker\',\n slots=100,\n workflows=[\n affinity_worker_workflow,\n bulk_child_wf,\n bulk_parent_wf,\n concurrency_limit_workflow,\n concurrency_limit_rr_workflow,\n concurrency_multiple_keys_workflow,\n dag_workflow,\n dedupe_child_wf,\n dedupe_parent_wf,\n durable_workflow,\n child_wf,\n parent_wf,\n on_failure_wf,\n on_failure_wf_with_details,\n logging_workflow,\n timeout_wf,\n refresh_timeout_wf,\n task_condition_workflow,\n cancellation_workflow,\n sync_fanout_parent,\n sync_fanout_child,\n non_retryable_workflow,\n concurrency_workflow_level_workflow,\n priority_workflow,\n lifespan_task,\n ],\n lifespan=lifespan,\n )\n\n worker.start()\n\n\nif __name__ == \'__main__\':\n main()\n',
+'content': 'from examples.affinity_workers.worker import affinity_worker_workflow\nfrom examples.bulk_fanout.worker import bulk_child_wf, bulk_parent_wf\nfrom examples.cancellation.worker import cancellation_workflow\nfrom examples.concurrency_limit.worker import concurrency_limit_workflow\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow\nfrom examples.concurrency_workflow_level.worker import (\n concurrency_workflow_level_workflow,\n)\nfrom examples.dag.worker import dag_workflow\nfrom examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf\nfrom examples.durable.worker import durable_workflow\nfrom examples.fanout.worker import child_wf, parent_wf\nfrom examples.fanout_sync.worker import sync_fanout_child, sync_fanout_parent\nfrom examples.lifespans.simple import lifespan, lifespan_task\nfrom examples.logger.workflow import logging_workflow\nfrom examples.non_retryable.worker import non_retryable_workflow\nfrom examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \'e2e-test-worker\',\n slots=100,\n workflows=[\n affinity_worker_workflow,\n bulk_child_wf,\n bulk_parent_wf,\n concurrency_limit_workflow,\n concurrency_limit_rr_workflow,\n concurrency_multiple_keys_workflow,\n dag_workflow,\n dedupe_child_wf,\n dedupe_parent_wf,\n durable_workflow,\n child_wf,\n parent_wf,\n on_failure_wf,\n on_failure_wf_with_details,\n logging_workflow,\n timeout_wf,\n refresh_timeout_wf,\n task_condition_workflow,\n cancellation_workflow,\n sync_fanout_parent,\n sync_fanout_child,\n non_retryable_workflow,\n concurrency_workflow_level_workflow,\n lifespan_task,\n ],\n lifespan=lifespan,\n )\n\n worker.start()\n\n\nif __name__ == \'__main__\':\n main()\n',
 'source': 'out/python/worker.py',
 'blocks': {},
 'highlights': {}
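The e2e worker snippet loses priority_workflow from its import block and registration list, matching the change to the priority example above. Distilled from the snippet, a minimal registration sketch; the worker name and slot count are placeholders:

from hatchet_sdk import Hatchet

hatchet = Hatchet(debug=True)


def main() -> None:
    # slots bounds how many task runs this worker executes at once; every
    # workflow object passed in workflows= is served by this worker.
    worker = hatchet.worker(
        'example-worker',  # placeholder name
        slots=10,
        workflows=[],  # add workflow objects here, e.g. dag_workflow
    )

    worker.start()


if __name__ == '__main__':
    main()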