mirror of
https://github.com/hatchet-dev/hatchet.git
synced 2026-04-21 17:28:59 -05:00
Fix: More examples / snippets fixes + rework (#2150)
* feat: start reworking snippets * feat: start cleaning up gen script * fix: start updating refs everywhere * feat: start fixing broken snippet links * fix: more snippets * fix: more updates * chore: lint * fix: taskfile * fix: script * fix: escaping issue + mergent blog * fix: bunch more * chore: lint * fix: implement more of them * fix: retry * fix: the rest * chore: lint * fix: highlight * fix: ugh * fix: start removing dead code from old snippet method * fix: rest of the refs * fix: remove all of the rest of the <GithubSnippet uses * fix: couple more * fix: last few errors * fix: handle example writes * fix: delete to test update * fix: CI, attempt 1 * feat: helpful error on no snippet * fix: lint * chore: rm unused js file * feat: improve GHA * debug: run action on branch * fix: rm pnpm * fix: try to leave comment instead * fix: don't run on branch * fix: factor out GH info * fix: include code path * fix: ts
This commit is contained in:
@@ -23,9 +23,12 @@ jobs:
|
||||
- name: Install dependencies
|
||||
working-directory: frontend/docs
|
||||
run: pnpm install --frozen-lockfile
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.13'
|
||||
- name: Generate snippets
|
||||
working-directory: frontend/snips
|
||||
run: pnpm install --frozen-lockfile && pnpm generate && pnpm run copy:docs
|
||||
working-directory: frontend/snippets
|
||||
run: python3 generate.py
|
||||
- name: Lint
|
||||
working-directory: frontend/docs
|
||||
run: npm run lint:check
|
||||
@@ -48,9 +51,12 @@ jobs:
|
||||
- name: Install dependencies
|
||||
working-directory: frontend/docs
|
||||
run: pnpm install --frozen-lockfile
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.13'
|
||||
- name: Generate snippets
|
||||
working-directory: frontend/snips
|
||||
run: pnpm install --frozen-lockfile && pnpm generate && pnpm run copy:docs
|
||||
working-directory: frontend/snippets
|
||||
run: python3 generate.py
|
||||
- name: Build
|
||||
working-directory: frontend/docs
|
||||
run: npm run build
|
||||
|
||||
@@ -11,20 +11,12 @@ jobs:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Node
|
||||
uses: actions/setup-node@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
node-version: '22'
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9.15.4
|
||||
run_install: false
|
||||
|
||||
- name: Generate
|
||||
working-directory: frontend/snips
|
||||
run: pnpm i && pnpm generate && pnpm run copy:examples
|
||||
python-version: '3.13'
|
||||
- name: Generate snippets
|
||||
working-directory: frontend/snippets
|
||||
run: python3 generate.py
|
||||
|
||||
- name: Check for changes in examples directory
|
||||
id: verify-changed-files
|
||||
@@ -59,6 +51,20 @@ jobs:
|
||||
echo "branch_name=$BRANCH_NAME" >> $GITHUB_OUTPUT
|
||||
id: create-branch
|
||||
|
||||
- name: Close existing autogenerated-docs PRs
|
||||
if: steps.verify-changed-files.outputs.changed == 'true'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
EXISTING_PRS=$(gh pr list --label "autogenerated-docs" --state open --json number --jq '.[].number')
|
||||
|
||||
for pr in $EXISTING_PRS; do
|
||||
if [ -n "$pr" ]; then
|
||||
echo "Closing existing autogenerated-docs PR #$pr"
|
||||
gh pr close $pr --comment "Closing in favor of newer autogenerated-docs PR"
|
||||
fi
|
||||
done
|
||||
|
||||
- name: Create Pull Request
|
||||
if: steps.verify-changed-files.outputs.changed == 'true'
|
||||
env:
|
||||
@@ -68,14 +74,23 @@ jobs:
|
||||
--title "chore: regenerate examples" \
|
||||
--body "Automated regeneration of examples from the main branch." \
|
||||
--head "${{ steps.create-branch.outputs.branch_name }}" \
|
||||
--base main
|
||||
--base main \
|
||||
--label "autogenerated-docs"
|
||||
echo "pr_number=$(gh pr list --head ${{ steps.create-branch.outputs.branch_name }} --json number --jq '.[0].number')" >> $GITHUB_OUTPUT
|
||||
id: create-pr
|
||||
|
||||
- name: Auto-approve and merge Pull Request
|
||||
- name: Request review from triggering author
|
||||
if: steps.verify-changed-files.outputs.changed == 'true'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
gh pr review "${{ steps.create-branch.outputs.branch_name }}" --approve
|
||||
gh pr merge "${{ steps.create-branch.outputs.branch_name }}" --squash
|
||||
AUTHOR="${{ github.actor }}"
|
||||
|
||||
if [ "$AUTHOR" != "github-actions[bot]" ]; then
|
||||
gh pr edit "${{ steps.create-branch.outputs.branch_name }}" --add-reviewer "$AUTHOR" || {
|
||||
gh pr comment "${{ steps.create-branch.outputs.branch_name }}" --body "@$AUTHOR Please review this autogenerated PR"
|
||||
}
|
||||
echo "Requested review from $AUTHOR"
|
||||
else
|
||||
echo "Skipping review request as author is github-actions bot"
|
||||
fi
|
||||
|
||||
@@ -38,6 +38,7 @@ async def step3(input: EmptyModel, ctx: Context) -> RandomSum:
|
||||
return RandomSum(sum=one + two)
|
||||
|
||||
|
||||
# 👀 cool thing!
|
||||
@dag_workflow.task(parents=[step1, step3])
|
||||
async def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:
|
||||
print(
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
# > Simple
|
||||
|
||||
from typing import Annotated
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
@@ -0,0 +1,87 @@
|
||||
import { WorkerLabelComparator } from '@hatchet-dev/typescript-sdk/protoc/workflows';
|
||||
import { hatchet } from '../hatchet-client';
|
||||
|
||||
// > AffinityWorkflow
|
||||
|
||||
const workflow = hatchet.workflow({
|
||||
name: 'affinity-workflow',
|
||||
description: 'test',
|
||||
});
|
||||
|
||||
workflow.task({
|
||||
name: 'step1',
|
||||
fn: async (_, ctx) => {
|
||||
const results: Promise<any>[] = [];
|
||||
for (let i = 0; i < 50; i++) {
|
||||
const result = await ctx.spawnWorkflow(childWorkflow.id, {});
|
||||
results.push(result.output);
|
||||
}
|
||||
console.log('Spawned 50 child workflows');
|
||||
console.log('Results:', await Promise.all(results));
|
||||
|
||||
return { step1: 'step1 results!' };
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
const childWorkflow = hatchet.workflow({
|
||||
name: 'child-affinity-workflow',
|
||||
description: 'test',
|
||||
});
|
||||
|
||||
childWorkflow.task({
|
||||
name: 'child-step1',
|
||||
desiredWorkerLabels: {
|
||||
model: {
|
||||
value: 'xyz',
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
fn: async (ctx) => {
|
||||
return { childStep1: 'childStep1 results!' };
|
||||
}
|
||||
})
|
||||
|
||||
childWorkflow.task({
|
||||
name: 'child-step2',
|
||||
desiredWorkerLabels: {
|
||||
memory: {
|
||||
value: 512,
|
||||
required: true,
|
||||
comparator: WorkerLabelComparator.LESS_THAN,
|
||||
},
|
||||
},
|
||||
fn: async (ctx) => {
|
||||
return { childStep2: 'childStep2 results!' };
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
|
||||
async function main() {
|
||||
// > AffinityWorker
|
||||
|
||||
const worker1 = await hatchet.worker('affinity-worker-1', {
|
||||
labels: {
|
||||
model: 'abc',
|
||||
memory: 1024,
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
await worker1.registerWorkflow(workflow);
|
||||
await worker1.registerWorkflow(childWorkflow);
|
||||
worker1.start();
|
||||
|
||||
const worker2 = await hatchet.worker('affinity-worker-2', {
|
||||
labels: {
|
||||
model: 'xyz',
|
||||
memory: 512,
|
||||
},
|
||||
});
|
||||
await worker2.registerWorkflow(workflow);
|
||||
await worker2.registerWorkflow(childWorkflow);
|
||||
worker2.start();
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -0,0 +1,74 @@
|
||||
import { Logger, LogLevel } from '@hatchet-dev/typescript-sdk/util/logger';
|
||||
import pino from 'pino';
|
||||
import Hatchet from '@hatchet-dev/typescript-sdk/sdk';
|
||||
import { JsonObject } from '@hatchet-dev/typescript-sdk/v1';
|
||||
|
||||
// > Create Pino logger
|
||||
const logger = pino();
|
||||
|
||||
class PinoLogger implements Logger {
|
||||
logLevel: LogLevel;
|
||||
context: string;
|
||||
|
||||
constructor(context: string, logLevel: LogLevel = 'DEBUG') {
|
||||
this.logLevel = logLevel;
|
||||
this.context = context;
|
||||
}
|
||||
|
||||
debug(message: string, extra?: JsonObject): void {
|
||||
logger.debug(extra, message);
|
||||
}
|
||||
|
||||
info(message: string, extra?: JsonObject): void {
|
||||
logger.info(extra, message);
|
||||
}
|
||||
|
||||
green(message: string, extra?: JsonObject): void {
|
||||
logger.info(extra, `%c${message}`);
|
||||
}
|
||||
|
||||
warn(message: string, error?: Error, extra?: JsonObject): void {
|
||||
logger.warn(extra, `${message} ${error}`);
|
||||
}
|
||||
|
||||
error(message: string, error?: Error, extra?: JsonObject): void {
|
||||
logger.error(extra, `${message} ${error}`);
|
||||
}
|
||||
|
||||
// optional util method
|
||||
util(key: string, message: string, extra?: JsonObject): void {
|
||||
// for example you may want to expose a trace method
|
||||
if (key === 'trace') {
|
||||
logger.info(extra, 'trace');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const hatchet = Hatchet.init({
|
||||
log_level: 'DEBUG',
|
||||
logger: (ctx, level) => new PinoLogger(ctx, level),
|
||||
});
|
||||
|
||||
|
||||
// > Use the logger
|
||||
|
||||
const workflow = hatchet.task({
|
||||
name: 'byo-logger-example',
|
||||
fn: async (ctx) => {
|
||||
for (let i = 0; i < 5; i++) {
|
||||
logger.info(`log message ${i}`);
|
||||
}
|
||||
|
||||
return { step1: 'completed step run' };
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
async function main() {
|
||||
const worker = await hatchet.worker('byo-logger-worker', {
|
||||
workflows: [workflow],
|
||||
});
|
||||
worker.start();
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -0,0 +1,40 @@
|
||||
import { hatchet } from "../hatchet-client";
|
||||
|
||||
const sleep = (ms: number) =>
|
||||
new Promise((resolve) => {
|
||||
setTimeout(resolve, ms);
|
||||
});
|
||||
|
||||
// > Logger
|
||||
|
||||
const workflow = hatchet.workflow({
|
||||
name: 'logger-example',
|
||||
description: 'test',
|
||||
on: {
|
||||
event: 'user:create',
|
||||
},
|
||||
});
|
||||
|
||||
workflow.task({
|
||||
name: 'logger-step1',
|
||||
fn: async (_, ctx) => {
|
||||
// log in a for loop
|
||||
for (let i = 0; i < 10; i++) {
|
||||
ctx.logger.info(`log message ${i}`);
|
||||
await sleep(200);
|
||||
}
|
||||
|
||||
return { step1: 'completed step run' };
|
||||
},
|
||||
})
|
||||
|
||||
|
||||
async function main() {
|
||||
const worker = await hatchet.worker('logger-worker', {
|
||||
slots: 1,
|
||||
workflows: [workflow],
|
||||
});
|
||||
await worker.start();
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -1,4 +1,3 @@
|
||||
// > Declaring a Task
|
||||
import sleep from '@hatchet-dev/typescript-sdk/util/sleep';
|
||||
import { hatchet } from '../hatchet-client';
|
||||
|
||||
|
||||
@@ -13,8 +13,7 @@
|
||||
"eslint:fix": "eslint \"{src,apps,libs,test}/**/*.{ts,tsx,js}\" --fix",
|
||||
"prettier:check": "prettier \"src/**/*.{ts,tsx}\" --list-different",
|
||||
"prettier:fix": "prettier \"src/**/*.{ts,tsx}\" --write",
|
||||
"preview": "vite preview",
|
||||
"sync-examples": "cd ../snips/ && pnpm i && pnpm generate"
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@heroicons/react": "^2.2.0",
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import React from "react";
|
||||
import { parseDocComments } from "./codeParser";
|
||||
import { Src } from "./codeData";
|
||||
import CodeStyleRender from "./CodeStyleRender";
|
||||
import { Button } from "../ui/button";
|
||||
import {
|
||||
@@ -11,6 +10,13 @@ import {
|
||||
UnfoldVertical,
|
||||
} from "lucide-react";
|
||||
|
||||
type Src = {
|
||||
raw: string;
|
||||
codePath?: string;
|
||||
githubUrl?: string;
|
||||
language?: string;
|
||||
};
|
||||
|
||||
interface CodeRendererProps {
|
||||
source: Src;
|
||||
target?: string;
|
||||
@@ -23,27 +29,28 @@ export const CodeBlock = ({ source, target }: CodeRendererProps) => {
|
||||
|
||||
const parsed = parseDocComments(source.raw, target, collapsed);
|
||||
|
||||
const canCollapse = source.raw.includes("// ...") || source.raw.includes("# ...")
|
||||
const canCollapse =
|
||||
source.raw.includes("// ...") || source.raw.includes("# ...");
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="z-10 bg-background flex flex-row gap-4 justify-between items-center pl-2 mb-2">
|
||||
<div className="flex flex-row gap-2">
|
||||
{source.githubUrl && (
|
||||
{source.githubUrl && (
|
||||
<a
|
||||
href={source.githubUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="text-xs text-gray-500 font-mono hover:underline"
|
||||
>
|
||||
{source.props?.path}
|
||||
</a>
|
||||
)}
|
||||
>
|
||||
{source.codePath}
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex gap-2 justify-end">
|
||||
{canCollapse && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
{canCollapse && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => setCollapsed(!collapsed)}
|
||||
>
|
||||
@@ -58,11 +65,11 @@ export const CodeBlock = ({ source, target }: CodeRendererProps) => {
|
||||
Collapse
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
onClick={() => {
|
||||
navigator.clipboard.writeText(parsed);
|
||||
setCopied(true);
|
||||
@@ -101,20 +108,30 @@ export const CodeBlock = ({ source, target }: CodeRendererProps) => {
|
||||
|
||||
<div className="flex flex-row mt-2 justify-between">
|
||||
<div className="flex gap-4">
|
||||
{source.githubUrl && <a href={source.githubUrl} target="_blank" rel="noopener noreferrer">
|
||||
<Button variant="outline" size="sm" className="flex flex-row gap-2">
|
||||
<svg
|
||||
height="16"
|
||||
width="16"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
{source.githubUrl && (
|
||||
<a
|
||||
href={source.githubUrl}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="sm"
|
||||
className="flex flex-row gap-2"
|
||||
>
|
||||
<path d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z" />
|
||||
</svg>
|
||||
View Full Code Example on GitHub{" "}
|
||||
<MoveUpRight className="w-3 h-3" />
|
||||
</Button>
|
||||
</a>}
|
||||
<svg
|
||||
height="16"
|
||||
width="16"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
>
|
||||
<path d="M8 0C3.58 0 0 3.58 0 8c0 3.54 2.29 6.53 5.47 7.59.4.07.55-.17.55-.38 0-.19-.01-.82-.01-1.49-2.01.37-2.53-.49-2.69-.94-.09-.23-.48-.94-.82-1.13-.28-.15-.68-.52-.01-.53.63-.01 1.08.58 1.23.82.72 1.21 1.87.87 2.33.66.07-.52.28-.87.51-1.07-1.78-.2-3.64-.89-3.64-3.95 0-.87.31-1.59.82-2.15-.08-.2-.36-1.02.08-2.12 0 0 .67-.21 2.2.82.64-.18 1.32-.27 2-.27.68 0 1.36.09 2 .27 1.53-1.04 2.2-.82 2.2-.82.44 1.1.16 1.92.08 2.12.51.56.82 1.27.82 2.15 0 3.07-1.87 3.75-3.65 3.95.29.25.54.73.54 1.48 0 1.07-.01 1.93-.01 2.2 0 .21.15.46.55.38A8.013 8.013 0 0016 8c0-4.42-3.58-8-8-8z" />
|
||||
</svg>
|
||||
View Full Code Example on GitHub{" "}
|
||||
<MoveUpRight className="w-3 h-3" />
|
||||
</Button>
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex gap-4">
|
||||
<Button
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
import React from "react";
|
||||
import { useData } from "nextra/hooks";
|
||||
import { CodeBlock } from "./CodeBlock";
|
||||
import { RepoProps, Src } from "./codeData";
|
||||
|
||||
interface GithubSnippetProps {
|
||||
src: RepoProps;
|
||||
target: string;
|
||||
}
|
||||
|
||||
export const GithubSnippet = ({ src, target }: GithubSnippetProps) => {
|
||||
const { contents } = useData();
|
||||
const snippet = contents.find((c) => c.rawUrl.endsWith(src.path)) as Src;
|
||||
|
||||
if (!snippet) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<CodeBlock
|
||||
source={{
|
||||
...src,
|
||||
...snippet,
|
||||
}}
|
||||
target={target}
|
||||
/>
|
||||
);
|
||||
};
|
||||
@@ -1,62 +1,39 @@
|
||||
import React, { useMemo } from "react";
|
||||
import React from "react";
|
||||
import { CodeBlock } from "./CodeBlock";
|
||||
import { Snippet as SnippetType } from "@/lib/snips";
|
||||
import { type Snippet as SnippetType } from "@/lib/snippet";
|
||||
|
||||
interface SnippetProps {
|
||||
src: SnippetType;
|
||||
block?: keyof SnippetType["blocks"] | "ALL";
|
||||
}
|
||||
type Language = SnippetType["language"];
|
||||
|
||||
const languageMap = {
|
||||
typescript: "ts",
|
||||
python: "py",
|
||||
go: "go",
|
||||
unknown: "txt",
|
||||
// See the list of supported languages for how to define these:
|
||||
// https://highlightjs.readthedocs.io/en/latest/supported-languages.html
|
||||
const languageToHighlightAbbreviation = (language: Language) => {
|
||||
switch (language) {
|
||||
case "python":
|
||||
return "py";
|
||||
case "typescript":
|
||||
return "ts";
|
||||
case "go":
|
||||
return "go";
|
||||
default:
|
||||
const exhaustiveCheck: never = language;
|
||||
throw new Error(`Unsupported language: ${exhaustiveCheck}`);
|
||||
}
|
||||
};
|
||||
|
||||
// This is a server component that will be rendered at build time
|
||||
export const Snippet = ({ src, block }: SnippetProps) => {
|
||||
if (!src.content) {
|
||||
throw new Error(`src content is required: ${src.source}`);
|
||||
export const Snippet = ({ src }: { src: SnippetType }) => {
|
||||
if (src === undefined) {
|
||||
throw new Error(
|
||||
"Snippet was undefined. You probably provided a path to a snippet that doesn't exist."
|
||||
);
|
||||
}
|
||||
|
||||
const language = useMemo(() => {
|
||||
const normalizedLanguage = src.language?.toLowerCase().trim();
|
||||
if (normalizedLanguage && normalizedLanguage in languageMap) {
|
||||
return languageMap[normalizedLanguage as keyof typeof languageMap];
|
||||
}
|
||||
return "txt";
|
||||
}, [src.language]);
|
||||
|
||||
let content = src.content;
|
||||
|
||||
if (block && block !== "ALL" && src.blocks) {
|
||||
if (!(block in src.blocks)) {
|
||||
throw new Error(
|
||||
`Block ${block} not found in ${src.source} ${JSON.stringify(src.blocks, null, 2)}`
|
||||
);
|
||||
}
|
||||
|
||||
const lines = src.content.split("\n");
|
||||
content = lines
|
||||
.slice(src.blocks[block].start - 1, src.blocks[block].stop)
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
const fixedSource = src.source.replace("out/", "examples/");
|
||||
|
||||
return (
|
||||
<>
|
||||
<CodeBlock
|
||||
source={{
|
||||
githubUrl: `https://github.com/hatchet-dev/hatchet/blob/main/${fixedSource}`,
|
||||
raw: content || "",
|
||||
language: language,
|
||||
props: {
|
||||
path: fixedSource,
|
||||
},
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
<CodeBlock
|
||||
source={{
|
||||
githubUrl: src.githubUrl,
|
||||
raw: src.content,
|
||||
language: languageToHighlightAbbreviation(src.language),
|
||||
codePath: src.codePath,
|
||||
}}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -1,99 +0,0 @@
|
||||
const defaultUser = "hatchet-dev";
|
||||
|
||||
const defaultRepo = "hatchet";
|
||||
|
||||
const localPaths: Record<string, string> = {
|
||||
ts: "sdks/typescript",
|
||||
py: "sdks/python",
|
||||
go: "",
|
||||
};
|
||||
|
||||
export const extToLanguage: Record<string, string> = {
|
||||
ts: "typescript",
|
||||
py: "python",
|
||||
go: "go",
|
||||
};
|
||||
|
||||
const defaultBranch = "main";
|
||||
|
||||
export type RepoProps = {
|
||||
user?: string;
|
||||
repo?: string;
|
||||
branch?: string;
|
||||
path?: string;
|
||||
};
|
||||
|
||||
const getLocalUrl = (ext: string | undefined, { path }: RepoProps) => {
|
||||
if (!ext) return `http://localhost:4001/${path}`;
|
||||
return `http://localhost:4001/${localPaths[ext] || ''}/${path}`;
|
||||
};
|
||||
|
||||
const isDev = process?.env?.NODE_ENV === "development";
|
||||
|
||||
const getRawUrl = ({ user, repo, branch, path }: RepoProps) => {
|
||||
const ext = path?.split(".").pop();
|
||||
if (isDev) {
|
||||
return getLocalUrl(ext, { path });
|
||||
}
|
||||
const extPath = ext && localPaths[ext] ? `${localPaths[ext]}/` : '';
|
||||
return `https://raw.githubusercontent.com/${user || defaultUser}/${repo || defaultRepo}/refs/heads/${branch || defaultBranch}/${extPath}${path}`;
|
||||
};
|
||||
|
||||
const getUIUrl = ({ user, repo, branch, path }: RepoProps) => {
|
||||
const ext = path?.split(".").pop();
|
||||
if (isDev) {
|
||||
return getLocalUrl(ext, { path });
|
||||
}
|
||||
const extPath = ext && localPaths[ext] ? `${localPaths[ext]}/` : '';
|
||||
return `https://github.com/${user || defaultUser}/${repo || defaultRepo}/blob/${branch || defaultBranch}/${extPath}${path}`;
|
||||
};
|
||||
|
||||
export type Src = {
|
||||
raw: string;
|
||||
props?: RepoProps;
|
||||
rawUrl?: string;
|
||||
githubUrl?: string;
|
||||
language?: string;
|
||||
};
|
||||
export const getSnippets = (
|
||||
props: RepoProps[]
|
||||
): Promise<{ props: { ssg: { contents: Src[] } } }> => {
|
||||
return Promise.all(
|
||||
props.map(async (prop) => {
|
||||
const rawUrl = getRawUrl(prop);
|
||||
const githubUrl = getUIUrl(prop);
|
||||
const fileExt = prop.path?.split(".").pop() as keyof typeof extToLanguage;
|
||||
const language = extToLanguage[fileExt];
|
||||
|
||||
try {
|
||||
const response = await fetch(rawUrl);
|
||||
const raw = await response.text();
|
||||
|
||||
return {
|
||||
raw,
|
||||
props: prop,
|
||||
rawUrl,
|
||||
githubUrl,
|
||||
language,
|
||||
};
|
||||
} catch (error) {
|
||||
// Return object with empty raw content but preserve URLs on failure
|
||||
return {
|
||||
raw: "",
|
||||
props: prop,
|
||||
rawUrl,
|
||||
githubUrl,
|
||||
language,
|
||||
};
|
||||
}
|
||||
})
|
||||
).then((results) => ({
|
||||
props: {
|
||||
ssg: {
|
||||
contents: results,
|
||||
},
|
||||
// revalidate every 60 seconds
|
||||
revalidate: 60,
|
||||
},
|
||||
}));
|
||||
};
|
||||
@@ -1,5 +1,3 @@
|
||||
export * from './GithubSnippet'
|
||||
export * from './Snippet'
|
||||
export * from './CodeBlock'
|
||||
export * from './codeData'
|
||||
export * from './codeParser'
|
||||
export * from "./Snippet";
|
||||
export * from "./CodeBlock";
|
||||
export * from "./codeParser";
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
export type Snippet = {
|
||||
title: string;
|
||||
content: string;
|
||||
githubUrl: string;
|
||||
codePath: string;
|
||||
language: 'python' | 'typescript' | 'go'
|
||||
};
|
||||
@@ -1,5 +0,0 @@
|
||||
import * as snippets from './generated/snips';
|
||||
import { Snippet as SnippetType } from './generated/snips/types';
|
||||
|
||||
export type Snippet = SnippetType;
|
||||
export default snippets;
|
||||
@@ -1,68 +0,0 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
|
||||
// Function to recursively find all MDX files
|
||||
function findMdxFiles(dir) {
|
||||
const files = fs.readdirSync(dir);
|
||||
let mdxFiles = [];
|
||||
|
||||
for (const file of files) {
|
||||
const filePath = path.join(dir, file);
|
||||
const stat = fs.statSync(filePath);
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
mdxFiles = mdxFiles.concat(findMdxFiles(filePath));
|
||||
} else if (file.endsWith('.mdx')) {
|
||||
mdxFiles.push(filePath);
|
||||
}
|
||||
}
|
||||
|
||||
return mdxFiles;
|
||||
}
|
||||
|
||||
// Function to process a single MDX file
|
||||
function processMdxFile(filePath) {
|
||||
let content = fs.readFileSync(filePath, 'utf8');
|
||||
let modified = false;
|
||||
|
||||
// Regular expression to match Snippet components
|
||||
// This will match patterns like <Snippet src={snips.typescript.simple.worker} />
|
||||
const snippetRegex = /<Snippet\s+src={([^}]+)}\s*\/>/g;
|
||||
|
||||
// Replace matches with the new format
|
||||
const newContent = content.replace(snippetRegex, (match, src) => {
|
||||
// Extract the last part of the path for the block attribute
|
||||
const pathParts = src.split('.');
|
||||
const blockName = pathParts[pathParts.length - 1];
|
||||
// Remove the last part from the src
|
||||
const newSrc = pathParts.slice(0, -1).join('.');
|
||||
|
||||
modified = true;
|
||||
return `<Snippet src={${newSrc}} block="${blockName}" />`;
|
||||
});
|
||||
|
||||
if (modified) {
|
||||
fs.writeFileSync(filePath, newContent, 'utf8');
|
||||
console.log(`Updated ${filePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Main function
|
||||
function main() {
|
||||
const docsDir = path.join(__dirname, 'pages');
|
||||
const mdxFiles = findMdxFiles(docsDir);
|
||||
|
||||
console.log(`Found ${mdxFiles.length} MDX files to process`);
|
||||
|
||||
for (const file of mdxFiles) {
|
||||
processMdxFile(file);
|
||||
}
|
||||
|
||||
console.log('Migration complete!');
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -13,7 +13,7 @@
|
||||
"lint:fix": "npm run prettier:fix",
|
||||
"prettier:check": "prettier \"pages/**/*.{tsx,mdx}\" --list-different",
|
||||
"prettier:fix": "prettier \"pages/**/*.{tsx,mdx,js}\" --write",
|
||||
"generate-examples": "cd ../snips/ && pnpm i && pnpm generate && pnpm run copy:docs"
|
||||
"generate-examples": "cd ../snippets/ && python3 generate.py"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -2,8 +2,6 @@ import DynamicLottie from "../../components/DynamicLottie";
|
||||
import { LogViewer } from "../../components/LogViewer";
|
||||
import * as prefetch from "./_celery_prefetch.json";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import { Snippet } from "@/components/code";
|
||||
import snips from "@/lib/snips";
|
||||
|
||||
# **Background Tasks: From FastAPI to Hatchet**
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import { Callout, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
import InstallCommand from "@/components/InstallCommand";
|
||||
import { Snippet } from "@/components/code";
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
|
||||
# Mergent Migration Guide
|
||||
|
||||
@@ -38,7 +38,7 @@ It is recommended to instantiate a shared Hatchet Client in a separate file as a
|
||||
|
||||
Create a new file called `hatchet-client.ts` in your project root.
|
||||
|
||||
<Snippet src={snips.typescript.hatchet_client} block="ALL" />
|
||||
<Snippet src={snippets.typescript.hatchet_client.all} />
|
||||
|
||||
You can now import the Hatchet Client in any file that needs it.
|
||||
|
||||
@@ -113,32 +113,29 @@ Let's look at an example of converting a Mergent task to Hatchet. We'll use an i
|
||||
<UniversalTabs items={["Typescript", "Python", "Go"]}>
|
||||
<Tabs.Tab title="Typescript">
|
||||
#### Before (Mergent)
|
||||
<Snippet src={snips.typescript.migration_guides.mergent} block="before_mergent" />
|
||||
<Snippet src={snippets.typescript.migration_guides.mergent.before_mergent} />
|
||||
|
||||
#### After (Hatchet)
|
||||
|
||||
<Snippet
|
||||
src={snips.typescript.migration_guides.mergent}
|
||||
block="after_hatchet"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.migration_guides.mergent.after_hatchet} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Python">
|
||||
#### Before (Mergent)
|
||||
<Snippet src={snips.python.migration_guides.mergent} block="before_mergent" />
|
||||
<Snippet src={snippets.python.migration_guides.mergent.before_mergent} />
|
||||
|
||||
#### After (Hatchet)
|
||||
|
||||
<Snippet src={snips.python.migration_guides.mergent} block="after_hatchet" />
|
||||
<Snippet src={snippets.python.migration_guides.mergent.after_hatchet} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
#### Before (Mergent)
|
||||
<Snippet src={snips.go.migration_guides.mergent} block="before_mergent" />
|
||||
<Snippet src={snippets.go.migration_guides.mergent.before_mergent} />
|
||||
|
||||
#### After (Hatchet)
|
||||
|
||||
<Snippet src={snips.go.migration_guides.mergent} block="after_hatchet" />
|
||||
<Snippet src={snippets.go.migration_guides.mergent.after_hatchet} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -162,40 +159,34 @@ In Hatchet the primary way of triggering tasks is via the SDK. This offers a ful
|
||||
#### Before (Mergent)
|
||||
|
||||
<Snippet
|
||||
src={snips.typescript.migration_guides.mergent}
|
||||
block="running_a_task_mergent"
|
||||
src={snippets.typescript.migration_guides.mergent.running_a_task_mergent}
|
||||
/>
|
||||
|
||||
#### After (Hatchet)
|
||||
|
||||
<Snippet
|
||||
src={snips.typescript.migration_guides.mergent}
|
||||
block="running_a_task_hatchet"
|
||||
src={snippets.typescript.migration_guides.mergent.running_a_task_hatchet}
|
||||
/>
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Python">
|
||||
#### Before (Mergent)
|
||||
<Snippet src={snips.python.migration_guides.mergent} block="running_a_task_mergent" />
|
||||
<Snippet src={snippets.python.migration_guides.mergent.running_a_task_mergent} />
|
||||
|
||||
The primary way of triggering tasks in Hatchet is via the SDK.
|
||||
#### After (Hatchet)
|
||||
|
||||
<Snippet
|
||||
src={snips.python.migration_guides.mergent}
|
||||
block="running_a_task_hatchet"
|
||||
src={snippets.python.migration_guides.mergent.running_a_task_hatchet}
|
||||
/>
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
#### Before (Mergent)
|
||||
<Snippet src={snips.go.migration_guides.mergent} block="running_a_task_mergent" />
|
||||
<Snippet src={snippets.go.migration_guides.mergent.running_a_task_mergent} />
|
||||
|
||||
#### After (Hatchet)
|
||||
|
||||
<Snippet
|
||||
src={snips.go.migration_guides.mergent}
|
||||
block="running_a_task_hatchet"
|
||||
/>
|
||||
<Snippet src={snippets.go.migration_guides.mergent.running_a_task_hatchet} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -209,25 +200,25 @@ You can do the same in the Hatchet dashboard or programmatically using the SDK:
|
||||
<UniversalTabs items={["Typescript", "Python", "Go"]}>
|
||||
<Tabs.Tab title="Typescript">
|
||||
#### Before (Mergent)
|
||||
<Snippet src={snips.typescript.migration_guides.mergent} block="scheduling_tasks_mergent" />
|
||||
<Snippet src={snippets.typescript.migration_guides.mergent.scheduling_tasks_mergent} />
|
||||
|
||||
#### After (Hatchet)
|
||||
|
||||
<Snippet src={snips.typescript.migration_guides.mergent} block="scheduling_tasks_hatchet" />
|
||||
<Snippet src={snippets.typescript.migration_guides.mergent.scheduling_tasks_hatchet} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Python">
|
||||
#### Before (Mergent)
|
||||
<Snippet src={snips.python.migration_guides.mergent} block="scheduling_tasks_mergent" />
|
||||
<Snippet src={snippets.python.migration_guides.mergent.scheduling_tasks_mergent} />
|
||||
|
||||
#### After (Hatchet)
|
||||
|
||||
<Snippet src={snips.python.migration_guides.mergent} block="scheduling_tasks_hatchet" />
|
||||
<Snippet src={snippets.python.migration_guides.mergent.scheduling_tasks_hatchet} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
#### After (Hatchet)
|
||||
|
||||
<Snippet src={snips.go.migration_guides.mergent} block="scheduling_tasks_hatchet" />
|
||||
<Snippet src={snippets.go.migration_guides.mergent.scheduling_tasks_hatchet} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ import DynamicLottie from "../../components/DynamicLottie";
|
||||
import * as prefetch from "./_celery_prefetch.json";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import { Snippet } from "@/components/code";
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
|
||||
# **A task queue for modern Python applications**
|
||||
|
||||
@@ -43,11 +43,11 @@ Hatchet's Python SDK has drawn inspiration from FastAPI and is similarly a Pydan
|
||||
|
||||
When working with Hatchet, you can define inputs and outputs of your tasks as Pydantic models, which the SDK will then serialize and deserialize for you internally. This means that you can write a task like this:
|
||||
|
||||
<Snippet src={snips.python.child.worker} block="simple" />
|
||||
<Snippet src={snippets.python.child.worker.simple} />
|
||||
|
||||
In this example, we've defined a single Hatchet task that takes a Pydantic model as input, and returns a Pydantic model as output. This means that if you want to trigger this task from somewhere else in your codebase, you can do something like this:
|
||||
|
||||
<Snippet src={snips.python.child.trigger} block="running_a_task" />
|
||||
<Snippet src={snippets.python.child.trigger.running_a_task} />
|
||||
|
||||
The different flavors of `.run` methods are type-safe: The input is typed and can be statically type checked, and is also validated by Pydantic at runtime. This means that when triggering tasks, you don't need to provide a set of untyped positional or keyword arguments, like you might if using Celery.
|
||||
|
||||
@@ -57,7 +57,7 @@ The different flavors of `.run` methods are type-safe: The input is typed and ca
|
||||
|
||||
You can also _schedule_ a task for the future (similar to Celery's `eta` or `countdown` features) using the `.schedule` method:
|
||||
|
||||
<Snippet src={snips.python.child.trigger} block="schedule_a_task" />
|
||||
<Snippet src={snippets.python.child.trigger.schedule_a_task} />
|
||||
|
||||
Importantly, Hatchet will not hold scheduled tasks in memory, so it's perfectly safe to schedule tasks for arbitrarily far in the future.
|
||||
|
||||
@@ -65,7 +65,7 @@ Importantly, Hatchet will not hold scheduled tasks in memory, so it's perfectly
|
||||
|
||||
Finally, Hatchet also has first-class support for cron jobs. You can either create crons dynamically:
|
||||
|
||||
<Snippet src={snips.python.cron.programatic_async} block="create" />
|
||||
<Snippet src={snippets.python.cron.programatic_async.create} />
|
||||
|
||||
Or you can define them declaratively when you create your workflow:
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { LogViewer } from "../../components/LogViewer";
|
||||
import * as prefetch from "./_celery_prefetch.json";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import { Snippet } from "@/components/code";
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
|
||||
# **Warning! The Event Loop May Be Blocked**
|
||||
|
||||
@@ -48,11 +48,11 @@ Importantly, using e.g. `requests.get` to make an API call also (confusingly) fa
|
||||
|
||||
Let's give a simple example, which we'll come back to later as a helpful debugging strategy. We'll first write two functions:
|
||||
|
||||
<Snippet src={snips.python.blocked_async.debugging} block="functions" />
|
||||
<Snippet src={snippets.python.blocked_async.debugging.functions} />
|
||||
|
||||
And let's run these concurrently with `asyncio.gather` and `asyncio.create_task`:
|
||||
|
||||
<Snippet src={snips.python.blocked_async.debugging} block="blocked" />
|
||||
<Snippet src={snippets.python.blocked_async.debugging.blocked} />
|
||||
|
||||
If you run this code, you'll see logs like this:
|
||||
|
||||
@@ -67,7 +67,7 @@ Non-blocking 2
|
||||
|
||||
On the other hand, you can run two tasks running the non-blocking function concurrently as you'd expect:
|
||||
|
||||
<Snippet src={snips.python.blocked_async.debugging} block="unblocked" />
|
||||
<Snippet src={snippets.python.blocked_async.debugging.unblocked} />
|
||||
|
||||
Which results in the logs below. Note that the output from the two tasks, `A` and `B`, are interleaved, indicating that they're correctly running concurrently.
|
||||
|
||||
@@ -94,19 +94,13 @@ Some common (and some less common) examples of blocking operations might include
|
||||
|
||||
In each of these cases, while this work is happening, no other async work on your Hatchet workers will be able to progress. We see some interesting and scary behavior if we run some blocking code in Hatchet. We'll share some ideas for how to work around each of these blocking operations below.
|
||||
|
||||
<Snippet
|
||||
src={snips.python.blocked_async.blocking_example_worker}
|
||||
block="worker"
|
||||
/>
|
||||
<Snippet src={snippets.python.blocked_async.blocking_example_worker.worker} />
|
||||
|
||||
Here we define a few tasks, one which is async and does blocking work (`time.sleep`), one which is sync and does blocking work (`time.sleep`), and one that is async and does non-blocking work (`asyncio.sleep`).
|
||||
|
||||
As an experiment, we can run them as follows to simulate what might happen in a production environment:
|
||||
|
||||
<Snippet
|
||||
src={snips.python.blocked_async.blocking_example_trigger}
|
||||
block="trigger"
|
||||
/>
|
||||
<Snippet src={snippets.python.blocked_async.blocking_example_trigger.trigger} />
|
||||
|
||||
The intention of this example is to first kick off the non-blocking sync and async tasks, let them start to process, then kick off the blocking task, let it start to process, and finally kick off the non-blocking sync task again, and then let all of them complete. The worker logs are illustrative:
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
import { Snippet } from "@/components/code";
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
|
||||
# Bulk Cancellations and Replays
|
||||
|
||||
@@ -28,15 +28,15 @@ The first way to bulk cancel or replay runs is by providing a list of run ids. T
|
||||
|
||||
First, we'll start by fetching a task via the REST API.
|
||||
|
||||
<Snippet src={snips.python.bulk_operations.cancel} block="setup" />
|
||||
<Snippet src={snippets.python.bulk_operations.cancel.setup} />
|
||||
|
||||
Now that we have a task, we'll get runs for it, so that we can use them to bulk cancel by run id.
|
||||
|
||||
<Snippet src={snips.python.bulk_operations.cancel} block="list_runs" />
|
||||
<Snippet src={snippets.python.bulk_operations.cancel.list_runs} />
|
||||
|
||||
And finally, we can cancel the runs in bulk.
|
||||
|
||||
<Snippet src={snips.python.bulk_operations.cancel} block="cancel_by_run_ids" />
|
||||
<Snippet src={snippets.python.bulk_operations.cancel.cancel_by_run_ids} />
|
||||
|
||||
<Callout type="info">
|
||||
Note that the Python SDK also exposes async versions of each of these methods:
|
||||
@@ -59,15 +59,15 @@ The first way to bulk cancel or replay runs is by providing a list of run ids. T
|
||||
|
||||
First, we'll start by fetching a task via the REST API.
|
||||
|
||||
<Snippet src={snips.go.bulk_operations.main} block="setup" />
|
||||
<Snippet src={snippets.go.bulk_operations.main.setup} />
|
||||
|
||||
Now that we have a task, we'll get runs for it, so that we can use them to bulk cancel by run id.
|
||||
|
||||
<Snippet src={snips.go.bulk_operations.main} block="list_runs" />
|
||||
<Snippet src={snippets.go.bulk_operations.main.list_runs} />
|
||||
|
||||
And finally, we can cancel the runs in bulk.
|
||||
|
||||
<Snippet src={snips.go.bulk_operations.main} block="cancel_by_run_ids" />
|
||||
<Snippet src={snippets.go.bulk_operations.main.cancel_by_run_ids} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -82,7 +82,7 @@ The second way to bulk cancel or replay runs is by providing a list of filters.
|
||||
|
||||
The example below provides some filters you might use to cancel or replay runs in bulk. Importantly, these filters are very similar to the filters you can use in the Hatchet Dashboard to filter which task runs are displaying.
|
||||
|
||||
<Snippet src={snips.python.bulk_operations.cancel} block="cancel_by_filters" />
|
||||
<Snippet src={snippets.python.bulk_operations.cancel.cancel_by_filters} />
|
||||
|
||||
Running this request will cancel all task runs matching the filters provided.
|
||||
|
||||
@@ -94,7 +94,7 @@ The second way to bulk cancel or replay runs is by providing a list of filters.
|
||||
|
||||
The example below provides some filters you might use to cancel or replay runs in bulk. Importantly, these filters are very similar to the filters you can use in the Hatchet Dashboard to filter which task runs are displaying.
|
||||
|
||||
<Snippet src={snips.go.bulk_operations.main} block="cancel_by_filters" />
|
||||
<Snippet src={snippets.go.bulk_operations.main.cancel_by_filters} />
|
||||
|
||||
Running this request will cancel all task runs matching the filters provided.
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
import { Snippet } from "@/components/code";
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
|
||||
# Bulk Run Many Tasks
|
||||
|
||||
@@ -12,7 +12,7 @@ Often you may want to run a task multiple times with different inputs. There is
|
||||
|
||||
You can use the `aio_run_many` method to bulk run a task. This will return a list of results.
|
||||
|
||||
<Snippet src={snips.python.child.bulk} block="bulk_run_a_task" />
|
||||
<Snippet src={snippets.python.child.bulk.bulk_run_a_task} />
|
||||
|
||||
<Callout type="info">
|
||||
`Workflow.create_bulk_run_item` is a typed helper to create the inputs for
|
||||
@@ -36,7 +36,7 @@ As with the run methods, you can call bulk methods from within a task and the ru
|
||||
|
||||
You can use the `run` method directly to bulk run tasks by passing an array of inputs. This will return a list of results.
|
||||
|
||||
<Snippet src={snips.typescript.simple.bulk} block="bulk_run_a_task" />
|
||||
<Snippet src={snippets.typescript.simple.bulk.bulk_run_a_task} />
|
||||
|
||||
There are additional bulk methods available on the `Task` object.
|
||||
|
||||
@@ -46,8 +46,7 @@ There are additional bulk methods available on the `Task` object.
|
||||
As with the run methods, you can call bulk methods on the task fn context parameter within a task and the runs will be associated with the parent task in the dashboard.
|
||||
|
||||
<Snippet
|
||||
src={snips.typescript.simple.bulk}
|
||||
block="bulk_run_tasks_from_within_a_task"
|
||||
src={snippets.typescript.simple.bulk.bulk_run_tasks_from_within_a_task}
|
||||
/>
|
||||
|
||||
Available bulk methods on the `Context` object are: - `bulkRunChildren` - `bulkRunChildrenNoWait`
|
||||
@@ -57,7 +56,7 @@ Available bulk methods on the `Context` object are: - `bulkRunChildren` - `bulkR
|
||||
|
||||
You can use the `RunBulkNoWait` method directly on the `Task` object to bulk run tasks by passing an array of inputs. This will return a list of run IDs.
|
||||
|
||||
<Snippet src={snips.go.run.bulk} block="bulk_run_tasks" />
|
||||
<Snippet src={snippets.go.run.bulk.bulk_run_tasks} />
|
||||
|
||||
Additional bulk methods are coming soon for the Go SDK. Join our [Discord](https://hatchet.run/discord) to stay up to date.
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
import { Snippet } from "@/components/code";
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
|
||||
# Cancellation in Hatchet Tasks
|
||||
|
||||
@@ -14,26 +14,28 @@ When a task is canceled, Hatchet sends a cancellation signal to the task. The ta
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet
|
||||
src={snips.python.cancellation.worker}
|
||||
block="checking_exit_flag"
|
||||
src={snippets.python.cancellation.worker.checking_exit_flag}
|
||||
|
||||
/>
|
||||
<Snippet
|
||||
src={snips.python.cancellation.worker}
|
||||
block="self_cancelling_task"
|
||||
src={snippets.python.cancellation.worker.self_cancelling_task}
|
||||
|
||||
/>
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.cancellations.workflow}
|
||||
block="declaring_a_task"
|
||||
src={snippets.typescript.cancellations.workflow.declaring_a_task}
|
||||
|
||||
/>
|
||||
<Snippet
|
||||
src={snips.typescript.cancellations.workflow}
|
||||
block="abort_signal"
|
||||
src={snippets.typescript.cancellations.workflow.abort_signal}
|
||||
|
||||
/>
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.workflows.cancellations} block="cancelled_task" />
|
||||
<Snippet src={snippets.go.workflows.cancellations.cancelled_task}/>
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
import { Snippet } from "@/components/code";
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
|
||||
# Procedural Child Task Spawning
|
||||
|
||||
@@ -21,11 +21,11 @@ To implement child task spawning, you first need to create both parent and child
|
||||
|
||||
First, we'll declare a couple of tasks for the parent and child:
|
||||
|
||||
<Snippet src={snips.python.fanout.worker} block="fanoutparent" />
|
||||
<Snippet src={snippets.python.fanout.worker.fanout_parent} />
|
||||
|
||||
We also created a step on the parent task that spawns the child tasks. Now, we'll add a couple of steps to the child task:
|
||||
|
||||
<Snippet src={snips.python.fanout.worker} block="fanoutchild" />
|
||||
<Snippet src={snippets.python.fanout.worker.fanout_child} />
|
||||
|
||||
And that's it! The fanout parent will run and spawn the child, and then will collect the results from its steps.
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
import { Snippet } from "@/components/code";
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
|
||||
# Concurrency Control in Hatchet Tasks
|
||||
|
||||
@@ -40,20 +40,23 @@ In addition to setting concurrency limits at the task level, you can also contro
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab>
|
||||
<Snippet
|
||||
src={snips.python.concurrency_limit_rr.worker}
|
||||
block="concurrency_strategy_with_key"
|
||||
src={
|
||||
snippets.python.concurrency_limit_rr.worker
|
||||
.concurrency_strategy_with_key
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet
|
||||
src={snips.typescript.concurrency_rr.workflow}
|
||||
block="concurrency_strategy_with_key"
|
||||
src={
|
||||
snippets.typescript.concurrency_rr.workflow
|
||||
.concurrency_strategy_with_key
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet
|
||||
src={snips.go.workflows.concurrency_rr}
|
||||
block="concurrency_strategy_with_key"
|
||||
src={snippets.go.workflows.concurrency_rr.concurrency_strategy_with_key}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -125,20 +128,22 @@ You can also combine multiple concurrency strategies to create a more complex co
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab>
|
||||
<Snippet
|
||||
src={snips.python.concurrency_workflow_level.worker}
|
||||
block="multiple_concurrency_keys"
|
||||
src={
|
||||
snippets.python.concurrency_workflow_level.worker
|
||||
.multiple_concurrency_keys
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet
|
||||
src={snips.typescript.concurrency_rr.workflow}
|
||||
block="multiple_concurrency_keys"
|
||||
src={
|
||||
snippets.typescript.concurrency_rr.workflow.multiple_concurrency_keys
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet
|
||||
src={snips.go.workflows.concurrency_rr}
|
||||
block="multiple_concurrency_keys"
|
||||
src={snippets.go.workflows.concurrency_rr.multiple_concurrency_keys}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -69,19 +69,18 @@ To get started, let's declare the workflow.
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.conditions.worker} block="create_a_workflow" />
|
||||
<Snippet src={snippets.python.conditions.worker.create_a_workflow} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.dag_match_condition.complex_workflow}
|
||||
block="create_a_workflow"
|
||||
src={
|
||||
snippets.typescript.dag_match_condition.complex_workflow
|
||||
.create_a_workflow
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet
|
||||
src={snips.go.workflows.complex_conditions}
|
||||
block="create_a_workflow"
|
||||
/>
|
||||
<Snippet src={snippets.go.workflows.complex_conditions.create_a_workflow} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -89,19 +88,17 @@ Next, we'll start adding tasks to our workflow. First, we'll add a basic task th
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.conditions.worker} block="add_base_task" />
|
||||
<Snippet src={snippets.python.conditions.worker.add_base_task} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.dag_match_condition.complex_workflow}
|
||||
block="add_base_task"
|
||||
src={
|
||||
snippets.typescript.dag_match_condition.complex_workflow.add_base_task
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet
|
||||
src={snips.go.workflows.complex_conditions}
|
||||
block="add_base_task"
|
||||
/>
|
||||
<Snippet src={snippets.go.workflows.complex_conditions.add_base_task} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -109,18 +106,19 @@ Next, we'll add a task to the workflow that's a child of the first task, but it
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.conditions.worker} block="add_wait_for_sleep" />
|
||||
<Snippet src={snippets.python.conditions.worker.add_wait_for_sleep} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.dag_match_condition.complex_workflow}
|
||||
block="add_wait_for_sleep"
|
||||
src={
|
||||
snippets.typescript.dag_match_condition.complex_workflow
|
||||
.add_wait_for_sleep
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet
|
||||
src={snips.go.workflows.complex_conditions}
|
||||
block="add_wait_for_sleep"
|
||||
src={snippets.go.workflows.complex_conditions.add_wait_for_sleep}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -131,19 +129,18 @@ Next, we'll add a task that will be skipped on an event:
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.conditions.worker} block="add_skip_on_event" />
|
||||
<Snippet src={snippets.python.conditions.worker.add_skip_on_event} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.dag_match_condition.complex_workflow}
|
||||
block="add_skip_on_event"
|
||||
src={
|
||||
snippets.typescript.dag_match_condition.complex_workflow
|
||||
.add_skip_on_event
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet
|
||||
src={snips.go.workflows.complex_conditions}
|
||||
block="add_skip_on_event"
|
||||
/>
|
||||
<Snippet src={snippets.go.workflows.complex_conditions.add_skip_on_event} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -153,19 +150,17 @@ Next, let's add some branching logic. Here we'll add two more tasks, a left and
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.conditions.worker} block="add_branching" />
|
||||
<Snippet src={snippets.python.conditions.worker.add_branching} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.dag_match_condition.complex_workflow}
|
||||
block="add_branching"
|
||||
src={
|
||||
snippets.typescript.dag_match_condition.complex_workflow.add_branching
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet
|
||||
src={snips.go.workflows.complex_conditions}
|
||||
block="add_branching"
|
||||
/>
|
||||
<Snippet src={snippets.go.workflows.complex_conditions.add_branching} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -175,18 +170,19 @@ Next, we'll add a task that waits for an event:
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.conditions.worker} block="add_wait_for_event" />
|
||||
<Snippet src={snippets.python.conditions.worker.add_wait_for_event} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.dag_match_condition.complex_workflow}
|
||||
block="add_wait_for_event"
|
||||
src={
|
||||
snippets.typescript.dag_match_condition.complex_workflow
|
||||
.add_wait_for_event
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet
|
||||
src={snips.go.workflows.complex_conditions}
|
||||
block="add_wait_for_event"
|
||||
src={snippets.go.workflows.complex_conditions.add_wait_for_event}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -195,16 +191,16 @@ And finally, we'll add the last task, which collects all of its parents and sums
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.conditions.worker} block="add_sum" />
|
||||
<Snippet src={snippets.python.conditions.worker.add_sum}/>
|
||||
|
||||
Note that in this task, we rely on `ctx.was_skipped` to determine if a task was skipped.
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.dag_match_condition.complex_workflow} block="add_sum" />
|
||||
<Snippet src={snippets.typescript.dag_match_condition.complex_workflow.add_sum}/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.workflows.complex_conditions} block="add_sum" />
|
||||
<Snippet src={snippets.go.workflows.complex_conditions.add_sum}/>
|
||||
</Tabs.Tab>
|
||||
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Tabs, Callout } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -54,26 +54,26 @@ You can define a task with a cron schedule by configuring the cron expression as
|
||||
<UniversalTabs items={["Python-Sync", "Python-Async", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python-Sync">
|
||||
<Snippet
|
||||
src={snips.python.cron.workflow_definition}
|
||||
block="workflow_definition_cron_trigger"
|
||||
src={
|
||||
snippets.python.cron.workflow_definition
|
||||
.workflow_definition_cron_trigger
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Python-Async">
|
||||
<Snippet
|
||||
src={snips.python.cron.workflow_definition}
|
||||
block="workflow_definition_cron_trigger"
|
||||
src={
|
||||
snippets.python.cron.workflow_definition
|
||||
.workflow_definition_cron_trigger
|
||||
}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.on_cron.workflow}
|
||||
block="run_workflow_on_cron"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.on_cron.workflow.run_workflow_on_cron} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet
|
||||
src={snips.go.workflows.on_cron}
|
||||
block="workflow_definition_cron_trigger"
|
||||
src={snippets.go.workflows.on_cron.workflow_definition_cron_trigger}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -96,16 +96,16 @@ Here's an example of creating a cron to trigger a report for a specific customer
|
||||
|
||||
<UniversalTabs items={["Python-Sync", "Python-Async", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python-Sync">
|
||||
<Snippet src={snips.python.cron.programatic_sync} block="create" />
|
||||
<Snippet src={snippets.python.cron.programatic_sync.create} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Python-Async">
|
||||
<Snippet src={snips.python.cron.programatic_async} block="create" />
|
||||
<Snippet src={snippets.python.cron.programatic_async.create} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.simple.cron} block="create" />
|
||||
<Snippet src={snippets.typescript.simple.cron.create} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.run.cron} block="create" />
|
||||
<Snippet src={snippets.go.run.cron.create} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -124,16 +124,16 @@ You can delete a cron trigger by passing the cron object or a cron trigger id to
|
||||
|
||||
<UniversalTabs items={["Python-Sync", "Python-Async", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python-Sync">
|
||||
<Snippet src={snips.python.cron.programatic_sync} block="delete" />
|
||||
<Snippet src={snippets.python.cron.programatic_sync.delete} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Python-Async">
|
||||
<Snippet src={snips.python.cron.programatic_async} block="delete" />
|
||||
<Snippet src={snippets.python.cron.programatic_async.delete} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.simple.cron} block="delete" />
|
||||
<Snippet src={snippets.typescript.simple.cron.delete} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.run.cron} block="delete" />
|
||||
<Snippet src={snippets.go.run.cron.delete} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -149,16 +149,16 @@ Retrieves a list of all task cron triggers matching the criteria.
|
||||
|
||||
<UniversalTabs items={["Python-Sync", "Python-Async", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python-Sync">
|
||||
<Snippet src={snips.python.cron.programatic_sync} block="list" />
|
||||
<Snippet src={snippets.python.cron.programatic_sync.list} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Python-Async">
|
||||
<Snippet src={snips.python.cron.programatic_async} block="list" />
|
||||
<Snippet src={snippets.python.cron.programatic_async.list} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.simple.cron} block="list" />
|
||||
<Snippet src={snippets.typescript.simple.cron.list} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.run.cron} block="list" />
|
||||
<Snippet src={snippets.go.run.cron.list} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -49,7 +49,7 @@ export const simple = hatchet.workflow<DagInput, DagOutput>({
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
<Snippet src={snips.go.workflows.dag} block="declaring_a_workflow" />
|
||||
<Snippet src={snippets.go.workflows.dag.declaring_a_workflow} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -107,7 +107,7 @@ asynchronous.
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
<Snippet src={snips.go.workflows.dag} block="defining_a_task" />
|
||||
<Snippet src={snippets.go.workflows.dag.defining_a_task} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -161,7 +161,7 @@ dag.task({
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
<Snippet src={snips.go.workflows.dag} block="adding_a_task_with_a_parent" />
|
||||
<Snippet src={snippets.go.workflows.dag.adding_a_task_with_a_parent} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -24,8 +24,7 @@ Since dependencies are run before tasks are executed, having many dependencies (
|
||||
To add dependencies to your tasks, import `Depends` from the `hatchet_sdk`. Then:
|
||||
|
||||
<Snippet
|
||||
src={snips.python.dependency_injection.worker}
|
||||
block="declare_dependencies"
|
||||
src={snippets.python.dependency_injection.worker.declare_dependencies}
|
||||
/>
|
||||
|
||||
In this example, we've declared two dependencies: one synchronous and one asynchronous. You can do anything you like in your dependencies, such as creating database sessions, managing configuration, sharing instances of service-layer logic, and more.
|
||||
@@ -33,8 +32,7 @@ In this example, we've declared two dependencies: one synchronous and one asynch
|
||||
Once you've defined your dependency functions, inject them into your tasks as follows:
|
||||
|
||||
<Snippet
|
||||
src={snips.python.dependency_injection.worker}
|
||||
block="inject_dependencies"
|
||||
src={snippets.python.dependency_injection.worker.inject_dependencies}
|
||||
/>
|
||||
|
||||
<Callout type="warning" emoji="⚠️">
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -14,17 +14,17 @@ Durable events are declared using the context method `WaitFor` (or utility metho
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
|
||||
<Snippet src={snips.python.durable_event.worker} block="durable_event" />
|
||||
<Snippet src={snippets.python.durable_event.worker.durable_event} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
|
||||
<Snippet src={snips.typescript.durable_event.workflow} block="durable_event" />
|
||||
<Snippet src={snippets.typescript.durable_event.workflow.durable_event} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
<Snippet src={snips.go.workflows.durable_event} block="durable_event" />
|
||||
<Snippet src={snippets.go.workflows.durable_event.durable_event} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -36,26 +36,19 @@ Durable events can be filtered using [CEL](https://github.com/google/cel-spec) e
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
|
||||
<Snippet
|
||||
src={snips.python.durable_event.worker}
|
||||
block="durable_event_with_filter"
|
||||
/>
|
||||
<Snippet src={snippets.python.durable_event.worker.durable_event_with_filter} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
|
||||
<Snippet
|
||||
src={snips.typescript.durable_event.workflow}
|
||||
block="durable_event_with_filter"
|
||||
src={snippets.typescript.durable_event.workflow.durable_event_with_filter}
|
||||
/>
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
<Snippet
|
||||
src={snips.go.workflows.durable_event}
|
||||
block="durable_event_with_filter"
|
||||
/>
|
||||
<Snippet src={snippets.go.workflows.durable_event.durable_event_with_filter} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
|
||||
import { Callout } from "nextra/components";
|
||||
@@ -25,11 +25,11 @@ Tasks that are declared as being durable (using `durable_task` instead of `task`
|
||||
|
||||
Now that we know a bit about how Hatchet handles durable execution, let's build a task. We'll start by declaring a task that will run durably, on the "durable worker".
|
||||
|
||||
<Snippet src={snips.python.durable.worker} block="create_a_durable_workflow" />
|
||||
<Snippet src={snippets.python.durable.worker.create_a_durable_workflow} />
|
||||
|
||||
Here, we've declared a Hatchet task just like any other. Now, we can add some tasks to it:
|
||||
|
||||
<Snippet src={snips.python.durable.worker} block="add_durable_task" />
|
||||
<Snippet src={snippets.python.durable.worker.add_durable_task} />
|
||||
|
||||
We've added two tasks to our workflow. The first is a normal, "ephemeral" task, which does not leverage any of Hatchet's durable features.
|
||||
|
||||
@@ -51,6 +51,5 @@ If this task is interrupted at any time, it will continue from where it left off
|
||||
Similarly to in [conditional workflows](./conditional-workflows.mdx#or-groups), durable tasks can also use or groups in the wait conditions they use. For example, you could wait for either an event or a sleep (whichever comes first) like this:
|
||||
|
||||
<Snippet
|
||||
src={snips.python.durable.worker}
|
||||
block="add_durable_tasks_that_wait_for_or_groups"
|
||||
src={snippets.python.durable.worker.add_durable_tasks_that_wait_for_or_groups}
|
||||
/>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -16,17 +16,17 @@ Durable sleep can be used by calling the `SleepFor` method on the `DurableContex
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
|
||||
<Snippet src={snips.python.durable_sleep.worker} block="durable_sleep" />
|
||||
<Snippet src={snippets.python.durable_sleep.worker.durable_sleep} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
|
||||
<Snippet src={snips.typescript.durable_sleep.workflow} block="durable_sleep" />
|
||||
<Snippet src={snippets.typescript.durable_sleep.workflow.durable_sleep} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
<Snippet src={snips.go.workflows.durable_sleep} block="durable_sleep" />
|
||||
<Snippet src={snippets.go.workflows.durable_sleep.durable_sleep} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -24,7 +24,7 @@ Hatchet's Python SDK allows you define a **_lifespan_**, which is an async gener
|
||||
|
||||
To use Hatchet's `lifespan` feature, define an async generator and pass it into your `worker`:
|
||||
|
||||
<Snippet src={snips.python.lifespans.worker} block="define_a_lifespan" />
|
||||
<Snippet src={snippets.python.lifespans.worker.define_a_lifespan} />
|
||||
|
||||
When the worker starts, it will run the lifespan up to the `yield`. Then, on worker shutdown, it will clean up by running everything after the `yield` (the same as with any other generator).
|
||||
|
||||
@@ -34,10 +34,7 @@ When the worker starts, it will run the lifespan up to the `yield`. Then, on wor
|
||||
|
||||
Then, to use your lifespan in a task, you can extract it from the context with `Context.lifespan`.
|
||||
|
||||
<Snippet
|
||||
src={snips.python.lifespans.worker}
|
||||
block="use_the_lifespan_in_a_task"
|
||||
/>
|
||||
<Snippet src={snippets.python.lifespans.worker.use_the_lifespan_in_a_task} />
|
||||
|
||||
<Callout type="info" emoji="💡">
|
||||
For type checking, cast the `Context.lifespan` to whatever type your lifespan
|
||||
|
||||
@@ -1,20 +1,8 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { FileTree } from "nextra/components";
|
||||
import { Tabs, Callout } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
import { GithubSnippet, getSnippets } from "@/components/code";
|
||||
|
||||
export const LoggerWorkflowTs = {
|
||||
path: "src/examples/logger.ts",
|
||||
};
|
||||
|
||||
export const ByoLoggerTs = {
|
||||
path: "src/examples/byo-logger.ts",
|
||||
};
|
||||
|
||||
export const getStaticProps = ({}) =>
|
||||
getSnippets([LoggerWorkflowTs, ByoLoggerTs]);
|
||||
|
||||
# Logging
|
||||
|
||||
@@ -29,7 +17,7 @@ You can use either Python's built-in `logging` package, or the `context.log` met
|
||||
|
||||
You can pass a custom logger to the `Hatchet` class when initializing it. For example:
|
||||
|
||||
<Snippet src={snips.python.logger.client} block="rootlogger" />
|
||||
<Snippet src={snippets.python.logger.client.root_logger} />
|
||||
|
||||
It's recommended that you pass the root logger to the `Hatchet` class, as this will ensure that all logs are captured by the Hatchet logger. If you have workflows defined in multiple files, they should be children of the root logger. For example, with the following file structure:
|
||||
|
||||
@@ -43,17 +31,17 @@ It's recommended that you pass the root logger to the `Hatchet` class, as this w
|
||||
|
||||
You should pass the root logger to the `Hatchet` class in `client.py`:
|
||||
|
||||
<Snippet src={snips.python.logger.client} block="rootlogger" />
|
||||
<Snippet src={snippets.python.logger.client.root_logger} />
|
||||
|
||||
And then in `workflows/workflow.py`, you should create a child logger:
|
||||
|
||||
<Snippet src={snips.python.logger.workflow} block="loggingworkflow" />
|
||||
<Snippet src={snippets.python.logger.workflow.logging_workflow} />
|
||||
|
||||
## Using the `context.log` method
|
||||
|
||||
You can also use the `context.log` method to log messages from your workflows. This method is available on the `Context` object that is passed to each task in your workflow. For example:
|
||||
|
||||
<Snippet src={snips.python.logger.workflow} block="contextlogger" />
|
||||
<Snippet src={snippets.python.logger.workflow.context_logger} />
|
||||
|
||||
Each task is currently limited to 1000 log lines.
|
||||
|
||||
@@ -63,17 +51,17 @@ Each task is currently limited to 1000 log lines.
|
||||
|
||||
In TypeScript, there are two options for logging from your tasks. The first is to use the `ctx.log()` method (from the `Context`) to send logs:
|
||||
|
||||
<GithubSnippet src={LoggerWorkflowTs} target="Logger" />
|
||||
<Snippet src={snippets.typescript.logging.logger.logger} />
|
||||
|
||||
This has the benefit of being easy to use out of the box (no setup required!), but it's limited in its flexibiliy and how pluggable it is with your existing logging setup.
|
||||
|
||||
Hatchet also allows you to "bring your own" logger when you define a workflow:
|
||||
|
||||
<GithubSnippet src={ByoLoggerTs} target="Create Pino logger" />
|
||||
<Snippet src={snippets.typescript.logging.byo_logger.create_pino_logger} />
|
||||
|
||||
In this example, we create Pino logger that implement's Hatchet's `Logger` interface and pass it to the Hatchet client constructor. We can then use that logger in our steps:
|
||||
|
||||
<GithubSnippet src={ByoLoggerTs} target="Use the logger" />
|
||||
<Snippet src={snippets.typescript.logging.logger.logger} />
|
||||
|
||||
</Tabs.Tab>
|
||||
{/* <Tabs.Tab>
|
||||
|
||||
@@ -1,14 +1,7 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
import { GithubSnippet, getSnippets } from "@/components/code";
|
||||
|
||||
export const SlotReleaseTS = {
|
||||
path: "src/examples/manual_slot_release.ts",
|
||||
};
|
||||
|
||||
export const getStaticProps = ({}) => getSnippets([SlotReleaseTS]);
|
||||
|
||||
# Manual Slot Release
|
||||
|
||||
@@ -28,19 +21,14 @@ In some cases, you may have a task in your workflow that is resource-intensive a
|
||||
|
||||
You can manually release a slot in from within a running task in your workflow using the Hatchet context method `release_slot`:
|
||||
|
||||
<UniversalTabs items={['Python', 'Typescript']}>
|
||||
<UniversalTabs items={['Python']}>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.python.manual_slot_release.worker} block="slotrelease" />
|
||||
<Snippet src={snippets.python.manual_slot_release.worker.slot_release} />
|
||||
|
||||
</Tabs.Tab>
|
||||
|
||||
<Tabs.Tab>
|
||||
|
||||
<GithubSnippet src={SlotReleaseTS} target="SlotRelease" />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
```go
|
||||
|
||||
func StepOne(ctx worker.HatchetContext) (result \*taskOneOutput, err error) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
|
||||
## Hatchet Python V1 Migration Guide
|
||||
@@ -9,7 +9,7 @@ This guide will help you migrate Hatchet workflows from the V0 SDK to the V1 SDK
|
||||
|
||||
First, a simple example of how to define a task with the V1 SDK:
|
||||
|
||||
<Snippet src={snips.python.simple.worker} block="simple" />
|
||||
<Snippet src={snippets.python.simple.worker.simple} />
|
||||
|
||||
The API has changed significantly in the V1 SDK. Even in this simple example, there are some notable highlights:
|
||||
|
||||
@@ -21,7 +21,7 @@ The API has changed significantly in the V1 SDK. Even in this simple example, th
|
||||
|
||||
Hatchet's V1 SDK makes heavy use of Pydantic models, and recommends you do too! Let's dive into a more involved example using Pydantic in a fanout example.
|
||||
|
||||
<Snippet src={snips.python.fanout.worker} block="fanoutparent" />
|
||||
<Snippet src={snippets.python.fanout.worker.fanout_parent} />
|
||||
|
||||
In this example, we use a few more new SDK features:
|
||||
|
||||
@@ -30,7 +30,7 @@ In this example, we use a few more new SDK features:
|
||||
3. When we want to spawn the child workflow, we can use the `run` methods on the `child_workflow` object, which is a Hatchet `Workflow`, instead of needing to refer to the workflow by its name (a string). The `input` field to `run()` is now also properly typed as `ChildInput`.
|
||||
4. The child workflow (see below) makes use of some of Hatchet's DAG features, such as defining parent tasks. In the new SDK, `parents` of a task are defined as a list of `Task` objects as opposed to as a list of strings, so now, `process2` has `process` (the `Task`) as its parent, as opposed to `"process"` (the string). This also allows us to use `ctx.task_output(process)` to access the output of the `process` task in the `process2` task, and know the type of that output at type checking time.
|
||||
|
||||
<Snippet src={snips.python.fanout.worker} block="fanoutchild" />
|
||||
<Snippet src={snippets.python.fanout.worker.fanout_child} />
|
||||
|
||||
See our [Pydantic documentation](./pydantic.mdx) for more.
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
|
||||
import { Callout } from "nextra/components";
|
||||
@@ -11,15 +11,15 @@ This guide will help you migrate Hatchet workflows from the V0 SDK to the V1 SDK
|
||||
|
||||
First, we've exposed a new `hatchet.task` method in the V1 SDK for single function tasks.
|
||||
|
||||
<Snippet src={snips.typescript.simple.workflow} block="ALL" />
|
||||
<Snippet src={snippets.typescript.simple.workflow.declaring_a_task} />
|
||||
|
||||
DAGs are still defined as workflows, but they can now be declared using the `hatchet.workflow` method.
|
||||
|
||||
<Snippet src={snips.typescript.dag.workflow} block="declaring_a_dag_workflow" />
|
||||
<Snippet src={snippets.typescript.dag.workflow.declaring_a_dag_workflow} />
|
||||
|
||||
You can now run work for tasks and workflows by directly interacting with the returned object.
|
||||
|
||||
<Snippet src={snips.typescript.simple.run} block="running_a_task" />
|
||||
<Snippet src={snippets.typescript.simple.run.running_a_task} />
|
||||
|
||||
There are a few important things to note when migrating to the new SDK:
|
||||
|
||||
@@ -35,16 +35,12 @@ The new SDK also provides improved type support for spawning child tasks from ar
|
||||
|
||||
First, we declare a child task:
|
||||
|
||||
<Snippet
|
||||
src={snips.typescript.child_workflows.workflow}
|
||||
block="declaring_a_child"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.child_workflows.workflow.declaring_a_child} />
|
||||
|
||||
Next, we spawn that child from a parent task:
|
||||
|
||||
<Snippet
|
||||
src={snips.typescript.child_workflows.workflow}
|
||||
block="declaring_a_parent"
|
||||
src={snippets.typescript.child_workflows.workflow.declaring_a_parent}
|
||||
/>
|
||||
|
||||
In this example, the compiler knows what to expect for the types of `input` and `ctx` for each of the tasks, as well as the type of the input of the `child` task when spawning it from the `parent` task.
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
import { GithubSnippet, getSnippets } from "@/components/code";
|
||||
|
||||
# On-Failure Tasks
|
||||
|
||||
@@ -14,19 +13,16 @@ You can define an on-failure task on your task the same as you'd define any othe
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.on_failure.worker} block="onfailure_step" />
|
||||
<Snippet src={snippets.python.on_failure.worker.on_failure_step} />
|
||||
<Callout variant="warning">
|
||||
Note: Only one on-failure task can be defined per workflow.
|
||||
</Callout>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.on_failure.workflow}
|
||||
block="on_failure_task"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.on_failure.workflow.on_failure_task} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.workflows.on_failure} />
|
||||
<Snippet src={snippets.go.workflows.on_failure.all} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Callout } from "nextra/components";
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
|
||||
# OpenTelemetry
|
||||
@@ -64,8 +64,10 @@ For example, you might be interested in using [Langfuse](https://langfuse.com/)
|
||||
First, configure the Langfuse client [as described by their documentation](https://langfuse.com/docs/opentelemetry/example-python-sdk):
|
||||
|
||||
<Snippet
|
||||
src={snips.python.opentelemetry_instrumentation.langfuse.client}
|
||||
block="configure_langfuse"
|
||||
src={
|
||||
snippets.python.opentelemetry_instrumentation.langfuse.client
|
||||
.configure_langfuse
|
||||
}
|
||||
/>
|
||||
|
||||
Langfuse will set the global tracer provider, so you don't have to do it manually.
|
||||
@@ -73,22 +75,24 @@ Langfuse will set the global tracer provider, so you don't have to do it manuall
|
||||
Next, create an OpenAI client [using Langfuse's OpenAI wrapper `langfuse.openai` as a drop-in replacement for the default OpenAI](https://langfuse.com/docs/integrations/openai/python/get-started) client:
|
||||
|
||||
<Snippet
|
||||
src={snips.python.opentelemetry_instrumentation.langfuse.client}
|
||||
block="create_openai_client"
|
||||
src={
|
||||
snippets.python.opentelemetry_instrumentation.langfuse.client
|
||||
.create_open_ai_client
|
||||
}
|
||||
/>
|
||||
|
||||
And that's it! Now you're ready to instrument your Hatchet workers with Langfuse. For example, create a task like this:
|
||||
|
||||
<Snippet
|
||||
src={snips.python.opentelemetry_instrumentation.langfuse.worker}
|
||||
block="task"
|
||||
src={snippets.python.opentelemetry_instrumentation.langfuse.worker.task}
|
||||
/>
|
||||
|
||||
And finally, run the task to view the Langfuse traces (cost, usage, etc.) interspersed with Hatchet's traces, in addition to any other traces you may have:
|
||||
|
||||
<Snippet
|
||||
src={snips.python.opentelemetry_instrumentation.langfuse.trigger}
|
||||
block="trigger_task"
|
||||
src={
|
||||
snippets.python.opentelemetry_instrumentation.langfuse.trigger.trigger_task
|
||||
}
|
||||
/>
|
||||
|
||||
When you run this task, you'll see a trace like this in Langfuse!
|
||||
|
||||
@@ -1,8 +1,3 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
|
||||
# Task Orchestration
|
||||
|
||||
Not only can you run a single task in Hatchet, but you can also orchestrate multiple tasks together based on a shape that you define. For example, you can run a task that depends on the output of another task, or you can run a task that waits for a certain condition to be met before running.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
@@ -31,19 +31,18 @@ First, you can set a default priority at the workflow level:
|
||||
<UniversalTabs items={['Python', 'Typescript', 'Go']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.priority.worker} block="default_priority" />
|
||||
<Snippet src={snippets.python.priority.worker.default_priority} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet
|
||||
src={snips.typescript.priority.workflow}
|
||||
block="task_priority_in_a_workflow"
|
||||
src={snippets.typescript.priority.workflow.task_priority_in_a_workflow}
|
||||
/>
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.go.workflows.priority} block="default_priority" />
|
||||
<Snippet src={snippets.go.workflows.priority.default_priority} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -57,19 +56,16 @@ When you trigger a run, you can set the priority of the triggered run to overrid
|
||||
<UniversalTabs items={['Python', 'Typescript', 'Go']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.priority.trigger} block="runtime_priority" />
|
||||
<Snippet src={snippets.python.priority.trigger.runtime_priority} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet
|
||||
src={snips.typescript.priority.run}
|
||||
block="run_a_task_with_a_priority"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.priority.run.run_a_task_with_a_priority} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.go.run.priority} block="running_a_task_with_priority" />
|
||||
<Snippet src={snippets.go.run.priority.running_a_task_with_priority} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -79,16 +75,16 @@ Similarly, you can also assign a priority to scheduled and cron workflows.
|
||||
<UniversalTabs items={['Python', 'Typescript', 'Go']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.priority.trigger} block="scheduled_priority" />
|
||||
<Snippet src={snippets.python.priority.trigger.scheduled_priority} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.typescript.priority.run} block="schedule_and_cron" />
|
||||
<Snippet src={snippets.typescript.priority.run.schedule_and_cron} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.go.run.priority} block="schedule_and_cron" />
|
||||
<Snippet src={snippets.go.run.priority.schedule_and_cron} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
|
||||
# Pydantic Support
|
||||
@@ -16,7 +16,7 @@ To enable Pydantic for validation, you'll need to:
|
||||
|
||||
By default, if no `input_validator` is provided, the `EmptyModel` is used, which is a Pydantic model that accepts any input. For example:
|
||||
|
||||
<Snippet src={snips.python.simple.worker} block="simple" />
|
||||
<Snippet src={snippets.python.simple.worker.simple} />
|
||||
|
||||
In this simple example, the `input` that's injected into the task accepts an argument `input`, which is of type `EmptyModel`. The `EmptyModel` can be imported directly from Hatchet, and is an alias for:
|
||||
|
||||
@@ -33,12 +33,12 @@ Note that since `extra="allow"` is set, workflows will not fail with validation
|
||||
|
||||
We highly recommend creating Pydantic models to represent your workflow inputs and outputs. This will help you catch errors early and ensure that your workflows are well-typed. For example, consider a fanout workflow like this:
|
||||
|
||||
<Snippet src={snips.python.fanout.worker} block="fanoutparent" />
|
||||
<Snippet src={snippets.python.fanout.worker.fanout_parent} />
|
||||
|
||||
In this case, we've defined two workflows: a parent and a child. They both have their inputs typed, and the parent spawns the child. Note that `child_wf.create_workflow_run_config` is typed, so the type checker (and your IDE) know the type of the input to the child workflow.
|
||||
|
||||
Then, the child tasks are defined as follows:
|
||||
|
||||
<Snippet src={snips.python.fanout.worker} block="fanoutchild" />
|
||||
<Snippet src={snippets.python.fanout.worker.fanout_child} />
|
||||
|
||||
In the children, the inputs are validated by Pydantic, so you can access their attributes directly without needing a type cast or parsing a dictionary with the inputs instead.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
@@ -45,7 +45,7 @@ This pattern is especially useful for:
|
||||
|
||||
We can add one or more rate limits to a task by adding the `rate_limits` configuration to the task definition.
|
||||
|
||||
<Snippet src={snips.python.rate_limit.worker} block="dynamic" />
|
||||
<Snippet src={snippets.python.rate_limit.worker.dynamic} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
@@ -53,13 +53,13 @@ We can add one or more rate limits to a task by adding the `rate_limits` configu
|
||||
|
||||
We can add one or more rate limits to a task by adding the `rate_limits` configuration to the task definition.
|
||||
|
||||
<Snippet src={snips.typescript.rate_limit.workflow} block="dynamic" />
|
||||
<Snippet src={snippets.typescript.rate_limit.workflow.dynamic} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
> Note: Go requires both a key and KeyExpr be set and the LimitValueExpr must be a CEL.
|
||||
|
||||
<Snippet src={snips.go.workflows.ratelimit} block="dynamic_rate_limit" />
|
||||
<Snippet src={snippets.go.workflows.ratelimit.dynamic_rate_limit} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -98,12 +98,12 @@ hatchet.rate_limits.put(RATE_LIMIT_KEY, 10, RateLimitDuration.MINUTE)
|
||||
|
||||
{" "}
|
||||
|
||||
<Snippet src={snips.typescript.rate_limit.workflow} block="upsert_rate_limit" />
|
||||
<Snippet src={snippets.typescript.rate_limit.workflow.upsert_rate_limit} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.go.workflows.ratelimit} block="upsert_rate_limit" />
|
||||
<Snippet src={snippets.go.workflows.ratelimit.upsert_rate_limit} />
|
||||
|
||||
</Tabs.Tab>
|
||||
|
||||
@@ -116,16 +116,16 @@ With your rate limit key defined, specify the units of consumption for a specifi
|
||||
<UniversalTabs items={['Python', 'Typescript', 'Go']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.rate_limit.worker} block="static" />
|
||||
<Snippet src={snippets.python.rate_limit.worker.static} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.typescript.rate_limit.workflow} block="static" />
|
||||
<Snippet src={snippets.typescript.rate_limit.workflow.static} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.go.workflows.ratelimit} block="static_rate_limit" />
|
||||
<Snippet src={snippets.go.workflows.ratelimit.static_rate_limit} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
@@ -29,16 +29,13 @@ To enable retries for a task, simply add the `retries` property to the task obje
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.python.retries.worker} block="simple_step_retries" />
|
||||
<Snippet src={snippets.python.retries.worker.simple_step_retries} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet
|
||||
src={snips.typescript.retries.workflow}
|
||||
block="simple_step_retries"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.retries.workflow.simple_step_retries} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.go.workflows.retries} block="simple_step_retries" />
|
||||
<Snippet src={snippets.go.workflows.retries.simple_step_retries} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -54,16 +51,13 @@ If you need to access the current retry count within a task, you can use the `re
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.python.retries.worker} block="retries_with_count" />
|
||||
<Snippet src={snippets.python.retries.worker.retries_with_count} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet
|
||||
src={snips.typescript.retries.workflow}
|
||||
block="get_the_current_retry_count"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.retries.workflow.retries_with_count} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.go.workflows.retries} block="retries_with_count" />
|
||||
<Snippet src={snippets.go.workflows.retries.retries_with_count} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -73,16 +67,13 @@ Hatchet also supports exponential backoff for retries, which can be useful for h
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.retries.worker} block="retries_with_backoff" />
|
||||
<Snippet src={snippets.python.retries.worker.retries_with_backoff} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.retries.workflow}
|
||||
block="retries_with_backoff"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.retries.workflow.retries_with_backoff} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.workflows.retries} block="retries_with_backoff" />
|
||||
<Snippet src={snippets.go.workflows.retries.retries_with_backoff} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -96,21 +87,16 @@ The Hatchet SDKs each expose a `NonRetryable` exception, which allows you to byp
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet
|
||||
src={snips.python.non_retryable.worker}
|
||||
block="non_retryable_task"
|
||||
/>
|
||||
<Snippet src={snippets.python.non_retryable.worker.non_retryable_task} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.non_retryable.workflow}
|
||||
block="non_retrying_task"
|
||||
src={snippets.typescript.non_retryable.workflow.non_retrying_task}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet
|
||||
src={snips.go.workflows.non_retryable_error}
|
||||
block="non_retryable_error"
|
||||
src={snippets.go.workflows.non_retryable_error.non_retryable_error}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -35,12 +35,12 @@ Note that the type of `input` here is a Pydantic model that matches the input sc
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
You can use your `Workflow` object to run a workflow and "forget" it by calling the `run_no_wait` method. This method enqueue a workflow run and return a `WorkflowRunRef`, a reference to that run, without waiting for the result.
|
||||
<Snippet src={snips.typescript.simple.enqueue} block="enqueuing_a_workflow_fire_and_forget" />
|
||||
<Snippet src={snippets.typescript.simple.enqueue.enqueuing_a_workflow_fire_and_forget} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
You can use your `Workflow` object to run a workflow and "forget" it by calling the `RunNoWait` method. This method enqueue a workflow run and return a `WorkflowRunRef`, a reference to that run, without waiting for the result.
|
||||
|
||||
<Snippet src={snips.go.run.simple} block="running_a_task_without_waiting" />
|
||||
<Snippet src={snippets.go.run.simple.running_a_task_without_waiting} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -67,12 +67,12 @@ result = await ref.aio_result()
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
You can use your `Workflow` object to run a workflow and "forget" it by calling the `run_no_wait` method. This method enqueues a workflow run and returns a `WorkflowRunRef`, a reference to that run, without waiting for the result.
|
||||
<Snippet src={snips.typescript.simple.enqueue} block="subscribing_to_results" />
|
||||
<Snippet src={snippets.typescript.simple.enqueue.subscribing_to_results} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
You can use your `Workflow` object to run a workflow and "forget" it by calling the `RunNoWait` method. This method enqueues a workflow run and returns a `WorkflowRunRef`, a reference to that run, without waiting for the result.
|
||||
|
||||
<Snippet src={snips.go.run.simple} block="subscribing_to_results" />
|
||||
<Snippet src={snippets.go.run.simple.subscribing_to_results} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -23,16 +23,15 @@ To run a task on an event, you need to declare the event that will trigger the t
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.events.worker} block="event_trigger" />
|
||||
<Snippet src={snippets.python.events.worker.event_trigger} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.on_event.workflow}
|
||||
block="run_workflow_on_event"
|
||||
src={snippets.typescript.on_event.workflow.run_workflow_on_event}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.workflows.on_event} block="run_workflow_on_event" />
|
||||
<Snippet src={snippets.go.workflows.on_event.run_workflow_on_event} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -53,13 +52,13 @@ You can push an event to the event queue by calling the `push` method on the Hat
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.events.event} block="event_trigger" />
|
||||
<Snippet src={snippets.python.events.event.event_trigger} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.on_event.event} block="pushing_an_event" />
|
||||
<Snippet src={snippets.typescript.on_event.event.pushing_an_event} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.run.event} block="pushing_an_event" />
|
||||
<Snippet src={snippets.go.run.event.pushing_an_event} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -77,19 +76,13 @@ The simplest way to create a filter is to register it declaratively with your wo
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet
|
||||
src={snips.python.events.worker}
|
||||
block="event_trigger_with_filter"
|
||||
/>
|
||||
<Snippet src={snippets.python.events.worker.event_trigger_with_filter} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.on_event.workflow}
|
||||
block="workflow_with_filter"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.on_event.workflow.workflow_with_filter} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.workflows.on_event} block="declare_with_filter" />
|
||||
<Snippet src={snippets.go.workflows.on_event.declare_with_filter} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -101,13 +94,13 @@ You also can create event filters by using the `filters` clients on the SDKs:
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.events.filter} block="create_a_filter" />
|
||||
<Snippet src={snippets.python.events.filter.create_a_filter} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.on_event.filter} block="create_a_filter" />
|
||||
<Snippet src={snippets.typescript.on_event.filter.create_a_filter} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.run.event} block="create_a_filter" />
|
||||
<Snippet src={snippets.go.run.event.create_a_filter} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -123,13 +116,13 @@ Then, push an event that uses the filter to determine whether or not to run. For
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.events.filter} block="skip_a_run" />
|
||||
<Snippet src={snippets.python.events.filter.skip_a_run} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.on_event.filter} block="skip_a_run" />
|
||||
<Snippet src={snippets.typescript.on_event.filter.skip_a_run} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.run.event} block="skip_a_run" />
|
||||
<Snippet src={snippets.go.run.event.skip_a_run} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -137,13 +130,13 @@ But this one will be triggered since the payload _does_ match the expression:
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.events.filter} block="trigger_a_run" />
|
||||
<Snippet src={snippets.python.events.filter.trigger_a_run} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.on_event.filter} block="trigger_a_run" />
|
||||
<Snippet src={snippets.typescript.on_event.filter.trigger_a_run} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.run.event} block="trigger_a_run" />
|
||||
<Snippet src={snippets.go.run.event.trigger_a_run} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -158,21 +151,16 @@ You can access the filter payload by using the `Context` in the task that was tr
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet
|
||||
src={snips.python.events.worker}
|
||||
block="accessing_the_filter_payload"
|
||||
/>
|
||||
<Snippet src={snippets.python.events.worker.accessing_the_filter_payload} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.on_event.workflow}
|
||||
block="accessing_the_filter_payload"
|
||||
src={snippets.typescript.on_event.workflow.accessing_the_filter_payload}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet
|
||||
src={snips.go.workflows.on_event}
|
||||
block="accessing_the_filter_payload"
|
||||
src={snippets.go.workflows.on_event.accessing_the_filter_payload}
|
||||
/>
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -17,22 +17,22 @@ One method for running a task in Hatchet is to run it and wait for its result. S
|
||||
|
||||
You can use your `Task` object to run a task and wait for it to complete by calling the `run` method. This method will block until the task completes and return the result.
|
||||
|
||||
<Snippet src={snips.python.child.trigger} block="running_a_task" />
|
||||
<Snippet src={snippets.python.child.trigger.running_a_task} />
|
||||
|
||||
You can also `await` the result of `aio_run`:
|
||||
|
||||
<Snippet src={snips.python.child.trigger} block="running_a_task_aio" />
|
||||
<Snippet src={snippets.python.child.trigger.running_a_task_aio} />
|
||||
|
||||
Note that the type of `input` here is a Pydantic model that matches the input schema of your workflow.
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
You can use your `Task` object to run a task and wait for it to complete by calling the `run` method. This method will return a promise that resolves when the task completes and returns the result.
|
||||
<Snippet src={snips.typescript.simple.run} block="running_a_task" />
|
||||
<Snippet src={snippets.typescript.simple.run.running_a_task} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
You can use your `Task` object to run a task and wait for it to complete by calling the `Run` method. This method will block until the task completes and return the result.
|
||||
<Snippet src={snips.go.run.simple} block="running_a_task" />
|
||||
<Snippet src={snippets.go.run.simple.running_a_task} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -46,8 +46,7 @@ You can also spawn tasks from within a task. This is useful for composing tasks
|
||||
You can run a task from within a task by calling the `aio_run` method on the task object from within a task function. This will associate the runs in the dashboard for easier debugging.
|
||||
|
||||
<Snippet
|
||||
src={snips.python.child.simple_fanout}
|
||||
block="running_a_task_from_within_a_task"
|
||||
src={snippets.python.child.simple_fanout.running_a_task_from_within_a_task}
|
||||
/>
|
||||
|
||||
And that's it! The parent task will run and spawn the child task, and then will collect the results from its tasks.
|
||||
@@ -57,14 +56,14 @@ And that's it! The parent task will run and spawn the child task, and then will
|
||||
|
||||
You can run a task from within a task by calling the `runChild` method on the `ctx` parameter of the task function. This will associate the runs in the dashboard for easier debugging.
|
||||
|
||||
<Snippet src={snips.typescript.simple.run} block="spawning_tasks_from_within_a_task" />
|
||||
<Snippet src={snippets.typescript.simple.run.spawning_tasks_from_within_a_task} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
You can run a task from within a task by calling the `Run` method on the task object from within a task function. This will associate the runs in the dashboard for easier debugging.
|
||||
|
||||
<Snippet src={snips.go.workflows.simple} block="spawning_tasks_from_within_a_task" />
|
||||
<Snippet src={snippets.go.workflows.simple.spawning_tasks_from_within_a_task} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -78,21 +77,21 @@ Sometimes you may want to run multiple tasks concurrently. Here's how to do that
|
||||
|
||||
Since the `aio_run` method returns a coroutine, you can spawn multiple tasks in parallel and await using `asyncio.gather`.
|
||||
|
||||
<Snippet src={snips.python.child.trigger} block="running_multiple_tasks" />
|
||||
<Snippet src={snippets.python.child.trigger.running_multiple_tasks} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
|
||||
Since the `run` method returns a promise, you can spawn multiple tasks in parallel and await using `Promise.all`.
|
||||
|
||||
<Snippet src={snips.typescript.simple.run} block="running_multiple_tasks" />
|
||||
<Snippet src={snippets.typescript.simple.run.running_multiple_tasks} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
You can run multiple tasks in parallel by calling `Run` multiple times in goroutines and using a `sync.WaitGroup` to wait for them to complete.
|
||||
|
||||
<Snippet src={snips.go.run.simple} block="running_multiple_tasks" />
|
||||
<Snippet src={snippets.go.run.simple.running_multiple_tasks} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -20,10 +20,10 @@ simple.run(
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.simple.run} block="running_a_task" />
|
||||
<Snippet src={snippets.typescript.simple.run.running_a_task} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.workflows.simple} block="running_a_task" />
|
||||
<Snippet src={snippets.go.workflows.simple.running_a_task} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
|
||||
@@ -1,14 +1,7 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
import { GithubSnippet, getSnippets } from "@/components/code";
|
||||
|
||||
export const ScheduleTriggerGo = {
|
||||
path: "examples/go/z_v0/scheduled/main.go",
|
||||
};
|
||||
|
||||
export const getStaticProps = ({}) => getSnippets([ScheduleTriggerGo]);
|
||||
|
||||
# Scheduled Runs
|
||||
|
||||
@@ -53,10 +46,10 @@ print(schedule.id)
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.simple.schedule} block="create_a_scheduled_run" />
|
||||
<Snippet src={snippets.typescript.simple.schedule.create_a_scheduled_run} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<GithubSnippet src={ScheduleTriggerGo} target="Create" />
|
||||
<Snippet src={snippets.go.z_v0.scheduled.main.create} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -75,16 +68,13 @@ You can delete a scheduled run by calling the `delete` method on the scheduled r
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.scheduled.programatic_sync} block="delete" />
|
||||
<Snippet src={snippets.python.scheduled.programatic_sync.delete} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.simple.schedule}
|
||||
block="delete_a_scheduled_run"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.simple.schedule.delete_a_scheduled_run} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<GithubSnippet src={ScheduleTriggerGo} target="Delete" />
|
||||
<Snippet src={snippets.go.z_v0.scheduled.main.delete} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -94,16 +84,13 @@ You can list all scheduled runs for a task by calling the `list` method on the s
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.scheduled.programatic_sync} block="list" />
|
||||
<Snippet src={snippets.python.scheduled.programatic_sync.list} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet
|
||||
src={snips.typescript.simple.schedule}
|
||||
block="list_scheduled_runs"
|
||||
/>
|
||||
<Snippet src={snippets.typescript.simple.schedule.list_scheduled_runs} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<GithubSnippet src={ScheduleTriggerGo} target="List" />
|
||||
<Snippet src={snippets.go.z_v0.scheduled.main.list} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
|
||||
@@ -1,14 +1,7 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Tabs, Callout } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
import { GithubSnippet, getSnippets } from "@/components/code";
|
||||
|
||||
export const StickyWorkerGo = {
|
||||
path: "examples/go/z_v0/assignment-sticky/main.go",
|
||||
};
|
||||
|
||||
export const getStaticProps = ({}) => getSnippets([StickyWorkerGo]);
|
||||
|
||||
# Sticky Worker Assignment (Beta)
|
||||
|
||||
@@ -43,18 +36,17 @@ There are two strategies for setting sticky assignment for [DAG](./dags.mdx) wor
|
||||
<UniversalTabs items={['Python', 'Typescript']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.sticky_workers.worker} block="stickyworker" />
|
||||
<Snippet src={snippets.python.sticky_workers.worker.sticky_worker} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.typescript.sticky.workflow} block="sticky_task" />
|
||||
<Snippet src={snippets.typescript.sticky.workflow.sticky_task} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<GithubSnippet src={StickyWorkerGo} target="StickyWorker" />
|
||||
|
||||
<Snippet src={snippets.go.z_v0.assignment_sticky.main.all} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -74,17 +66,17 @@ If either condition is not met, an error will be thrown when the child task is s
|
||||
<UniversalTabs items={['Python', 'Typescript']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.sticky_workers.worker} block="stickychild" />
|
||||
<Snippet src={snippets.python.sticky_workers.worker.sticky_child} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.typescript.sticky.workflow} block="sticky_task" />
|
||||
<Snippet src={snippets.typescript.sticky.workflow.sticky_task} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<GithubSnippet src={StickyWorkerGo} target="StickyChild" />
|
||||
<Snippet src={snippets.go.z_v0.assignment_sticky.main.all} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -13,17 +13,17 @@ You can stream data out of a task run by using the `put_stream` (or equivalent)
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.streaming.worker} block="streaming" />
|
||||
<Snippet src={snippets.python.streaming.worker.streaming} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
|
||||
<Snippet src={snips.typescript.streaming.workflow} block="streaming" />
|
||||
<Snippet src={snippets.typescript.streaming.workflow.streaming} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
<Snippet src={snips.go.streaming.shared.task} block="streaming" />
|
||||
<Snippet src={snippets.go.streaming.shared.task.streaming} />
|
||||
|
||||
</Tabs.Tab>
|
||||
|
||||
@@ -37,17 +37,17 @@ You can easily consume stream events by using the stream method on the workflow
|
||||
|
||||
<UniversalTabs items={["Python", "Typescript", "Go"]}>
|
||||
<Tabs.Tab title="Python">
|
||||
<Snippet src={snips.python.streaming.async_stream} block="consume" />
|
||||
<Snippet src={snippets.python.streaming.async_stream.consume} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
|
||||
<Snippet src={snips.typescript.streaming.run} block="consume" />
|
||||
<Snippet src={snippets.typescript.streaming.run.consume} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
<Snippet src={snips.go.streaming.consumer.main} block="consume" />
|
||||
<Snippet src={snippets.go.streaming.consumer.main.consume} />
|
||||
|
||||
</Tabs.Tab>
|
||||
|
||||
@@ -79,21 +79,21 @@ In both cases, we recommend using your application's backend as a proxy for the
|
||||
|
||||
For example, with FastAPI, you'd do the following:
|
||||
|
||||
<Snippet src={snips.python.streaming.fastapi_proxy} block="fastapi_proxy" />
|
||||
<Snippet src={snippets.python.streaming.fastapi_proxy.fast_api_proxy} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
|
||||
For example, with NextJS backend-as-frontend, you'd do the following:
|
||||
|
||||
<Snippet src={snips.typescript.streaming.nextjs_proxy} block="nextjs_proxy" />
|
||||
<Snippet src={snippets.typescript.streaming.nextjs_proxy.next_js_proxy} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
|
||||
For example, with Go's built-in HTTP server, you'd do the following:
|
||||
|
||||
<Snippet src={snips.go.streaming.server.main} block="server" />
|
||||
<Snippet src={snippets.go.streaming.server.main.server} />
|
||||
|
||||
</Tabs.Tab>
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
import { GithubSnippet, getSnippets } from "@/components/code";
|
||||
|
||||
# Timeouts in Hatchet
|
||||
|
||||
@@ -40,15 +39,15 @@ You can specify execution and scheduling timeouts for a task using the `executio
|
||||
|
||||
<UniversalTabs items={['Python', 'Typescript']}>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.python.timeout.worker} block="executiontimeout" />
|
||||
<Snippet src={snippets.python.timeout.worker.execution_timeout} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.typescript.with_timeouts.workflow} block="execution_timeout" />
|
||||
<Snippet src={snippets.typescript.with_timeouts.workflow.execution_timeout} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.go.workflows.timeouts} block="execution_timeout" />
|
||||
<Snippet src={snippets.go.workflows.timeouts.execution_timeout} />
|
||||
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
@@ -72,14 +71,14 @@ For example:
|
||||
|
||||
<UniversalTabs items={['Python', 'Typescript']}>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.python.timeout.worker} block="refreshtimeout" />
|
||||
<Snippet src={snippets.python.timeout.worker.refresh_timeout} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.typescript.with_timeouts.workflow} block="refresh_timeout" />
|
||||
<Snippet src={snippets.typescript.with_timeouts.workflow.refresh_timeout} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
<Snippet src={snips.go.workflows.timeouts} block="refresh_timeout" />
|
||||
<Snippet src={snippets.go.workflows.timeouts.refresh_timeout} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
|
||||
|
||||
@@ -1,14 +1,7 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Tabs, Callout } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
import { GithubSnippet, getSnippets } from "@/components/code";
|
||||
|
||||
export const WorkerAffinityTS = {
|
||||
path: "src/examples/affinity-workers.ts",
|
||||
};
|
||||
|
||||
export const getStaticProps = ({}) => getSnippets([WorkerAffinityTS]);
|
||||
|
||||
# Worker Affinity Assignment (Beta)
|
||||
|
||||
@@ -27,14 +20,11 @@ Labels can be set on workers when they are registered with Hatchet. Labels are k
|
||||
<UniversalTabs items={['Python', 'Typescript']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.affinity_workers.worker} block="affinityworker" />
|
||||
<Snippet src={snippets.python.affinity_workers.worker.affinity_worker} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
{" "}
|
||||
|
||||
<GithubSnippet src={WorkerAffinityTS} target="AffinityWorker" />
|
||||
<Snippet src={snippets.typescript.affinity.affinity_workers.affinity_workflow} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
@@ -70,12 +60,14 @@ You can specify desired worker label state for specific tasks in a workflow by s
|
||||
<UniversalTabs items={['Python', 'Typescript']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.affinity_workers.worker} block="affinityworkflow" />
|
||||
<Snippet src={snippets.python.affinity_workers.worker.affinity_workflow} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
<GithubSnippet src={WorkerAffinityTS} target="AffinityWorkflow" />
|
||||
<Snippet
|
||||
src={snippets.typescript.affinity.affinity_workers.affinity_workflow}
|
||||
/>
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
@@ -125,7 +117,7 @@ Labels can also be set dynamically on workers using the `upsertLabels` method. T
|
||||
<UniversalTabs items={['Python', 'Typescript']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.affinity_workers.worker} block="affinitytask" />
|
||||
<Snippet src={snippets.python.affinity_workers.worker.affinity_task} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { Snippet } from "@/components/code";
|
||||
|
||||
# Worker Health Checks
|
||||
|
||||
The Python SDK allows you to enable and ping a healthcheck to check on the status of your worker.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -48,7 +48,7 @@ Declare a worker by calling the `worker` method on the Hatchet client. The `work
|
||||
<Tabs.Tab title="Typescript">
|
||||
### Register the Worker
|
||||
|
||||
<Snippet src={snips.typescript.simple.worker} block="declaring_a_worker" />
|
||||
<Snippet src={snippets.typescript.simple.worker.declaring_a_worker} />
|
||||
|
||||
### Add an Entrypoint Script
|
||||
|
||||
@@ -83,7 +83,7 @@ Declare a worker by calling the `worker` method on the Hatchet client. The `work
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.worker.start} />
|
||||
<Snippet src={snippets.go.worker.start.all} />
|
||||
|
||||
Then start the worker by running the script you just added to your `package.json`:
|
||||
```bash
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Callout, Card, Cards, Steps, Tabs } from "nextra/components";
|
||||
import UniversalTabs from "@/components/UniversalTabs";
|
||||
@@ -36,10 +36,10 @@ def simple(input: SimpleInput, ctx: Context) -> dict[str, str]:
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.simple.workflow} block="declaring_a_task" />
|
||||
<Snippet src={snippets.typescript.simple.workflow.declaring_a_task} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.workflows.simple} block="declaring_a_task" />
|
||||
<Snippet src={snippets.go.workflows.simple.declaring_a_task} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
@@ -62,10 +62,10 @@ simple.run(SimpleInput(message="HeLlO WoRlD"))
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Typescript">
|
||||
<Snippet src={snips.typescript.simple.run} block="running_a_task" />
|
||||
<Snippet src={snippets.typescript.simple.run.running_a_task} />
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab title="Go">
|
||||
<Snippet src={snips.go.workflows.simple} block="running_a_task" />
|
||||
<Snippet src={snippets.go.workflows.simple.running_a_task} />
|
||||
</Tabs.Tab>
|
||||
</UniversalTabs>
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Tabs, Steps, Callout, FileTree } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Tabs, Steps, Callout } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { snippets } from "@/lib/generated/snippets";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Tabs, Callout } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
@@ -125,7 +125,7 @@ c.Event().BulkPush(
|
||||
<UniversalTabs items={['Python', 'Typescript', 'Go']}>
|
||||
<Tabs.Tab>
|
||||
|
||||
<Snippet src={snips.python.bulk_fanout.worker} block="bulkfanoutparent" />
|
||||
<Snippet src={snippets.python.bulk_fanout.worker.bulk_fanout_parent} />
|
||||
|
||||
</Tabs.Tab>
|
||||
<Tabs.Tab>
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Tabs, Steps, Callout } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import snips from "@/lib/snips";
|
||||
import { Snippet } from "@/components/code";
|
||||
import { Tabs, Steps, Callout } from "nextra/components";
|
||||
import UniversalTabs from "../../components/UniversalTabs";
|
||||
|
||||
|
||||
@@ -0,0 +1,245 @@
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from dataclasses import asdict, dataclass
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
|
||||
# Per-SDK configuration: where that SDK's example sources live and how its
# snippet-marker comments are written.
@dataclass
class ParsingContext:
    # Repo-relative directory containing the SDK's example sources.
    example_path: str
    # Source file extension to glob for (e.g. ".py", ".ts", ".go").
    extension: str
    # Line-comment prefix used by the snippet markers (e.g. "#" or "//").
    comment_prefix: str
|
||||
|
||||
|
||||
class SDKParsingContext(Enum):
    """One parsing configuration per SDK whose examples feed the docs.

    The member name (lowercased) is also used as the language label and as
    the directory name under ``examples/`` in the generated output.
    """

    # Python examples use "#" line comments for snippet markers.
    PYTHON = ParsingContext(
        example_path="sdks/python/examples", extension=".py", comment_prefix="#"
    )
    # TypeScript and Go both use "//" line comments.
    TYPESCRIPT = ParsingContext(
        example_path="sdks/typescript/src/v1/examples",
        extension=".ts",
        comment_prefix="//",
    )
    GO = ParsingContext(
        example_path="pkg/examples", extension=".go", comment_prefix="//"
    )
|
||||
|
||||
|
||||
@dataclass
class Snippet:
    """A single extracted code snippet.

    NOTE(review): field names are deliberately camelCase — presumably these
    dataclasses are serialized (``asdict`` is imported above) straight into
    the generated frontend snippet modules; confirm against the docs build
    before renaming any field.
    """

    # snake_case snippet title ("all" when the file has no snippet markers).
    title: str
    # The snippet's source text.
    content: str
    # Deep link to the snippet's source file on GitHub (main branch).
    githubUrl: str
    # Lowercased SDK name, e.g. "python" / "typescript" / "go".
    language: str
    # Repo-relative path under examples/ for the copied source file.
    codePath: str
|
||||
|
||||
|
||||
@dataclass
class ProcessedExample:
    """A fully parsed example file plus metadata about where to write it."""

    # Which SDK configuration the file was parsed under.
    context: SDKParsingContext
    # Original on-disk path of the example file.
    filepath: str
    # All snippets extracted from the file (or a single "all" snippet).
    snippets: list[Snippet]
    # The file's complete, unmodified source text.
    raw_content: str
    # Destination path under examples/<language>/ for the copied file.
    output_path: str
|
||||
|
||||
|
||||
# All paths below are resolved relative to the repository root; the script
# assumes it is executed from two directory levels beneath it.
ROOT = "../../"
BASE_SNIPPETS_DIR = os.path.join(ROOT, "frontend", "docs", "lib")
# Where the generated snippet modules are written for the docs frontend.
OUTPUT_DIR = os.path.join(BASE_SNIPPETS_DIR, "generated", "snippets")
# GitHub coordinates used to build per-snippet source links.
OUTPUT_GITHUB_ORG = "hatchet-dev"
OUTPUT_GITHUB_REPO = "hatchet"
# Example files whose names match any of these regexes are skipped
# (package markers, unit tests, type tests, e2e tests).
IGNORED_FILE_PATTERNS = [
    r"__init__\.py$",
    r"test_.*\.py$",
    r"\.test\.ts$",
    r"\.test-d\.ts$",
    r"test_.*\.go$",
    r"_test\.go$",
    r"\.e2e\.ts$",
]
|
||||
|
||||
|
||||
def to_snake_case(text):
    """Normalize an arbitrary snippet title into snake_case.

    Drops characters outside letters/digits/whitespace/hyphen/underscore,
    converts runs of hyphens or whitespace to single underscores, splits
    camelCase and acronym boundaries, collapses repeated underscores, and
    lowercases the result.
    """
    # (pattern, replacement) pairs applied in order — order matters:
    # separators must become underscores before the camelCase splits run.
    transformations = (
        (r"[^a-zA-Z0-9\s\-_]", ""),  # strip punctuation
        (r"[-\s]+", "_"),  # hyphens / whitespace -> "_"
        (r"([a-z0-9])([A-Z])", r"\1_\2"),  # fooBar -> foo_Bar
        (r"([A-Z])([A-Z][a-z])", r"\1_\2"),  # HTTPServer -> HTTP_Server
        (r"_+", "_"),  # collapse repeats
    )
    for pattern, replacement in transformations:
        text = re.sub(pattern, replacement, text)
    return text.strip("_").lower()
|
||||
|
||||
|
||||
# Readability aliases for the tuple returned by parse_snippet_from_block.
Title = str
Content = str


def parse_snippet_from_block(match: re.Match[str]) -> tuple[Title, Content]:
    """Extract a (snake_case title, trimmed code) pair from a marker match.

    Group 1 of the match is the human-readable title following the opening
    snippet marker; group 2 is everything up to the closing marker.
    """
    raw_title, raw_code = match.group(1), match.group(2)
    return to_snake_case(raw_title.strip()), raw_code.strip()
|
||||
|
||||
|
||||
def parse_snippets(ctx: SDKParsingContext, filename: str) -> list[Snippet]:
    """Parse all marked snippets out of one example file.

    Snippets are delimited by ``<prefix> > Title`` ... ``<prefix> !!``
    marker comments. A file without any markers yields a single snippet
    titled "all" containing the entire file.
    """
    prefix = re.escape(ctx.value.comment_prefix)
    block_pattern = rf"{prefix} >\s+(.+?)\n(.*?){prefix} !!"

    base_path = ROOT + ctx.value.example_path.strip("/")

    with open(filename) as f:
        content = f.read()

    # Destination path of the copied example, also used for the GitHub link.
    code_path = f"examples/{ctx.name.lower()}{filename.replace(base_path, '')}"
    github_url = f"https://github.com/{OUTPUT_GITHUB_ORG}/{OUTPUT_GITHUB_REPO}/tree/main/{code_path}"

    def build(title: str, body: str) -> Snippet:
        return Snippet(
            title=title,
            content=body,
            githubUrl=github_url,
            language=ctx.name.lower(),
            codePath=code_path,
        )

    matches = list(re.finditer(block_pattern, content, re.DOTALL))

    if not matches:
        # No markers: expose the whole file as one snippet.
        return [build("all", content)]

    return [build(*parse_snippet_from_block(m)) for m in matches]
|
||||
|
||||
|
||||
def process_example(ctx: SDKParsingContext, filename: str) -> ProcessedExample:
    """Read one example file and bundle it with its parsed snippets."""
    with open(filename) as f:
        raw = f.read()

    # Path relative to the SDK's example directory, e.g. "/child/simple.py".
    relative = filename.replace(ROOT + ctx.value.example_path, "")

    return ProcessedExample(
        context=ctx,
        filepath=filename,
        output_path=f"examples/{ctx.name.lower()}{relative}",
        snippets=parse_snippets(ctx, filename),
        raw_content=raw,
    )
|
||||
|
||||
|
||||
def process_examples() -> list[ProcessedExample]:
    """Discover and process every example file across all SDKs."""

    def is_ignored(filename: str) -> bool:
        # Skip test files and other non-example sources.
        return any(re.search(p, filename) for p in IGNORED_FILE_PATTERNS)

    examples: list[ProcessedExample] = []

    for ctx in SDKParsingContext:
        base_path = ROOT + ctx.value.example_path.strip("/")
        glob_pattern = base_path + "/**/*" + ctx.value.extension

        for filename in glob.iglob(glob_pattern, recursive=True):
            if not is_ignored(filename):
                examples.append(process_example(ctx, filename))

    return examples
|
||||
|
||||
|
||||
def create_snippet_tree(examples: list[ProcessedExample]) -> dict[str, dict[str, Any]]:
    """Build a nested dict keyed by example path segments, snippets at the leaves.

    Each example's output path — minus the leading "examples/" and its file
    extension — is split into path segments; each segment becomes one level
    of the tree, and every snippet is stored under its title at the leaf.
    """
    tree: dict[str, Any] = {}

    for example in examples:
        # Use removeprefix/removesuffix instead of str.replace: replace()
        # strips the first occurrence anywhere in the string, which would
        # mangle a path that happens to contain "examples/" or the
        # extension mid-path (e.g. "foo.py.bak" directories).
        keys = (
            example.output_path.removeprefix("examples/")
            .removesuffix(example.context.value.extension)
            .split("/")
        )

        for snippet in example.snippets:
            full_keys = keys + [snippet.title]

            # Walk/create intermediate nodes, snake_casing each path segment.
            current = tree
            for key in full_keys[:-1]:
                key = to_snake_case(key)
                if key not in current:
                    current[key] = {}
                current = current[key]

            # Leaf: the snippet serialized as a plain dict (JSON-ready).
            current[full_keys[-1]] = asdict(snippet)

    return tree
|
||||
|
||||
|
||||
def is_excluded_line(line: str, comment_prefix: str) -> bool:
    """Return True for lines that must not appear in published examples."""
    stripped = line.strip()

    # Snippet end markers are internal-only.
    if stripped == f"{comment_prefix} !!":
        return True

    # Lint pragmas and highlight markers are stripped as well.
    return "eslint-disable" in line or "HH-" in line
|
||||
|
||||
|
||||
def process_line_content(line: str) -> str:
    """Rewrite the internal TS import alias to the published package name."""
    internal_alias = "@hatchet/"
    published_package = "@hatchet-dev/typescript-sdk/"

    return line.replace(internal_alias, published_package)
|
||||
|
||||
|
||||
def clean_example_content(content: str, comment_prefix: str) -> str:
    """Strip marker/pragma lines and rewrite import aliases for publishing."""
    kept = (
        process_line_content(line)
        for line in content.split("\n")
        if not is_excluded_line(line, comment_prefix)
    )

    return "\n".join(kept)
|
||||
|
||||
|
||||
def write_examples(examples: list[ProcessedExample]) -> None:
    """Write each example's cleaned content to its destination in the repo."""
    for example in examples:
        destination = os.path.join(ROOT, example.output_path)
        os.makedirs(os.path.dirname(destination), exist_ok=True)

        cleaned = clean_example_content(
            example.raw_content, example.context.value.comment_prefix
        )

        with open(destination, "w") as f:
            f.write(cleaned)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # 1. Parse every SDK example into snippets.
    processed_examples = process_examples()

    # 2. Emit the nested snippet index consumed by the docs site.
    tree = create_snippet_tree(processed_examples)

    print(f"Writing snippets to {OUTPUT_DIR}/index.ts")
    os.makedirs(OUTPUT_DIR, exist_ok=True)

    with open(os.path.join(OUTPUT_DIR, "index.ts"), "w") as f:
        f.write("export const snippets = ")
        json.dump(tree, f, indent=2)
        f.write(" as const;\n")

    # Union of supported language literals, e.g. "'python' | 'typescript' | 'go'".
    # Built outside the f-string: reusing the enclosing quote style inside an
    # f-string is a SyntaxError on Python < 3.12, so this keeps the script
    # runnable on older interpreters.
    language_union = " | ".join(f"'{v.name.lower()}'" for v in SDKParsingContext)

    # 3. Emit the matching TypeScript Snippet type.
    snippet_type = (
        "export type Snippet = {\n"
        "  title: string;\n"
        "  content: string;\n"
        "  githubUrl: string;\n"
        "  codePath: string;\n"
        f"  language: {language_union}\n"
        "};\n"
    )

    print(f"Writing snippet type to {BASE_SNIPPETS_DIR}/snippet.ts")
    with open(os.path.join(BASE_SNIPPETS_DIR, "snippet.ts"), "w") as f:
        f.write(snippet_type)

    # 4. Copy cleaned example sources back into the repo's examples/ tree.
    write_examples(processed_examples)
|
||||
@@ -1,37 +0,0 @@
|
||||
{
|
||||
"env": {
|
||||
"browser": true,
|
||||
"es2021": true,
|
||||
"node": true
|
||||
},
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:prettier/recommended"
|
||||
],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": "latest",
|
||||
"sourceType": "module"
|
||||
},
|
||||
"plugins": [
|
||||
"@typescript-eslint",
|
||||
"prettier",
|
||||
"unused-imports"
|
||||
],
|
||||
"rules": {
|
||||
"prettier/prettier": "error",
|
||||
"no-unused-vars": "off",
|
||||
"@typescript-eslint/no-unused-vars": "off",
|
||||
"unused-imports/no-unused-imports": "error",
|
||||
"unused-imports/no-unused-vars": [
|
||||
"warn",
|
||||
{
|
||||
"vars": "all",
|
||||
"varsIgnorePattern": "^_",
|
||||
"args": "after-used",
|
||||
"argsIgnorePattern": "^_"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -1,29 +0,0 @@
|
||||
# Dependencies
|
||||
node_modules/
|
||||
|
||||
# Build output
|
||||
dist/
|
||||
|
||||
# IDE
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea/
|
||||
*.code-workspace
|
||||
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
|
||||
# Misc
|
||||
.DS_Store
|
||||
.env
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
|
||||
out/
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"semi": true,
|
||||
"tabWidth": 2,
|
||||
"printWidth": 100,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "all"
|
||||
}
|
||||
@@ -1,60 +0,0 @@
|
||||
# Snips Library
|
||||
|
||||
A TypeScript utility library for use within the Hatchet monorepo.
|
||||
|
||||
## Installation
|
||||
|
||||
This package is intended for internal use within the Hatchet monorepo. To use it, add it as a dependency in your package.json:
|
||||
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
import { createClient, wrapResult } from '@hatchet/snips';
|
||||
|
||||
// Create a configured client
|
||||
const client = createClient({
|
||||
baseUrl: 'https://api.example.com',
|
||||
});
|
||||
|
||||
// Use the wrapResult utility to handle errors
|
||||
const fetchData = async () => {
|
||||
const result = await wrapResult(fetch('https://api.example.com/data'));
|
||||
|
||||
if (result.success) {
|
||||
return result.data;
|
||||
} else {
|
||||
console.error(result.error);
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
```bash
|
||||
# Install dependencies
|
||||
pnpm install
|
||||
|
||||
# Build the package
|
||||
pnpm build
|
||||
|
||||
# Watch for changes during development
|
||||
pnpm dev
|
||||
|
||||
# Lint code
|
||||
pnpm lint:check
|
||||
|
||||
# Fix linting issues
|
||||
pnpm lint:fix
|
||||
```
|
||||
|
||||
## Adding to the Library
|
||||
|
||||
When adding new functionality to this library, please follow these guidelines:
|
||||
|
||||
1. Add types to `src/types.ts`
|
||||
2. Add utility functions to appropriate files in `src/`
|
||||
3. Export public API from `src/index.ts`
|
||||
4. Update documentation
|
||||
5. Run tests if applicable
|
||||
@@ -1,16 +0,0 @@
|
||||
import type { Config } from 'jest';

// Jest configuration for the snips package: TypeScript via ts-jest in a
// Node environment, with module aliases mirroring the tsconfig paths.
const config: Config = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  roots: ['<rootDir>/src'],
  transform: {
    '^.+\\.tsx?$': 'ts-jest',
  },
  moduleNameMapper: {
    // Resolve `import ... from 'snips.config'` to the package-root config.
    '^snips.config$': '<rootDir>/snips.config.ts',
    // Map the `@/` alias to the src directory.
    '^@/(.*)$': '<rootDir>/src/$1',
  },
};

export default config;
|
||||
@@ -1,56 +0,0 @@
|
||||
{
|
||||
"name": "@hatchet/snips",
|
||||
"private": true,
|
||||
"version": "0.1.0",
|
||||
"packageManager": "pnpm@9.15.4",
|
||||
"main": "dist/index.js",
|
||||
"module": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"exports": {
|
||||
".": {
|
||||
"import": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts"
|
||||
}
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"dev": "tsc --watch",
|
||||
"lint:check": "npm run eslint:check && npm run prettier:check",
|
||||
"lint:fix": "npm run eslint:fix && npm run prettier:fix",
|
||||
"eslint:check": "eslint \"{src,apps,libs,test}/**/*.{ts,tsx,js}\"",
|
||||
"eslint:fix": "eslint \"{src,apps,libs,test}/**/*.{ts,tsx,js}\" --fix",
|
||||
"prettier:check": "prettier \"src/**/*.{ts,tsx}\" --list-different",
|
||||
"prettier:fix": "prettier \"src/**/*.{ts,tsx}\" --write",
|
||||
"generate": "rm -rf out && mkdir -p out && cp -r ../../examples ./out/examples && tsc && ts-node src/scripts/build-tree.ts",
|
||||
"test": "jest",
|
||||
"test:watch": "jest --watch",
|
||||
"copy:docs": "rm -rf ../docs/lib/generated/snips && mkdir -p ../docs/lib/generated && cp -r ./out/snips ../docs/lib/generated/snips",
|
||||
"copy:examples": "rm -rf ../../examples && cp -r ./out/examples ../../",
|
||||
"copy:all": "npm run copy:docs && npm run copy:examples",
|
||||
"generate:copy": "npm run generate && npm run copy:all"
|
||||
},
|
||||
"dependencies": {
|
||||
"@types/node": "^20.17.28",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.8.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^29.5.14",
|
||||
"@typescript-eslint/eslint-plugin": "^6.21.0",
|
||||
"@typescript-eslint/parser": "^6.21.0",
|
||||
"eslint": "^8.57.1",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-import": "^2.31.0",
|
||||
"eslint-plugin-prettier": "^5.2.5",
|
||||
"eslint-plugin-unused-imports": "^3.2.0",
|
||||
"jest": "^29.7.0",
|
||||
"prettier": "^3.5.3",
|
||||
"ts-jest": "^29.3.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": ">=4.7.4"
|
||||
}
|
||||
}
|
||||
Generated
-4635
File diff suppressed because it is too large
Load Diff
@@ -1,76 +0,0 @@
|
||||
// Test fixture configuration for the snips build: processes a small local
// test directory instead of the real SDK example sources.
import { Config } from '@/utils/config';

export const config: Config = {
  // Directories to process
  SOURCE_DIRS: ['./test_dir/src'],

  // Output directory
  OUTPUT_DIR: 'out',

  // Files to preserve during removal
  PRESERVE_FILES: [
    'package.json',
    'pnpm-lock.yaml',
    'pyproject.toml',
    'README.md',
    'tsconfig.json',
  ],

  // Files and directories to ignore during copying
  IGNORE_LIST: [
    // Test files and directories
    'test',
    'tests',
    '__tests__',
    '*.test.*',
    '*.spec.*',
    '*.test-d.*',

    // Python specific
    '__pycache__',
    '.pytest_cache',
    '*.pyc',

    // System files
    '.DS_Store',

    // Development directories
    'node_modules',
    '.git',
    '*.log',
    '*.tmp',
    '.env',
    '.venv',
    'venv',
    'dist',
    'build',
  ],

  // Text replacements to perform on copied files
  REPLACEMENTS: [
    {
      from: '@hatchet',
      to: '@hatchet-dev/typescript-sdk',
    },
  ],

  // Patterns to remove from code files (entire matching lines are dropped)
  REMOVAL_PATTERNS: [
    {
      regex: /^\s*(\/\/|#)\s*HH-.*$/gm,
      description: 'HH- style comments',
    },
    {
      regex: /^\s*(\/\/|#)\s*!!\s*$/gm,
      description: 'End marker comments',
    },
    {
      regex: /^\s*\/\*\s*eslint-.*\*\/$/gm,
      description: 'ESLint disable block comments',
    },
    {
      regex: /\s*(\/\/|#)\s*eslint-disable-next-line.*$/gm,
      description: 'ESLint disable line comments',
    },
  ],
};
|
||||
@@ -1,98 +0,0 @@
|
||||
// Production configuration for the snips build: maps each SDK language to
// its example source directory and defines copy/clean behavior.
import { Config } from '@/utils/config';

export const config: Config = {
  // Directories to process, keyed by language (the key becomes the output
  // subdirectory name under OUTPUT_DIR)
  SOURCE_DIRS: {
    typescript: '../../sdks/typescript/src/v1/examples',
    python: '../../sdks/python/examples',
    go: '../../pkg/examples',
  },

  // Output directory
  OUTPUT_DIR: 'out',

  // Files to preserve during removal
  PRESERVE_FILES: [
    'package.json',
    'pnpm-lock.yaml',
    'pyproject.toml',
    'README.md',
    'tsconfig.json',
  ],

  // Files and directories to ignore during copying
  IGNORE_LIST: [
    // Test files and directories
    'test',
    'tests',
    '__tests__',
    '*.test.*',
    '*.spec.*',
    '*.test-d.*',

    'package-lock.json',
    'pnpm-lock.yaml',
    'package.json',

    // Python specific
    '__pycache__',
    '.pytest_cache',
    '*.pyc',

    // Go specific
    'go.mod',
    'go.sum',
    '*_test.go',

    // System files
    '.DS_Store',

    // Development directories
    'node_modules',
    '.git',
    '*.log',
    '*.tmp',
    '.env',
    '.venv',
    'venv',
    'dist',
    'build',
  ],

  // Text replacements to perform on copied files
  REPLACEMENTS: [
    {
      from: '@hatchet',
      to: '@hatchet-dev/typescript-sdk',
      // Only rewrite the import alias in TypeScript sources
      fileTypes: ['ts'],
    },
  ],

  // Patterns to remove from code files. NOTE(review): these are plain
  // substring patterns (strings), unlike the regex patterns used in the
  // test config — String.match() treats a string as a regex source, so
  // e.g. '# !!' matches that substring anywhere in a line.
  REMOVAL_PATTERNS: [
    {
      regex: '# HH-',
      description: 'HH- style comments',
    },
    {
      regex: '# !!',
      description: 'End marker comments',
    },
    {
      regex: '// !!',
      description: 'End marker comments',
    },
    {
      regex: '// HH-',
      description: 'HH- style comments',
    },
    {
      regex: /^\s*\/\*\s*eslint-.*\*\/$/gm,
      description: 'ESLint disable block comments',
    },
    {
      regex: /\s*(\/\/|#)\s*eslint-disable-next-line.*$/gm,
      description: 'ESLint disable line comments',
    },
  ],
};
|
||||
@@ -1,23 +0,0 @@
|
||||
/** Input to a ContentProcessor: one source file's output path, name, and text. */
type ContentProcessorProps = {
  path: string;
  name: string;
  content: string;
};

/** One output artifact produced from a processed file. */
type ProcessedFile = {
  filename?: string; // overrides the output file name when set
  content: string;
  outDir?: string; // output subdirectory (e.g. 'examples' or 'snips')
};

/** Input to a DirectoryProcessor: the directory to post-process. */
type DirectoryProcessorProps = {
  dir: string;
};

export type ContentProcessor = (props: ContentProcessorProps) => Promise<ProcessedFile[]>;
export type DirectoryProcessor = (props: DirectoryProcessorProps) => Promise<void>;

/** A pluggable pipeline step: a per-file transform plus a per-directory hook. */
export type Processor = {
  processFile: ContentProcessor;
  processDirectory: DirectoryProcessor;
};
|
||||
@@ -1,257 +0,0 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import { getConfig } from '../../utils/config';
|
||||
import { Snippet, LANGUAGE_MAP, Block, Highlight } from '../../types';
|
||||
import { ContentProcessor, DirectoryProcessor, Processor } from '../processor.interface';
|
||||
import * as path from 'path';
|
||||
import { Dirent } from 'fs';
|
||||
|
||||
// Marker tokens recognized inside example source comments.
const TOKENS = {
  BLOCK: {
    START: '>', // `<comment> > Title` begins a named snippet block
    END: '!!', // `<comment> !!` ends the current block
  },
  HIGHLIGHT: {
    START: 'HH-', // `<comment> HH-key N` highlights the following N lines
  },
};
|
||||
|
||||
const getFileName = (name: string) => {
|
||||
const lastDotIndex = name.lastIndexOf('.');
|
||||
const extension = lastDotIndex !== -1 ? name.slice(lastDotIndex + 1) : '';
|
||||
const fileName = lastDotIndex !== -1 ? name.slice(0, lastDotIndex) : name;
|
||||
|
||||
if (name.startsWith('.')) {
|
||||
return { extension, fileName: extension };
|
||||
}
|
||||
|
||||
return { extension, fileName };
|
||||
};
|
||||
|
||||
/**
 * Strips removal-pattern lines (markers, lint pragmas) and applies the
 * configured text replacements for the given file extension.
 */
const sanitizeContent = (content: string, extension: string) => {
  const { REMOVAL_PATTERNS, REPLACEMENTS } = getConfig();

  let cleanedContent = content;

  // First remove entire lines that match removal patterns
  const lines = cleanedContent.split('\n');
  cleanedContent = lines
    .filter((line) => !REMOVAL_PATTERNS.some((pattern) => line.match(pattern.regex)))
    .join('\n');

  // Then apply replacements; a replacement without fileTypes applies to all
  for (const replacement of REPLACEMENTS) {
    if (!replacement.fileTypes || replacement.fileTypes.includes(extension)) {
      cleanedContent = cleanedContent.replaceAll(replacement.from, replacement.to);
    }
  }

  return cleanedContent;
};
|
||||
|
||||
/** Line-comment prefix for a language: '#' for Python, '//' for everything else. */
const getCommentStyle = (language: string) => {
  if (language === 'python') {
    return '#';
  }

  return '//';
};
|
||||
|
||||
// True when the line matches any configured removal pattern. Such lines are
// dropped from published output, so the block/highlight line counters must
// compensate for them (see processBlocks / processHighlights).
const removeLine = (content: string): boolean => {
  const { REMOVAL_PATTERNS } = getConfig();
  return REMOVAL_PATTERNS.some((pattern) => content.match(pattern.regex));
};
|
||||
|
||||
/**
 * Scans content for `<comment> > Title` ... `<comment> !!` markers and
 * records, per normalized title, the start/stop line numbers of each block
 * *as they will appear in the cleaned output* (marker and pragma lines are
 * removed before publishing, so removed lines are subtracted as we go).
 */
const processBlocks = (content: string, language: string): { blocks: { [key: string]: Block } } => {
  const lines = content.split('\n');
  const blocks: { [key: string]: Block } = {};
  let currentBlock: { start: number; key: string } | null = null;
  let removedLines = 0;

  const commentStyle = getCommentStyle(language);

  lines.forEach((line, index) => {
    const trimmedLine = line.trim();
    const currentLineNumber = index + 1 - removedLines; // Adjust for removed lines

    if (trimmedLine.startsWith(`${commentStyle} ${TOKENS.BLOCK.START}`)) {
      // Everything after the start token is the block's title.
      const key = trimmedLine.replaceAll(`${commentStyle} ${TOKENS.BLOCK.START}`, '').trim();
      currentBlock = { start: currentLineNumber + 1, key }; // Start on next line
    } else if (trimmedLine.startsWith(`${commentStyle} ${TOKENS.BLOCK.END}`) && currentBlock) {
      blocks[normalizeKey(currentBlock.key)] = {
        start: currentBlock.start,
        stop: currentLineNumber - 1, // -1 because we want the line before the !!
      };
      currentBlock = null;
    }

    // Marker/pragma lines vanish from the output; track them so subsequent
    // line numbers stay aligned with the cleaned file.
    if (removeLine(trimmedLine)) {
      removedLines++;
    }
  });

  return { blocks };
};
|
||||
|
||||
const normalizeKey = (key: string) =>
|
||||
key
|
||||
.toLowerCase()
|
||||
.replaceAll(/ /g, '_')
|
||||
.replaceAll(/[-]/g, '_')
|
||||
.replaceAll(/[^a-z0-9_]/g, '');
|
||||
|
||||
/**
 * Extracts highlight directives (`<comment> HH-key lineCount [s1, s2, ...]`).
 * Each key maps to the line numbers it covers in the cleaned output, plus
 * optional comma-separated strings to emphasize. Like processBlocks, line
 * numbers are adjusted for marker/pragma lines that will be removed.
 */
const processHighlights = (content: string, language: string): { [key: string]: Highlight } => {
  const lines = content.split('\n');
  const highlights: { [key: string]: Highlight } = {};
  let removedLines = 0;

  const commentStyle = getCommentStyle(language);

  lines.forEach((line, index) => {
    const trimmedLine = line.trim();
    const currentLineNumber = index - removedLines;

    // e.g. "// HH-retries 3 backoff, maxRetries"
    const highlightMatch = trimmedLine.match(
      new RegExp(`${commentStyle} ${TOKENS.HIGHLIGHT.START}([^ ]+) (\\d+)(?: (.*))?`),
    );
    if (highlightMatch) {
      const [, key, lineCountStr, stringsStr] = highlightMatch;
      const lineCount = parseInt(lineCountStr, 10);
      const strings = stringsStr ? stringsStr.split(',').map((s) => s.trim()) : [];

      // Calculate all line numbers to highlight
      const startLine = currentLineNumber + 1;
      const lines = Array.from({ length: lineCount }, (_, i) => startLine + i);

      highlights[normalizeKey(key)] = {
        lines,
        strings,
      };
    }

    // Compensate for lines that will be stripped from published output.
    if (removeLine(trimmedLine)) {
      removedLines++;
    }
  });

  return highlights;
};
|
||||
|
||||
const processBlocksAndHighlights = (
|
||||
content: string,
|
||||
language: string,
|
||||
): { blocks: { [key: string]: Block }; highlights: { [key: string]: Highlight } } => {
|
||||
const { blocks } = processBlocks(content, language);
|
||||
const highlights = processHighlights(content, language);
|
||||
return { blocks, highlights };
|
||||
};
|
||||
|
||||
/**
 * Processes content by creating a TypeScript string
 * that exports a default Snippet with that content.
 *
 * Emits two artifacts per source file: the cleaned example (to 'examples')
 * and a generated `.ts` module wrapping it as a Snippet (to 'snips').
 */
const processSnippet: ContentProcessor = async ({ path, name, content }) => {
  const parsedName = getFileName(name);

  if (!parsedName) {
    return [];
  }

  const { extension, fileName } = parsedName;

  // Map the extension to a display language; unknown extensions still pass
  // through so the raw content is preserved.
  const language =
    extension && extension in LANGUAGE_MAP
      ? LANGUAGE_MAP[extension as keyof typeof LANGUAGE_MAP]
      : 'unknown';

  const cleanedContent = sanitizeContent(content, extension);
  // Blocks/highlights are computed from the ORIGINAL content: the marker
  // lines they key off are removed by sanitizeContent.
  const { blocks, highlights } = processBlocksAndHighlights(content, language);

  const snippet: Snippet = {
    language,
    content: cleanedContent,
    source: path,
    blocks,
    highlights,
  };

  const tsContent = `import { Snippet } from '@/lib/generated/snips/types';

const snippet: Snippet = ${JSON.stringify(snippet, null, 2)};

export default snippet;
`;

  return [
    {
      content: cleanedContent,
      outDir: 'examples',
    },
    {
      filename: `${fileName}.ts`,
      content: tsContent,
      outDir: 'snips',
    },
  ];
};
|
||||
|
||||
/**
 * Directory hook for the 'snips' output tree: writes an index.ts barrel in
 * each directory re-exporting every snippet module and subdirectory, and
 * copies the shared types.ts into the snips root.
 */
const processDirectory: DirectoryProcessor = async ({ dir }) => {
  // Only the generated snips tree gets barrel files.
  if (!dir.includes('snips')) {
    return;
  }

  if (dir.endsWith('snips')) {
    // copy types.ts to the root of the generated tree
    const typesPath = path.join(__dirname, '../../types.ts');
    await fs.copyFile(typesPath, path.join(dir, 'types.ts'));
    console.log(`Copied types.ts to ${dir}`);
  }

  const entries = await fs.readdir(dir, { withFileTypes: true });
  const snippets = entries.filter(
    (entry: Dirent) => entry.isFile() && entry.name.endsWith('.ts') && entry.name !== 'index.ts',
  );
  const directories = entries.filter((entry: Dirent) => entry.isDirectory());

  if (snippets.length === 0 && directories.length === 0) {
    return;
  }

  // Generate import and export statements for files
  const fileImports = snippets.map((file: Dirent) => {
    console.log(file.name);
    const baseName = sanitizeFileName(file.name);
    return `import ${baseName} from './${file.name.replaceAll('.ts', '')}';`;
  });

  const fileExports = snippets.map((file: Dirent) => {
    const baseName = sanitizeFileName(file.name);
    return `export { ${baseName} }`;
  });

  // Generate import and export statements for directories
  const dirImports = directories.map((dir: Dirent) => {
    const importName = sanitizeFileName(dir.name);
    return `import * as ${importName} from './${dir.name}';`;
  });

  const dirExports = directories.map((dir: Dirent) => {
    const importName = sanitizeFileName(dir.name);
    return `export { ${importName} };`;
  });

  // Imports first, then a blank line, then all exports.
  const indexContent = [...fileImports, ...dirImports, '', ...fileExports, ...dirExports, ''].join(
    '\n',
  );

  // Write the index.ts file
  await fs.writeFile(path.join(dir, 'index.ts'), indexContent, 'utf-8');
};
|
||||
|
||||
const sanitizeFileName = (name: string) => {
|
||||
return name
|
||||
.toLowerCase()
|
||||
.replaceAll('.ts', '')
|
||||
.replaceAll('do', '_do')
|
||||
.replaceAll(/[-]/g, '_')
|
||||
.replaceAll(/ /g, '_')
|
||||
.replaceAll(/[^a-z0-9_]/g, '');
|
||||
};
|
||||
|
||||
// The snippet Processor wired into the build pipeline: per-file snippet
// generation plus per-directory barrel-file generation.
export const snippetProcessor: Processor = {
  processFile: processSnippet,
  processDirectory: processDirectory,
};
|
||||
@@ -1,40 +0,0 @@
|
||||
import { processFiles } from './build-tree';
|
||||
import { promises as fs } from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
/**
 * Recursively asserts that two directories contain identical file trees
 * with identical file contents (golden-file comparison helper).
 */
async function compareDirectories(actualDir: string, expectedDir: string) {
  const actualFiles = await fs.readdir(actualDir);
  const expectedFiles = await fs.readdir(expectedDir);

  // Check that both directories have exactly the same files
  expect(actualFiles.sort()).toEqual(expectedFiles.sort());

  for (const file of actualFiles) {
    const actualPath = path.join(actualDir, file);
    const expectedPath = path.join(expectedDir, file);

    const actualStat = await fs.stat(actualPath);
    const expectedStat = await fs.stat(expectedPath);

    if (actualStat.isDirectory() && expectedStat.isDirectory()) {
      await compareDirectories(actualPath, expectedPath);
    } else if (actualStat.isFile() && expectedStat.isFile()) {
      const actualContent = await fs.readFile(actualPath, 'utf8');
      const expectedContent = await fs.readFile(expectedPath, 'utf8');
      expect(actualContent).toEqual(expectedContent);
    } else {
      // One side is a file and the other a directory.
      throw new Error(`Mismatched types for ${file}`);
    }
  }
}
|
||||
|
||||
// Golden-file test: run the full pipeline, then diff the 'out' directory
// against the checked-in expected output under test_dir/expected.
describe('processFiles', () => {
  it('should process files correctly', async () => {
    await processFiles();

    const actualDir = path.join(__dirname, '../../out');
    const expectedDir = path.join(__dirname, '../../test_dir/expected');

    await compareDirectories(actualDir, expectedDir);
  });
});
|
||||
@@ -1,232 +0,0 @@
|
||||
import { getConfig } from '../utils/config';
|
||||
import { promises as fs } from 'fs';
|
||||
import { Dirent } from 'fs';
|
||||
import * as path from 'path';
|
||||
import { clean, restore } from './clean-build';
|
||||
import { colors } from '../utils/colors';
|
||||
import { Processor } from '@/processors/processor.interface';
|
||||
|
||||
/**
 * Processes files in the source directory, preserving the directory structure
 * but transforming each file into a TypeScript file that exports a Snippet.
 *
 * Pipeline: clean the output dir (preserving configured files), copy/process
 * each language's source tree, restore preserved files, then run every
 * processor's directory hook over the output tree.
 *
 * @returns the file names of the first source directory (used by tests).
 */
export const processFiles = async (): Promise<string[]> => {
  const config = getConfig();
  const startTime = Date.now();
  const { SOURCE_DIRS, OUTPUT_DIR, IGNORE_LIST, PRESERVE_FILES } = config;

  console.log(`${colors.bright}${colors.blue}🚀 Starting snips processing...${colors.reset}`);
  console.log(
    `${colors.cyan}Source directories: ${Object.keys(SOURCE_DIRS).join(', ')}${colors.reset}`,
  );
  console.log(`${colors.cyan}Output directory: ${OUTPUT_DIR}${colors.reset}`);

  // Handle case when no source directories are provided
  if (!SOURCE_DIRS || Object.keys(SOURCE_DIRS).length === 0) {
    console.log(`${colors.red}No source directories provided!${colors.reset}`);
    return [];
  }

  // Ensure output directory exists
  try {
    await fs.mkdir(OUTPUT_DIR, { recursive: true });
    console.log(`${colors.green}✓ Output directory created/verified: ${OUTPUT_DIR}${colors.reset}`);
  } catch (error) {
    console.error(
      `${colors.red}Error creating output directory ${OUTPUT_DIR}:${colors.reset}`,
      error,
    );
    throw error;
  }

  // Clean the output directory first (preserved files are returned for restore)
  const toRestore = await clean(OUTPUT_DIR, PRESERVE_FILES);

  // Process directories; each language gets its own output subtree
  for (const [language, sourceDir] of Object.entries(SOURCE_DIRS)) {
    console.log(`${colors.magenta}Processing directory: ${sourceDir}${colors.reset}`);
    // Recursively process the directory
    await processDirectory(sourceDir, path.join(OUTPUT_DIR, language), IGNORE_LIST);
  }

  // Restore the preserved files
  await restore(toRestore);

  // Process all directories with all processors recursively
  const { PROCESSORS } = config;
  for (const processor of PROCESSORS) {
    console.log(
      `${colors.magenta}Running directory processor recursively: ${processor.constructor.name}${colors.reset}`,
    );
    try {
      await processFinalDirectoryRecursively(OUTPUT_DIR, processor);
    } catch (error) {
      console.error(`${colors.red}Error processing directory recursively:${colors.reset}`, error);
      // Continue with other processors even if one fails
    }
  }

  const endTime = Date.now();
  const duration = (endTime - startTime) / 1000;
  console.log(
    `${colors.bright}${colors.green}✓ Processing complete in ${duration} seconds!${colors.reset}`,
  );

  // Return files from the first directory for testing purposes
  try {
    return await fs.readdir(Object.values(SOURCE_DIRS)[0]);
  } catch (error) {
    console.error(
      `${colors.red}Error reading directory ${Object.values(SOURCE_DIRS)[0]}:${colors.reset}`,
      error,
    );
    throw error;
  }
};
|
||||
|
||||
/**
|
||||
* Ensures a directory exists, creating it if necessary
|
||||
*/
|
||||
const ensureDirectoryExists = async (dirPath: string): Promise<void> => {
|
||||
await fs.mkdir(dirPath, { recursive: true });
|
||||
};
|
||||
|
||||
/**
|
||||
* Processes a single file, applying all processors and writing the result
|
||||
*/
|
||||
const processFile = async (
|
||||
sourcePath: string,
|
||||
outputPath: string,
|
||||
entry: Dirent,
|
||||
processors: Processor[],
|
||||
): Promise<void> => {
|
||||
const content = await fs.readFile(sourcePath, 'utf-8');
|
||||
|
||||
processors.forEach(async (processor) => {
|
||||
const results = await processor.processFile({
|
||||
path: outputPath,
|
||||
name: entry.name,
|
||||
content: content,
|
||||
});
|
||||
|
||||
await Promise.all(
|
||||
results.map(async (result) => {
|
||||
const previousPath = outputPath;
|
||||
|
||||
const previousPathParts = previousPath.split('/');
|
||||
let currentOutputPath = path.join(
|
||||
previousPathParts[0],
|
||||
result.outDir || '',
|
||||
...previousPathParts.slice(1),
|
||||
);
|
||||
|
||||
if (result.filename) {
|
||||
const previousPath = currentOutputPath;
|
||||
currentOutputPath = path.join(path.dirname(currentOutputPath), result.filename);
|
||||
console.log(
|
||||
`${colors.yellow} ⟳ Processor changed filename: ${path.basename(previousPath)} → ${result.filename}${colors.reset}`,
|
||||
);
|
||||
}
|
||||
|
||||
await ensureDirectoryExists(path.dirname(currentOutputPath));
|
||||
await fs.writeFile(currentOutputPath, result.content, 'utf-8');
|
||||
|
||||
console.log(
|
||||
`${colors.green} ✓ Processed file written to: ${currentOutputPath}${colors.reset}`,
|
||||
);
|
||||
}),
|
||||
);
|
||||
});
|
||||
};
|
||||
|
||||
/**
 * Processes a single directory entry (file or subdirectory).
 * Entries matching the ignore list are skipped entirely; directories
 * recurse into processDirectory, files go through all processors.
 */
const processEntry = async (
  entry: Dirent,
  sourcePath: string,
  outputDir: string,
  ignoreList: string[] | RegExp[],
  processors: Processor[],
): Promise<void> => {
  if (shouldIgnore(entry.name, ignoreList)) {
    console.log(`${colors.yellow}Ignoring: ${sourcePath}${colors.reset}`);
    return;
  }

  const targetPath = path.join(outputDir, entry.name);

  if (entry.isDirectory()) {
    console.log(`${colors.magenta}→ Processing subdirectory: ${sourcePath}${colors.reset}`);
    await processDirectory(sourcePath, targetPath, ignoreList);
  } else {
    console.log(`${colors.blue}→ Processing file: ${sourcePath}${colors.reset}`);
    await processFile(sourcePath, targetPath, entry, processors);
  }
};
|
||||
|
||||
/**
 * Recursively processes a directory and its contents.
 * All entries in a directory are processed concurrently via Promise.all.
 */
export const processDirectory = async (
  sourceDir: string,
  outputDir: string,
  ignoreList: string[] | RegExp[],
): Promise<void> => {
  try {
    const { PROCESSORS } = getConfig();

    const entries = await fs.readdir(sourceDir, { withFileTypes: true });
    console.log(`${colors.cyan}Found ${entries.length} entries in ${sourceDir}${colors.reset}`);

    await Promise.all(
      entries.map(async (entry: Dirent) => {
        const sourcePath = path.join(sourceDir, entry.name);
        await processEntry(entry, sourcePath, outputDir, ignoreList, PROCESSORS);
      }),
    );
  } catch (error) {
    console.error(`${colors.red}Error processing directory ${sourceDir}:${colors.reset}`, error);
    throw error;
  }
};
|
||||
|
||||
/**
|
||||
* Checks if a file or directory should be ignored
|
||||
*/
|
||||
const shouldIgnore = (name: string, ignoreList: string[] | RegExp[]): boolean => {
|
||||
return ignoreList.some((pattern) => {
|
||||
if (pattern instanceof RegExp) {
|
||||
return pattern.test(name);
|
||||
}
|
||||
if (pattern.includes('*')) {
|
||||
// Convert glob pattern to regex
|
||||
const regexPattern = pattern.replace(/\./g, '\\.').replace(/\*/g, '.*');
|
||||
return new RegExp(`^${regexPattern}$`).test(name);
|
||||
}
|
||||
return name === pattern;
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Recursively processes a directory and all its subdirectories with a given processor
|
||||
*/
|
||||
const processFinalDirectoryRecursively = async (
|
||||
dirPath: string,
|
||||
processor: Processor,
|
||||
): Promise<void> => {
|
||||
const entries = await fs.readdir(dirPath, { withFileTypes: true });
|
||||
|
||||
// Process the current directory
|
||||
await processor.processDirectory({ dir: dirPath });
|
||||
|
||||
// Process all subdirectories
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
const subDirPath = path.join(dirPath, entry.name);
|
||||
await processFinalDirectoryRecursively(subDirPath, processor);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Entry point: kick off the snippet-processing pipeline.
// NOTE(review): fire-and-forget — any promise returned by processFiles is
// not awaited here, so rejections surface as unhandled; confirm intended.
processFiles();
|
||||
@@ -1,87 +0,0 @@
|
||||
import { promises as fs } from 'fs';
|
||||
import * as path from 'path';
|
||||
import { colors } from '../utils/colors';
|
||||
import { tmpdir } from 'os';
|
||||
/**
|
||||
* Preserved file structure for restoring files later
|
||||
*/
|
||||
/**
 * Preserved file structure for restoring files later.
 * Returned by `clean` and consumed by `restore`.
 */
interface PreservedFiles {
  // Original (pre-clean) paths of the files that were copied aside.
  files: string[];
  // Temporary directory holding the copies; each copy lives at
  // path.join(tempPath, originalPath).
  tempPath: string;
}
|
||||
|
||||
/**
|
||||
* Cleans the output directory by temporarily preserving important files,
|
||||
* removing everything, then returning info needed to restore the preserved files later
|
||||
*/
|
||||
/**
 * Cleans the output directory by temporarily preserving important files,
 * removing everything, then returning info needed to restore the preserved
 * files later (see `restore`).
 *
 * @param dir directory to clean (removed recursively unless it equals tmpDir)
 * @param preserveFiles exact names or regexes of files to keep
 * @param tmpDir internal: temp root threaded through recursive calls;
 *               created on the first (outermost) call when omitted
 * @returns the original paths of preserved files plus the temp root they
 *          were copied into
 */
export const clean = async (
  dir: string,
  preserveFiles: string[] | RegExp[],
  tmpDir?: string,
): Promise<PreservedFiles> => {
  if (!tmpDir) {
    // First call: allocate a unique temp root for this clean run.
    tmpDir = path.join(tmpdir(), `snips-${Date.now()}`);
    await fs.mkdir(tmpDir, { recursive: true });
  }

  const toRestore: PreservedFiles['files'] = [];

  // Get all entries in the directory
  const entries = await fs.readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const filePath = path.join(dir, entry.name);
    // NOTE(review): fs.stat follows symlinks (unlike entry.isDirectory()),
    // so a symlink to a directory recurses into its target — confirm intended.
    const stats = await fs.stat(filePath);
    if (stats.isDirectory()) {
      // Recursively clean the directory
      const results = await clean(filePath, preserveFiles, tmpDir);
      toRestore.push(...results.files);
    } else {
      console.log(`${colors.cyan}Checking file: ${entry.name}${colors.reset}`);
      // Check if the file should be preserved
      if (
        preserveFiles.some((p) => (typeof p === 'string' ? p === entry.name : p.test(entry.name)))
      ) {
        console.log(`${colors.cyan}Preserving file: ${entry.name}${colors.reset}`);
        // Create the temporary directory structure
        // NOTE(review): joining tmpDir with filePath assumes filePath is
        // relative; an absolute dir argument would yield odd temp paths —
        // confirm callers pass relative paths.
        const tempFilePath = path.join(tmpDir, filePath);
        await fs.mkdir(path.dirname(tempFilePath), { recursive: true });
        // Copy the file to temporary storage
        await fs.copyFile(filePath, tempFilePath);
        toRestore.push(filePath);
      }
    }
  }

  // After preserving files, remove the directory and its contents
  if (dir !== tmpDir) {
    try {
      await fs.rm(dir, { recursive: true, force: true });
      console.log(`${colors.yellow}Removed directory: ${dir}${colors.reset}`);
    } catch (error) {
      // Best-effort removal: log and continue so preserved files are still returned.
      console.error(`${colors.red}Error removing directory ${dir}:${colors.reset}`, error);
    }
  }

  return {
    files: toRestore,
    tempPath: tmpDir,
  };
};
|
||||
|
||||
/**
|
||||
* Restores preserved files from temporary directory to output directory
|
||||
*/
|
||||
export const restore = async ({ files, tempPath }: PreservedFiles): Promise<void> => {
|
||||
// Then restore all files
|
||||
for (const file of files) {
|
||||
try {
|
||||
const tempFilePath = path.join(tempPath, file);
|
||||
const dirPath = path.dirname(file);
|
||||
await fs.mkdir(dirPath, { recursive: true });
|
||||
await fs.copyFile(tempFilePath, file);
|
||||
console.log(`${colors.green}Restored preserved file: ${file}${colors.reset}`);
|
||||
} catch (error) {
|
||||
console.error(`${colors.red}Error restoring file: ${file}${colors.reset}`, error);
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -1,31 +0,0 @@
|
||||
// A named highlight within a snippet: line numbers plus the literal strings
// to highlight on those lines.
export type Highlight = {
  lines: number[];
  strings: string[];
};

// A named region of a snippet delimited by start/stop line numbers.
export type Block = {
  start: number;
  stop: number;
};

// Types for snippets
export type Snippet = {
  // Raw snippet text.
  content: string;
  // Display language identifier (see LANGUAGE_MAP).
  language: string;
  // Path of the file the snippet was extracted from.
  source: string;
  // Optional named regions keyed by block name.
  blocks?: {
    [key: string]: Block;
  };
  // Optional named highlights keyed by highlight name.
  highlights?: {
    [key: string]: Highlight;
  };
};
|
||||
|
||||
export const LANGUAGE_MAP = {
|
||||
ts: 'typescript ',
|
||||
py: 'python',
|
||||
go: 'go',
|
||||
unknown: 'unknown',
|
||||
};
|
||||
|
||||
export default {};
|
||||
@@ -1,12 +0,0 @@
|
||||
// Color codes for console logs
// ANSI escape sequences used to colorize terminal output. `reset` must be
// appended after any colored segment to return the terminal to its default
// style (all log sites in this package follow that pattern).
export const colors = {
  reset: '\x1b[0m',
  bright: '\x1b[1m',
  green: '\x1b[32m',
  blue: '\x1b[34m',
  cyan: '\x1b[36m',
  yellow: '\x1b[33m',
  magenta: '\x1b[35m',
  red: '\x1b[31m',
};
|
||||
@@ -1,30 +0,0 @@
|
||||
import { config } from '../../snips.config';
|
||||
import { Processor } from '../processors/processor.interface';
|
||||
import { snippetProcessor } from '../processors/snippets/snippet.processor';
|
||||
|
||||
/**
 * Shape of the snips configuration (supplied by snips.config.ts and merged
 * with defaults/overrides in getConfig).
 */
export type Config = {
  // Source directories to pull content from, keyed by label.
  SOURCE_DIRS: { [key: string]: string };
  // Directory processed output is written to.
  OUTPUT_DIR: string;
  // File names (exact strings or regexes) to keep when cleaning the output dir.
  PRESERVE_FILES: string[] | RegExp[];
  // Names/globs skipped entirely during processing.
  IGNORE_LIST: string[];
  // Literal substitutions, presumably applied to file contents and
  // optionally restricted to certain file types — confirm against consumers.
  REPLACEMENTS: Array<{
    from: string;
    to: string;
    fileTypes?: string[];
  }>;
  // Patterns whose matches are removed, each with a human-readable reason.
  REMOVAL_PATTERNS: Array<{
    regex: string | RegExp;
    description: string;
  }>;
  // Optional processor pipeline; getConfig supplies DEFAULT_PROCESSORS when absent.
  PROCESSORS?: Processor[];
};
|
||||
|
||||
const DEFAULT_PROCESSORS: Processor[] = [snippetProcessor];
|
||||
|
||||
export const getConfig = (overrides: Partial<Config> = {}) => {
|
||||
return {
|
||||
PROCESSORS: [...DEFAULT_PROCESSORS],
|
||||
...config,
|
||||
...overrides,
|
||||
};
|
||||
};
|
||||
@@ -1,11 +0,0 @@
|
||||
export const hash = async (content: string): Promise<string> => {
|
||||
// Use the Web Crypto API to generate a SHA-256 digest
|
||||
const msgUint8 = new TextEncoder().encode(content);
|
||||
const hashBuffer = await crypto.subtle.digest('SHA-256', msgUint8);
|
||||
|
||||
// Convert the ArrayBuffer to hex string
|
||||
return Array.from(new Uint8Array(hashBuffer))
|
||||
.map((b) => b.toString(16).padStart(2, '0'))
|
||||
.slice(0, 8)
|
||||
.join('');
|
||||
};
|
||||
@@ -1 +0,0 @@
|
||||
THIS SHOULD PERSIST
|
||||
@@ -1,5 +0,0 @@
|
||||
import { Snippet } from '@/types';

// NOTE(review): appears to be a generated snippet module mirroring
// test_dir/src/goodbye/later.txt — regenerate rather than hand-edit.
const snippet: Snippet = {"language":"unknown","content":"laaaaater","source":"test_dir/src/goodbye/later.txt"};

export default snippet;
|
||||
@@ -1,5 +0,0 @@
|
||||
import { Snippet } from '@/types';

// NOTE(review): appears to be a generated snippet module mirroring
// test_dir/src/goodbye/nested/nested.py — regenerate rather than hand-edit.
const snippet: Snippet = {"language":"python","content":"# nested file\n","source":"test_dir/src/goodbye/nested/nested.py"};

export default snippet;
|
||||
@@ -1,5 +0,0 @@
|
||||
import { Snippet } from '@/types';

// NOTE(review): appears to be a generated snippet module mirroring
// test_dir/src/goodbye/seeya.txt — regenerate rather than hand-edit.
const snippet: Snippet = {"language":"unknown","content":"✌️","source":"test_dir/src/goodbye/seeya.txt"};

export default snippet;
|
||||
@@ -1,9 +0,0 @@
|
||||
import { Snippet } from '@/types';

// NOTE(review): generated snippet module for test_dir/src/hello.txt; unlike
// its siblings it uses object-literal (non-JSON) formatting — confirm the
// generator's output format before normalizing.
const snippet: Snippet = {
  language: 'unknown',
  content: '👋 hey there, thanks for writing tests this time\nx',
  source: 'test_dir/src/hello.txt',
};

export default snippet;
|
||||
@@ -1 +0,0 @@
|
||||
laaaaater
|
||||
@@ -1,15 +0,0 @@
|
||||
import random
|
||||
|
||||
def hello() -> str:
    """Test fixture exercising snippet-marker parsing.

    NOTE(review): the ``# ?``, ``# !!`` and ``# HH-`` comments below look
    like markers consumed by the snippet generator — treat them as data and
    do not edit or add to them; confirm before documenting further.
    """
    # ? console log
    print('hello')
    # !!

    random.random()

    # HH-random 3
    if random.random() > 0.5:
        return 'yo'

    # HH-return 1 'hello'
    return 'hello'
||||
@@ -1 +0,0 @@
|
||||
✌️
|
||||
@@ -1,26 +0,0 @@
|
||||
// import * from "@hatchet"
|
||||
|
||||
/**
 * Test fixture exercising snippet-marker parsing.
 * NOTE(review): the `// >`, `// !!` and `// HH-` comments below appear to
 * be markers consumed by the snippet generator — treat them as data and do
 * not edit or add to them; confirm before documenting further.
 */
function hello() {
  // > console error
  console.error('hello');
  // !!

  // > console log
  console.log('hello');
  // !!

  // > multiple lines
  console.log('hello');
  console.log('world');
  // !!

  // HH-random 3
  if (Math.random() > 0.5) {
    return 'yo';
  }

  // HH-return 1 'hello'
  return 'hello';
}

export default hello;
|
||||
@@ -1,24 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2021",
|
||||
"module": "CommonJS",
|
||||
"moduleResolution": "node",
|
||||
"lib": ["ES2021", "DOM"],
|
||||
"types": ["node"],
|
||||
"outDir": "dist",
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true,
|
||||
"esModuleInterop": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"strict": true,
|
||||
"skipLibCheck": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"@/*": ["./src/*"],
|
||||
"@config": ["snips.config.ts"]
|
||||
}
|
||||
},
|
||||
"include": ["src/**/*.ts", "scripts/generate-snips.ts", "scripts/copy-examples.ts"],
|
||||
"exclude": ["node_modules", "dist", "**/*.test.ts"]
|
||||
}
|
||||
@@ -1,5 +1,3 @@
|
||||
# > Simple
|
||||
|
||||
from typing import Annotated
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
@@ -0,0 +1,88 @@
|
||||
import { WorkerLabelComparator } from '@hatchet/protoc/workflows';
|
||||
import { hatchet } from '../hatchet-client';
|
||||
|
||||
// Example: worker-affinity workflows. The `// >` / `// !!` comment pairs
// below delimit snippet regions consumed by the docs generator — keep them
// intact.
// > AffinityWorkflow

const workflow = hatchet.workflow({
  name: 'affinity-workflow',
  description: 'test',
});

workflow.task({
  name: 'step1',
  fn: async (_, ctx) => {
    const results: Promise<any>[] = [];
    // eslint-disable-next-line no-plusplus
    for (let i = 0; i < 50; i++) {
      // NOTE(review): each spawn is awaited, so the 50 child workflows are
      // spawned sequentially; only their `output` promises are collected
      // and awaited together below — confirm intended.
      const result = await ctx.spawnWorkflow(childWorkflow.id, {});
      results.push(result.output);
    }
    console.log('Spawned 50 child workflows');
    console.log('Results:', await Promise.all(results));

    return { step1: 'step1 results!' };
  },
});

// !!

const childWorkflow = hatchet.workflow({
  name: 'child-affinity-workflow',
  description: 'test',
});

childWorkflow.task({
  name: 'child-step1',
  // Only runs on workers carrying the label model=xyz.
  desiredWorkerLabels: {
    model: {
      value: 'xyz',
      required: true,
    },
  },
  fn: async (ctx) => {
    return { childStep1: 'childStep1 results!' };
  },
});

childWorkflow.task({
  name: 'child-step2',
  // Only runs on workers whose memory label is strictly below 512.
  desiredWorkerLabels: {
    memory: {
      value: 512,
      required: true,
      comparator: WorkerLabelComparator.LESS_THAN,
    },
  },
  fn: async (ctx) => {
    return { childStep2: 'childStep2 results!' };
  },
});

async function main() {
  // > AffinityWorker

  const worker1 = await hatchet.worker('affinity-worker-1', {
    labels: {
      model: 'abc',
      memory: 1024,
    },
  });

  // !!

  await worker1.registerWorkflow(workflow);
  await worker1.registerWorkflow(childWorkflow);
  // NOTE(review): start() is not awaited for either worker — presumably
  // they run until the process is killed; confirm against the SDK docs.
  worker1.start();

  const worker2 = await hatchet.worker('affinity-worker-2', {
    labels: {
      model: 'xyz',
      memory: 512,
    },
  });
  await worker2.registerWorkflow(workflow);
  await worker2.registerWorkflow(childWorkflow);
  worker2.start();
}

main();
|
||||
@@ -0,0 +1,78 @@
|
||||
import { Logger, LogLevel } from '@hatchet/util/logger';
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
import pino from 'pino';
|
||||
import Hatchet from '@hatchet/sdk';
|
||||
import { JsonObject } from '@hatchet/v1';
|
||||
|
||||
// Example: bringing your own logger (pino) to the Hatchet SDK. The `// >` /
// `// !!` comment pairs delimit snippet regions consumed by the docs
// generator — keep them intact.
// > Create Pino logger
const logger = pino();

// Adapter that satisfies the SDK's Logger interface by forwarding every
// level to the shared pino instance. Note pino's argument order:
// (mergeObject, message).
class PinoLogger implements Logger {
  logLevel: LogLevel;
  context: string;

  constructor(context: string, logLevel: LogLevel = 'DEBUG') {
    this.logLevel = logLevel;
    this.context = context;
  }

  debug(message: string, extra?: JsonObject): void {
    logger.debug(extra, message);
  }

  info(message: string, extra?: JsonObject): void {
    logger.info(extra, message);
  }

  green(message: string, extra?: JsonObject): void {
    // NOTE(review): '%c' is a browser-console styling directive; pino will
    // print it literally — confirm intended.
    logger.info(extra, `%c${message}`);
  }

  warn(message: string, error?: Error, extra?: JsonObject): void {
    logger.warn(extra, `${message} ${error}`);
  }

  error(message: string, error?: Error, extra?: JsonObject): void {
    logger.error(extra, `${message} ${error}`);
  }

  // optional util method
  util(key: string, message: string, extra?: JsonObject): void {
    // for example you may want to expose a trace method
    if (key === 'trace') {
      logger.info(extra, 'trace');
    }
  }
}

// Wire the adapter into the SDK: a fresh PinoLogger per context/level.
const hatchet = Hatchet.init({
  log_level: 'DEBUG',
  logger: (ctx, level) => new PinoLogger(ctx, level),
});

// !!

// > Use the logger

const workflow = hatchet.task({
  name: 'byo-logger-example',
  fn: async (ctx) => {
    // eslint-disable-next-line no-plusplus
    for (let i = 0; i < 5; i++) {
      // NOTE(review): logs via the module-level pino instance directly,
      // not via ctx — so these lines bypass the SDK's logger plumbing.
      logger.info(`log message ${i}`);
    }

    return { step1: 'completed step run' };
  },
});

// !!

async function main() {
  const worker = await hatchet.worker('byo-logger-worker', {
    workflows: [workflow],
  });
  worker.start();
}

main();
|
||||
@@ -0,0 +1,42 @@
|
||||
import { hatchet } from '../hatchet-client';
|
||||
|
||||
// Promise-based delay helper used to space out the log lines below.
const sleep = (ms: number) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms);
  });

// Example: using the context logger from inside a task. The `// >` / `// !!`
// comment pair delimits a snippet region consumed by the docs generator —
// keep it intact.
// > Logger

const workflow = hatchet.workflow({
  name: 'logger-example',
  description: 'test',
  on: {
    event: 'user:create',
  },
});

workflow.task({
  name: 'logger-step1',
  fn: async (_, ctx) => {
    // log in a for loop
    // eslint-disable-next-line no-plusplus
    for (let i = 0; i < 10; i++) {
      ctx.logger.info(`log message ${i}`);
      await sleep(200);
    }

    return { step1: 'completed step run' };
  },
});

// !!

async function main() {
  const worker = await hatchet.worker('logger-worker', {
    slots: 1,
    workflows: [workflow],
  });
  await worker.start();
}

main();
|
||||
@@ -1,4 +1,3 @@
|
||||
// > Declaring a Task
|
||||
import sleep from '@hatchet/util/sleep';
|
||||
import { hatchet } from '../hatchet-client';
|
||||
|
||||
|
||||
Reference in New Issue
Block a user