Feat: Scheduled run detail view, bulk cancel / replay with pagination helper (#2416)

* feat: endpoint for listing external ids

* feat: wire up external id list

* chore: regen api

* feat: py sdk wrapper

* fix: since type

* fix: log

* fix: improve defaults for statuses

* feat: docs

* feat: docs

* fix: rm extra file

* feat: add id column to scheduled runs

* feat: side panel for scheduled runs

* fix: side panel header pinned

* fix: border + padding

* chore: gen

* chore: lint

* chore: changelog, version

* fix: spacing of cols

* fix: empty webhook resource limit

* fix: tsc

* fix: sort organizations and tenants alphabetically
This commit is contained in:
matt
2025-10-15 11:36:45 -04:00
committed by GitHub
parent 14894f892b
commit 5b5adcb8ed
48 changed files with 3678 additions and 520 deletions
@@ -316,6 +316,8 @@ V1TaskSummaryList:
$ref: "./v1/task.yaml#/V1TaskSummaryList"
V1WorkflowRunDisplayNameList:
$ref: "./v1/task.yaml#/V1WorkflowRunDisplayNameList"
V1WorkflowRunExternalIdList:
$ref: "./v1/task.yaml#/V1WorkflowRunExternalIdList"
V1TaskSummary:
$ref: "./v1/task.yaml#/V1TaskSummary"
V1DagChildren:
@@ -150,6 +150,7 @@ TenantResource:
- "TASK_RUN"
- "CRON"
- "SCHEDULE"
- 'INCOMING_WEBHOOK'
type: string
TenantResourceLimit:
@@ -181,6 +181,15 @@ V1WorkflowRunDisplayNameList:
- pagination
- rows
V1WorkflowRunExternalIdList:
type: array
items:
type: string
format: uuid
minLength: 36
maxLength: 36
description: The list of external IDs
V1TaskEventList:
properties:
pagination:
+2
View File
@@ -39,6 +39,8 @@ paths:
$ref: "./paths/v1/workflow-runs/workflow_run.yaml#/listWorkflowRuns"
/api/v1/stable/tenants/{tenant}/workflow-runs/display-names:
$ref: "./paths/v1/workflow-runs/workflow_run.yaml#/listWorkflowRunDisplayNames"
/api/v1/stable/tenants/{tenant}/workflow-runs/external-ids:
$ref: "./paths/v1/workflow-runs/workflow_run.yaml#/listWorkflowRunExternalIds"
/api/v1/stable/tenants/{tenant}/workflow-runs/trigger:
$ref: "./paths/v1/workflow-runs/workflow_run.yaml#/trigger"
/api/v1/stable/workflow-runs/{v1-workflow-run}:
@@ -191,6 +191,91 @@ listWorkflowRunDisplayNames:
tags:
- Workflow Runs
listWorkflowRunExternalIds:
get:
x-resources: ["tenant"]
description: Lists external ids for workflow runs matching filters
operationId: v1-workflow-run:external-ids:list
parameters:
- description: The tenant id
in: path
name: tenant
required: true
schema:
type: string
format: uuid
minLength: 36
maxLength: 36
- description: A list of statuses to filter by
in: query
name: statuses
required: false
schema:
type: array
items:
$ref: "../../../components/schemas/_index.yaml#/V1TaskStatus"
- description: The earliest date to filter by
in: query
name: since
required: true
schema:
type: string
format: date-time
- description: The latest date to filter by
in: query
name: until
required: false
schema:
type: string
format: date-time
- description: Additional metadata k-v pairs to filter by
in: query
name: additional_metadata
required: false
schema:
type: array
items:
type: string
- description: The workflow ids to find runs for
in: query
name: workflow_ids
required: false
schema:
type: array
items:
type: string
format: uuid
minLength: 36
maxLength: 36
responses:
"200":
content:
application/json:
schema:
$ref: "../../../components/schemas/_index.yaml#/V1WorkflowRunExternalIdList"
description: Successfully listed the workflow run external ids
"400":
content:
application/json:
schema:
$ref: "../../../components/schemas/_index.yaml#/APIErrors"
description: A malformed or bad request
"403":
content:
application/json:
schema:
$ref: "../../../components/schemas/_index.yaml#/APIErrors"
description: Forbidden
"501":
content:
application/json:
schema:
$ref: "../../../components/schemas/_index.yaml#/APIErrors"
description: Not implemented
summary: List workflow run external ids
tags:
- Workflow Runs
listTaskEventsForWorkflowRun:
get:
x-resources: ["tenant", "v1-workflow-run"]
@@ -335,3 +335,74 @@ func (t *V1WorkflowRunsService) V1WorkflowRunDisplayNamesList(ctx echo.Context,
result,
), nil
}
// V1WorkflowRunExternalIdsList lists the external IDs (UUIDs) of workflow
// runs in the tenant that match the request filters: statuses, a required
// lower creation-time bound (`since`), an optional upper finish-time bound
// (`until`), additional-metadata key/value pairs, and workflow ids.
func (t *V1WorkflowRunsService) V1WorkflowRunExternalIdsList(ctx echo.Context, request gen.V1WorkflowRunExternalIdsListRequestObject) (gen.V1WorkflowRunExternalIdsListResponseObject, error) {
// The tenant is placed on the echo context upstream (the route declares
// x-resources: ["tenant"]). NOTE(review): unchecked type assertion —
// panics if the key is missing or holds a different type; presumably
// middleware guarantees it, but confirm.
tenant := ctx.Get("tenant").(*dbsqlc.Tenant)
tenantId := tenant.ID.String()
spanCtx, span := telemetry.NewSpan(ctx.Request().Context(), "v1-workflow-runs-list-external-ids")
defer span.End()
var (
// Default to every readable status so an omitted or empty `statuses`
// query parameter matches runs in any state.
statuses = []sqlcv1.V1ReadableStatusOlap{
sqlcv1.V1ReadableStatusOlapQUEUED,
sqlcv1.V1ReadableStatusOlapRUNNING,
sqlcv1.V1ReadableStatusOlapFAILED,
sqlcv1.V1ReadableStatusOlapCOMPLETED,
sqlcv1.V1ReadableStatusOlapCANCELLED,
}
// `since` is a required query parameter (earliest creation time).
since = request.Params.Since
workflowIds = []uuid.UUID{}
)
// A non-empty statuses filter replaces the all-statuses default;
// a present-but-empty list is treated the same as omitted.
if request.Params.Statuses != nil {
if len(*request.Params.Statuses) > 0 {
statuses = []sqlcv1.V1ReadableStatusOlap{}
for _, status := range *request.Params.Statuses {
statuses = append(statuses, sqlcv1.V1ReadableStatusOlap(status))
}
}
}
if request.Params.WorkflowIds != nil {
workflowIds = *request.Params.WorkflowIds
}
opts := v1.ListWorkflowRunOpts{
CreatedAfter: since,
Statuses: statuses,
WorkflowIds: workflowIds,
}
// additional_metadata entries arrive as "key:value" strings. Split on the
// first ':' only, so values may themselves contain colons; entries with
// no ':' are silently dropped.
additionalMetadataFilters := make(map[string]interface{})
if request.Params.AdditionalMetadata != nil {
for _, v := range *request.Params.AdditionalMetadata {
kv_pairs := strings.SplitN(v, ":", 2)
if len(kv_pairs) == 2 {
additionalMetadataFilters[kv_pairs[0]] = kv_pairs[1]
}
}
opts.AdditionalMetadata = additionalMetadataFilters
}
// Optional upper bound: restricts results to runs finished before `until`.
if request.Params.Until != nil {
opts.FinishedBefore = request.Params.Until
}
externalIds, err := t.config.V1.OLAP().ListWorkflowRunExternalIds(
spanCtx,
tenantId,
opts,
)
if err != nil {
return nil, err
}
result := transformers.ToWorkflowRunExternalIds(externalIds)
return gen.V1WorkflowRunExternalIdsList200JSONResponse(
result,
), nil
}
+384 -229
View File
@@ -180,13 +180,14 @@ const (
// Defines values for TenantResource.
const (
CRON TenantResource = "CRON"
EVENT TenantResource = "EVENT"
SCHEDULE TenantResource = "SCHEDULE"
TASKRUN TenantResource = "TASK_RUN"
WORKER TenantResource = "WORKER"
WORKERSLOT TenantResource = "WORKER_SLOT"
WORKFLOWRUN TenantResource = "WORKFLOW_RUN"
CRON TenantResource = "CRON"
EVENT TenantResource = "EVENT"
INCOMINGWEBHOOK TenantResource = "INCOMING_WEBHOOK"
SCHEDULE TenantResource = "SCHEDULE"
TASKRUN TenantResource = "TASK_RUN"
WORKER TenantResource = "WORKER"
WORKERSLOT TenantResource = "WORKER_SLOT"
WORKFLOWRUN TenantResource = "WORKFLOW_RUN"
)
// Defines values for TenantUIVersion.
@@ -1909,6 +1910,9 @@ type V1WorkflowRunDisplayNameList struct {
Rows []V1WorkflowRunDisplayName `json:"rows"`
}
// V1WorkflowRunExternalIdList The list of external IDs
type V1WorkflowRunExternalIdList = []openapi_types.UUID
// V1WorkflowType defines model for V1WorkflowType.
type V1WorkflowType string
@@ -2451,6 +2455,24 @@ type V1WorkflowRunDisplayNamesListParams struct {
ExternalIds []openapi_types.UUID `form:"external_ids" json:"external_ids"`
}
// V1WorkflowRunExternalIdsListParams defines parameters for V1WorkflowRunExternalIdsList.
type V1WorkflowRunExternalIdsListParams struct {
// Statuses A list of statuses to filter by
Statuses *[]V1TaskStatus `form:"statuses,omitempty" json:"statuses,omitempty"`
// Since The earliest date to filter by
Since time.Time `form:"since" json:"since"`
// Until The latest date to filter by
Until *time.Time `form:"until,omitempty" json:"until,omitempty"`
// AdditionalMetadata Additional metadata k-v pairs to filter by
AdditionalMetadata *[]string `form:"additional_metadata,omitempty" json:"additional_metadata,omitempty"`
// WorkflowIds The workflow ids to find runs for
WorkflowIds *[]openapi_types.UUID `form:"workflow_ids,omitempty" json:"workflow_ids,omitempty"`
}
// V1WorkflowRunTaskEventsListParams defines parameters for V1WorkflowRunTaskEventsList.
type V1WorkflowRunTaskEventsListParams struct {
// Offset The number to skip
@@ -3045,6 +3067,9 @@ type ServerInterface interface {
// List workflow runs
// (GET /api/v1/stable/tenants/{tenant}/workflow-runs/display-names)
V1WorkflowRunDisplayNamesList(ctx echo.Context, tenant openapi_types.UUID, params V1WorkflowRunDisplayNamesListParams) error
// List workflow run external ids
// (GET /api/v1/stable/tenants/{tenant}/workflow-runs/external-ids)
V1WorkflowRunExternalIdsList(ctx echo.Context, tenant openapi_types.UUID, params V1WorkflowRunExternalIdsListParams) error
// Create workflow run
// (POST /api/v1/stable/tenants/{tenant}/workflow-runs/trigger)
V1WorkflowRunCreate(ctx echo.Context, tenant openapi_types.UUID) error
@@ -4405,6 +4430,63 @@ func (w *ServerInterfaceWrapper) V1WorkflowRunDisplayNamesList(ctx echo.Context)
return err
}
// V1WorkflowRunExternalIdsList converts echo context to params.
// Generated (oapi-codegen) echo wrapper: binds the tenant path parameter
// and the endpoint's query parameters, then delegates to the
// ServerInterface handler. Binding failures return 400 without invoking
// the handler.
func (w *ServerInterfaceWrapper) V1WorkflowRunExternalIdsList(ctx echo.Context) error {
var err error
// ------------- Path parameter "tenant" -------------
var tenant openapi_types.UUID
err = runtime.BindStyledParameterWithLocation("simple", false, "tenant", runtime.ParamLocationPath, ctx.Param("tenant"), &tenant)
if err != nil {
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter tenant: %s", err))
}
// Advertise the security schemes this operation accepts; presumably
// consumed by auth middleware reading these context keys — confirm.
ctx.Set(BearerAuthScopes, []string{})
ctx.Set(CookieAuthScopes, []string{})
// Parameter object where we will unmarshal all parameters from the context
var params V1WorkflowRunExternalIdsListParams
// ------------- Optional query parameter "statuses" -------------
err = runtime.BindQueryParameter("form", true, false, "statuses", ctx.QueryParams(), &params.Statuses)
if err != nil {
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter statuses: %s", err))
}
// ------------- Required query parameter "since" -------------
err = runtime.BindQueryParameter("form", true, true, "since", ctx.QueryParams(), &params.Since)
if err != nil {
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter since: %s", err))
}
// ------------- Optional query parameter "until" -------------
err = runtime.BindQueryParameter("form", true, false, "until", ctx.QueryParams(), &params.Until)
if err != nil {
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter until: %s", err))
}
// ------------- Optional query parameter "additional_metadata" -------------
err = runtime.BindQueryParameter("form", true, false, "additional_metadata", ctx.QueryParams(), &params.AdditionalMetadata)
if err != nil {
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter additional_metadata: %s", err))
}
// ------------- Optional query parameter "workflow_ids" -------------
err = runtime.BindQueryParameter("form", true, false, "workflow_ids", ctx.QueryParams(), &params.WorkflowIds)
if err != nil {
return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter workflow_ids: %s", err))
}
// Invoke the callback with all the unmarshaled arguments
err = w.Handler.V1WorkflowRunExternalIdsList(ctx, tenant, params)
return err
}
// V1WorkflowRunCreate converts echo context to params.
func (w *ServerInterfaceWrapper) V1WorkflowRunCreate(ctx echo.Context) error {
var err error
@@ -6830,6 +6912,7 @@ func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL
router.POST(baseURL+"/api/v1/stable/tenants/:tenant/webhooks/:v1-webhook", wrapper.V1WebhookReceive)
router.GET(baseURL+"/api/v1/stable/tenants/:tenant/workflow-runs", wrapper.V1WorkflowRunList)
router.GET(baseURL+"/api/v1/stable/tenants/:tenant/workflow-runs/display-names", wrapper.V1WorkflowRunDisplayNamesList)
router.GET(baseURL+"/api/v1/stable/tenants/:tenant/workflow-runs/external-ids", wrapper.V1WorkflowRunExternalIdsList)
router.POST(baseURL+"/api/v1/stable/tenants/:tenant/workflow-runs/trigger", wrapper.V1WorkflowRunCreate)
router.GET(baseURL+"/api/v1/stable/workflow-runs/:v1-workflow-run", wrapper.V1WorkflowRunGet)
router.GET(baseURL+"/api/v1/stable/workflow-runs/:v1-workflow-run/status", wrapper.V1WorkflowRunGetStatus)
@@ -8406,6 +8489,51 @@ func (response V1WorkflowRunDisplayNamesList501JSONResponse) VisitV1WorkflowRunD
return json.NewEncoder(w).Encode(response)
}
// V1WorkflowRunExternalIdsListRequestObject bundles the bound path
// parameter and query parameters for the strict-server handler.
type V1WorkflowRunExternalIdsListRequestObject struct {
Tenant openapi_types.UUID `json:"tenant"`
Params V1WorkflowRunExternalIdsListParams
}
// V1WorkflowRunExternalIdsListResponseObject is implemented by every typed
// response (200/400/403/501) the strict handler may return for this operation.
type V1WorkflowRunExternalIdsListResponseObject interface {
VisitV1WorkflowRunExternalIdsListResponse(w http.ResponseWriter) error
}
// V1WorkflowRunExternalIdsList200JSONResponse is the success payload: the
// list of matching workflow-run external IDs.
type V1WorkflowRunExternalIdsList200JSONResponse V1WorkflowRunExternalIdList
// VisitV1WorkflowRunExternalIdsListResponse writes the 200 response as JSON.
func (response V1WorkflowRunExternalIdsList200JSONResponse) VisitV1WorkflowRunExternalIdsListResponse(w http.ResponseWriter) error {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(200)
return json.NewEncoder(w).Encode(response)
}
// V1WorkflowRunExternalIdsList400JSONResponse reports a malformed request.
type V1WorkflowRunExternalIdsList400JSONResponse APIErrors
// VisitV1WorkflowRunExternalIdsListResponse writes the 400 response as JSON.
func (response V1WorkflowRunExternalIdsList400JSONResponse) VisitV1WorkflowRunExternalIdsListResponse(w http.ResponseWriter) error {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(400)
return json.NewEncoder(w).Encode(response)
}
// V1WorkflowRunExternalIdsList403JSONResponse reports a forbidden request.
type V1WorkflowRunExternalIdsList403JSONResponse APIErrors
// VisitV1WorkflowRunExternalIdsListResponse writes the 403 response as JSON.
func (response V1WorkflowRunExternalIdsList403JSONResponse) VisitV1WorkflowRunExternalIdsListResponse(w http.ResponseWriter) error {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(403)
return json.NewEncoder(w).Encode(response)
}
// V1WorkflowRunExternalIdsList501JSONResponse reports a not-implemented endpoint.
type V1WorkflowRunExternalIdsList501JSONResponse APIErrors
// VisitV1WorkflowRunExternalIdsListResponse writes the 501 response as JSON.
func (response V1WorkflowRunExternalIdsList501JSONResponse) VisitV1WorkflowRunExternalIdsListResponse(w http.ResponseWriter) error {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(501)
return json.NewEncoder(w).Encode(response)
}
type V1WorkflowRunCreateRequestObject struct {
Tenant openapi_types.UUID `json:"tenant"`
Body *V1WorkflowRunCreateJSONRequestBody
@@ -11983,6 +12111,8 @@ type StrictServerInterface interface {
V1WorkflowRunDisplayNamesList(ctx echo.Context, request V1WorkflowRunDisplayNamesListRequestObject) (V1WorkflowRunDisplayNamesListResponseObject, error)
V1WorkflowRunExternalIdsList(ctx echo.Context, request V1WorkflowRunExternalIdsListRequestObject) (V1WorkflowRunExternalIdsListResponseObject, error)
V1WorkflowRunCreate(ctx echo.Context, request V1WorkflowRunCreateRequestObject) (V1WorkflowRunCreateResponseObject, error)
V1WorkflowRunGet(ctx echo.Context, request V1WorkflowRunGetRequestObject) (V1WorkflowRunGetResponseObject, error)
@@ -13062,6 +13192,29 @@ func (sh *strictHandler) V1WorkflowRunDisplayNamesList(ctx echo.Context, tenant
return nil
}
// V1WorkflowRunExternalIdsList operation
// Generated strict-server adapter: packs the bound parameters into a
// request object, invokes the StrictServerInterface implementation, and
// writes whichever typed response object it returns. Any other non-nil
// response type is a programming error.
func (sh *strictHandler) V1WorkflowRunExternalIdsList(ctx echo.Context, tenant openapi_types.UUID, params V1WorkflowRunExternalIdsListParams) error {
var request V1WorkflowRunExternalIdsListRequestObject
request.Tenant = tenant
request.Params = params
// Indirection point for strict middlewares (none applied here).
handler := func(ctx echo.Context, request interface{}) (interface{}, error) {
return sh.ssi.V1WorkflowRunExternalIdsList(ctx, request.(V1WorkflowRunExternalIdsListRequestObject))
}
response, err := handler(ctx, request)
if err != nil {
return err
} else if validResponse, ok := response.(V1WorkflowRunExternalIdsListResponseObject); ok {
return validResponse.VisitV1WorkflowRunExternalIdsListResponse(ctx.Response())
} else if response != nil {
return fmt.Errorf("Unexpected response type: %T", response)
}
return nil
}
// V1WorkflowRunCreate operation
func (sh *strictHandler) V1WorkflowRunCreate(ctx echo.Context, tenant openapi_types.UUID) error {
var request V1WorkflowRunCreateRequestObject
@@ -15271,228 +15424,230 @@ var swaggerSpec = []string{
"N2Yy6bFIeL/NRMDLpVGru0ARLGq7/pGf7VjjLaougNgIuWykVgugRhXJ/G/ZXul5WazUyGlnZ5STIOVm",
"OonvaRn+FyMo9xRAlPXqWt9imPAe1+k4RH4VKbDxKjIB6jDvzKaL/Vtm04din6QUvfp6yc6Np+dfBped",
"budL/8v7/rBCdlY/8WIef2wPFTP5g0o4Z2/V6jCRg0NzmVTN3WS8YqirQoCkfB2LypPA/7ij5/ROt9P/",
"k59c9RM3PdGfjj6LP8+GV5dalF8F3nMWlMmIBMms4sEU++6xNyZm4cyfdpHYewIJS0FSMq14b/MDpGZv",
"yczPyNbzMoyPbV+iGf7V0lsoeqhnXUU9bu/C6jas+XOwGSQwkY/CpA7lY3m/oAN44B17AVh0vWPvCcIH",
"+t9ZHJHpr0sGMij0GB+J2UWuRNR1HCLfkGKKW/tVB2BVS4I3NRgMDURunv3qwuwFcPbVCR+YqzC1CqPM",
"aaFJoz+POt3On8cVwqRpJx5vt4Xwb+uLgltWi6NJXvaKFKbPasDXmPJZX3nNe6q1ZFu22lY6IKL/ywNi",
"p+y99gO3rqCXdAVt0EWzkYIfDdz0L+Jlt3DwVxbXYn/Ghq9Bik0ZI3QW48ExHsLenLX2QBR4PoiimHiA",
"lStidRBltsOSbjFBh02n7VpvEwiCBGKse51ydrJ0Y5SdT/TDJ4CnJlU1BXiqD/m/cGE6oby4qcnLCI54",
"RT7vbAqIdcI/YYLuUR16me+Myq9H0VyUsszBYOaiKcD2gpnGOYCqkOlhSCy8uYk7rgDheQgWOSaS+9fY",
"TZXH7jcLgeUrilqZIIJPdiQyvodPGdakzWyGfQmbRVUsfWZhhVWAKCAq8bcaDKW8XKqeqo4nG8ov4gmK",
"lq+XsRx/r1Q+Y+cwLtc4r8O1zIawV+h2064WwbCDuyVr+rlumm6S4yma4311oZZcylvU5pvQMnwy07b9",
"eXzWvziH43Sy7updXWHLYjRLQ0AgzooUsLswP07DwBtDdv3JrQ8Qidz2ceKBnLVtKmQAc+XVyug66194",
"WRt2tngEYUqp3xhyGxKYXINFGAMLB/Im3py3Ka8PyE/U+vDiiP6QwEcUp7gnQkjFGJ2qV9flidmn8nyk",
"9EpOPGKv9rJoeJOz1lGGLYGFino2cIFe9N5DsgAa2wBW55BndTfsRBaibHrWg9NQvbcp7HA2Oqs9z1KV",
"Y3yfhkZD0O0dQBkL8klAKYjYGhBvHcPyXJN+yy1RrYuVaOEOPBYJx2pY2tNb/nnMa33eAPxQUZiSwCQC",
"oUi8YnV1iWbe4BxLUvRB5CXwXhzcETfIAX6g/JsjTL2z7iNbay4aycP1W0rx8YG3NR7gJN5CGNCm2JSI",
"CdvuIzi6GBrUslGAudB7ggnMqiBsDBXPfBFM5vCFVtUlrZSiGn/J00FRhukWTIX4lMLRNoz28qeu9qHx",
"hoSPd+Dd0lM8nQSnY8zDsCjKA2b4iFbYA0SXRm4JHCoTb62aLsGSuoe/tmEIyak8i6BhWy7eMmt7Hkfw",
"6r7z7q9aYWfo/x5g5LO6/s/dZfqfXg94gZZlOn/6cnrWef5mXZwYnDlsw1WWCBmABcuHLrpWmoihOCQC",
"T6zrzcJGxXTnmB83JVMYEeQLKoyZJ0YyiHjarAn90+vB3ef+Pw3CvpiNTk7PITFQix2lDBnm5GOf4aLf",
"2OrSl8TNuwe4OPBuWCwU9pjTjcSiSn6+lXefxDMdF1KIHKyQu05htRyDzNw9Mh7JaddHWRdbvb+sRdeE",
"RXdGzhhxB8hdlwobovb3p6PB2WZpnYmXHcAmhWOzyGQrXRsuz8HkTMsgUMyYYcgtUG+RqXJqZcMuABPX",
"nJwGXnpFJfacDDC9GrVuyo+hB6KF94/R1WUPwwSBEP2HXcbxlR0sZapVTCYlvzhXx4nnAwIncYL+o1d/",
"KotpCKOq7DSYgNlcXB0qbcIDrXm1eMe3fDtVrlAkQWJZ52wFs7RTlpyM3VRmRw81ijdeFGZ05FTGTDca",
"MMaaOfw7iiZCvl02Uc0iVlqBmsHJzvVgPg+RTwlzTXUdxaJWquxonPdbJn52wBMqBaHltFje2BK4nFHr",
"KVwwdGkbGTka9rA2JZVDuiiN+BWjqamTNDrY0PnMntDZTlY/SQHFtsxhRVKBRJV9+LcsBpHNrvZEy0ch",
"pMUH5cNa2k9T5R0xcRfC59APQQKIyNdhv2oXnI2wF2RdvF9IksJfqQKfJ/EkAbMZIMj3frkHIYa/rvsa",
"3mrjaMaaNHWYwVbGx344n9ZhUFRsewPfVt3Y6xSs1Xk5be6wjCx0NtoJrSs9zS6pNf88tlYBA4TA2dxi",
"9oqPmgQrFgEzJMvZSlmxUNboqkZSsZ7Wy1UjKybCMV1CkWThsRwaLphuXt6sgI4VCpxlI+0CJ1SWIvvz",
"mKfOby+rml5Wcbxt5q4qEWNv+KqKgm5z0zSXe3RBZpnHbqW/VGQkYsdM6X2r35i+ar5kBiTnfFyVljAk",
"CYK4fvn0yzmPIrEm5KZtnJxtPOcQc7Y0S3Ukj5DNEqfyJhw4fWp9zzJcmw9kast2QhxmRF/HFZIgN5zW",
"qGEeIzlWLn9RMWeROeFRMY/RqH95c3ejL0at4Y5rt1LSpbNh//SmUD3h8+D62pLUKCdIHX3B7glUMIr4",
"k8Amib5hU2LJslEW508jwsPRmtYlyINQz/FVz+A4Euycdx2jiPDnb+UdEARnFKBZ1ifzi2I0g0uW/RCN",
"DGmlnJZh0LY8QqnpzuqocTwoMDspjWz49CsTOTpFPukkZ452qsokV4CwKUaypRnIPQebJheVJMgyhp1d",
"fbm+6N+UEoVV5D/LX0ctV0RAO53ntXE2zar3T8xsE57NEvbXajXpF3p2M1K2YgNh9xuFmru/mmNqduGj",
"cPIEsIgmaPDQPMibRW7Rt4Yt0EZMs3pYhuHE1+JQXQ9F3gyFIcLQj6MAuxmydQGYhVm8X9RbcEAgJvS3",
"X+vLPTqhnw4vu7njvy78tQLlgupFMLf8cQ4jMEcHl3F0mYYhGIfwHyOWfUG16qHZPE7YpCICvNx4Dug5",
"pjNBZJqOD/x4djgFxJ9C0gvgo/z7EMzR4ePxIYbJI0wOY8B09PdeJMbqvGOe0BVfH6Wz0Rw8RTA4q2RH",
"zYnNm5cZsyotcHlA/q0hBe3RnvBs58zWVt4F5zsm3lnJzloDagOHOofCNAYO3VBxmqKhmqVCtxSmKSvK",
"Vd0Ly23kGmd38NhXntAHEYZJc5WHRLemEQ6uFwz52t/brDZaG0Qlc0QIR4w83pzF0T2aGDNS5C8/nC+g",
"XcqYLUF8hacuzuDkyp2VZxIPrg0TrVK2Rndg61ZTl7to5CMUg75SekYrjFxgV93Fk2eFfL0c7u3J3QqZ",
"t+Bb0aDfrOun2su6Lqu49I5VAS8gsR/IbtBM3K1v0M0awDmZWuxe+ilnTMiC94DA5B6EoXnIrRmiK1cy",
"2owl0VBw8qCDhsiiWoR3dEfXazNoDC70NZwVW6PlJzJalotW022AlcrKceFbULHnOUW9jNL9VlAhL6lH",
"KTWx7NqN1KlQfWvTpltLk5avb2wI7BZfbaRkLgml27M1cbmidf1D80Kp3Cyh25/HPGdP+xpx6YAw8zWA",
"SIVUevG3Z6+3im/UXd9JWcu26q9o3N7vyQ7P3X147LbxWh2beye3nshGe+FXp4BCx9d52nOsbzrBaa89",
"y6Q3R59tL4BOrwdsrzVSyb/kMuF7CkEAEzdZzdsWSVFMW4srbaauXEcl451qbJZ/t9dVr1W7tsdn2ji5",
"t41Fk8IpLQ5d6piOwhBqzF2HqcFjQ6L8WjtQAWVq1JoEOfm3fuGEauzpTMfb6NPpcadL/3Py9nf+x9vj",
"k0638+X8bTX21PNBQxpLbSL3p4iqF8ug6MeB8Bk4j9CXnVhwxCQCJE3gp5XpmA7tqfGMsglNIlZ/xU+g",
"5SiC2TfGhlIe015OExTfSypEaXgyr7gIWi2N9DW8q1ec/f+P1bUa9dkbBP7H7fCimjx2ItpJamrH8Iay",
"3tDQ8LF/2R8yGfNxcPPp9j2LYhoOrvssAOn07HOn27kYXPZPbVGxmtG+/oehlRfzza+zpWemvdJur7R/",
"rivt9ta57Cte0fe0277TvXHdNbwWrLmHMzj5xNXcSo4+FOS8fNnZJn8rl7skUxdwumtG04bnkMhk54W4",
"xvqa9Xm1SqlkCuoP4Pr7WNr+Q5wY4JE+8kdZmL7uOQNrmOXDyF+wrh6gzcHB60txUXtnXX5f2snhRKJb",
"Qlbe2rw5kN/eoOZVwAbqwulTVgH7Uo5m3Tpq4Gm2YHxdXudcrIP+Muz0o6g1ZDR5hTnNU6avtRRYI2+S",
"SLduTi2fWBKqyr5pEjby9ohTOR3XhMscSnhiH3sa7XUtErudS6ncErl6qZbwBvdeFBNvnsSPKIBB1wNe",
"AqIgnslOTygMvTH0JjCCiTwm6NruZGMYb47mYDcJcLm92TYpKzhrkU0Fpz1361aP/3nx4+QCyHWxMqY4",
"FN8By76xWxYQBVk1soQPtdyRegbJNA4arVaA/oX3VLbzWRxYqPbTzc21zALrx4Gi4EQg3/2N8R3gj4zZ",
"zLmJvzkivJqEBCpr9KikednaOdmQkQKWpp0vausyL9JNp9u5vhqx/9zeMCvEpiH5uw5c9egDi4sJXlHE",
"B5E3hwmlq4NGxbjBI0DssGjP9JRLglKeFn6Hfkqg58eRqCwXLiyBWgjP2cnVmNWHUh1S+cIAxmgSwcDL",
"OjHPzu3t4NwT7LP9E1sIxjDE1WX1WBvGUrkrba4G3EiRC1Q6jmnLQoDJJwgSMoaAVJ29c1vFqiSy7OfA",
"m8re+VPvydHJSe/4pHf85ub47buj39/99sfBH3/88ebtH72jt++OjtzTMADOzNQ86GMCxiFzZu0gpDPw",
"3U74M/AdzdLZ+hhg83aH3d5IoA9VbUBsyzVB2/BQeV51Kk6WIeBhfi4DDSei4gYM5BED1+aFwV7Wy4sj",
"fR8aQFac1whdGlGCGUT3sRuvDrUOVOmGsU1PYTgD82mcQI82EmJiSTSP5FgjNp/ppbBz6vdsapX+9+xm",
"8Gefpf9Qf16f3o4s7xhdguc5slTgPNeb1oxAQpNzeV8Ast5Zxnvf1tnGt8MLw/BNTWXW3mjmaKK8pOUr",
"M3fKXC+067pjMiqKw/KisDWTVycqrMDDy18NWg8FCshhnvkLlWFBNEnFlZGzWBidf8ZcLfLOWlG7cooO",
"s9kmJFL/O0mAsQEOHuzDlhbHINKN06uLU/aA+fqfN5/YBcTNP6/7o7Ph4PrG7OHJOFmPKehffPh0NeLv",
"n7+cXp7y5Alf++8/XV19tg4kS2sXnIQ6bZqD+9UvDrGD3QYl+rjCkUX6zKXd/o7HFsFKv5gAcqLPf8Rj",
"kyDfiuVgxZws6GQw3sBk+bUq7yIwHk2qL3BEtFZ2YqhcgbgBaSYntMsWicxKr6pBL6h4ZotMFE54bjea",
"SlFPING+sxLChviASGYC4NmnJpDwgj1+1tWb0L5K12mO4wNrKfQRSQCBk9qcthqEF7l+zS3szIjOF0At",
"ZsV8c1LvmJBTF1fTNWK1aosG56YEYArAwbkRh7L3ZxTlXAEfbi/PbgZMzJ7fDk/fX1DT6vz0Y6WApINI",
"/dmIgtnsBvaS381KeaWHSVvW52Yz/rliP60JVRiTfIZVb4xITEBooljFYw9wYYk6kcNTsnR7xiRPYcDD",
"c+ije+Rnk3i/zAHGMPAeERDB3b+aucKKiAYhSdmv11prkqTQMH7dDZ8e26OO9cdHR0fWWB3jMPnomoaB",
"Mo0W9Hc8lmLMVY9b0pWv/OSPa8Rtu7743OJM/zIg5MJN1hk6okcFGONH7Any3y8aDH6j9SoHdDQ0Sawh",
"IatkvM0G0oM9NLC/VQuTHTnhaWEh7kphmEZXSQCT94tzlEBfiSfpDxmdUTXdH51V6ulslA8Ihjm9r79l",
"z2g5J8U0yVgzyUiGu7Syu5Xdrex+KdltmeMnFO0V8XJLiGY22oDAmT0Cz3Jeqe9srRE1YvmBqrNQrpho",
"OUtBtPbMQmsY0CLTi3kqiw+2xaK6JURqo9ZRTyl94nX/8pxnTczyJxpSY+YTKaqci+9Pzz5fffhQqyXZ",
"tEudm/MCxU6MN3lxUowYiaNrTfKXYKUNRv4UBmlYkSPa0nlldfS1mDzAUcDUbDbmFW2tcTS5nAUbZMeq",
"Kjq4dhFWJwFLQ9qEjuRQZ7xjnRVaaF6aP2MIY8bVquS2kumMHwVzGb9JHm2eMrdqsTdgYkJvaKvz3dTl",
"H60544Bw63IIq+hHCIWzhB5k7s1ywcjSnC/vkIUb6yZk4dnGGZkcuRNXjuueFptX2NwyKODNIHmhCspf",
"ZmCFn/Ua99zcMqMvs8DuxC1EczTzvAtWebrOm60qMDRrtsiyuSsMlw3Rbz1YEq97kIbkujL1iGhkTUHi",
"dEmQXd290IVcnAQ85s8BVCxMgxs0g7GldAMmyH9Y2II86DcPi6sPt9s+jacbsBbW7tmqMwS6APGk3Qu7",
"+v8bZ2J0Pk7JZcnNyw30rZ5j2Nav846lCQ3txJ5sC+E8MCG7XCnUlEwgC5Y6s+ern4HvNS2emhnNtqT1",
"/A1ASuUYPQDMOIRjCBKYyPQKDKNMPLOfs02ZEjJnx4c4fkBQNkd0V/lP8g76XUc8Fs36ikwbtHeKSTxz",
"nOyZ+d94UIwhjpzP4p1eD1jBFMJ8S/lfFSF2jg+ODo4YHfPnsp13nTcHxwdH4uUrwwR73RqKIoET01OJ",
"j/Kam7aKIMae8mvQTQcyc37nQnz/yNAgQ9vZLCdHR+WBP0EQkilD0VvT98uYqDlzG9l599e3bgfLZP4U",
"wqyhjKP4S4zvT6H/0PlG+7O1JhAEi/rF0maoarVD2WCdy2XAeST2gO/DOfFIAu7vkV+7egVt7fIfjw9B",
"SFk1mvTgDKCwxy468eEP9rP+2zOHMYTEYN2fs9+xB1RCH9rdY9353WkJY6e0RZ82YKEAfARGiwmYQcJ0",
"4V8VQSilGTyRnLbzjr/kVsxYWkpHFxbcf83F6MqH4edvpb3/rYytUer7EOP7NAwXHkdpkMuGVELec7fz",
"G6cSP46IqKIlSuzSQQ//xlzZZOuoUW79JImp+fDM7MB8jMUMhBQLMPBYopxAPuzgYLxZOxgmKD7EyRgF",
"AeTWcUbfnE6qyExSvKjb/a3b+d5LhCpnH0TZ766BML6xYxnxDalt+XFgFRLnI/wcJM7o4X3MZedaiIFj",
"h29aAXHqZVCZTCqxRWIvlTjPY+PZLKLXshDjEkyw58QAB7QVA45igFPL5sSAriDnqEfiBxhRrSj/Ztpw",
"HmOD0TCEj/ED9EDEcrKx1iKaSM1YEBNzdENbSYcD7e4iJdTwFpkgYd0pdZew5Qk6Z9D93ESNm1C1IB26",
"sTdi5yQZZ79VUbLa8hwF+2GcBof6yddu7ZZyXcnjBBvEQxEmIPJhiYjP6GcZ/mA3gjePWwaIl0bqkeXO",
"EFiN1c4RrN8ni63/ot0Afe/JIXrxnAdjCI2m7Td31x7+YP99rtpvKqVYq4PShjKvLd/IWknEM5zajBP2",
"datCaH2bLZLD1Chvnvj+UYg1jg22Y61sy5G4hpmMvDmKK6Qap59vdgo/rBNrbFuUVKuh+XMlwF473Z8z",
"Em5pf7dofwaX1uFW7b09xS1yRjWhKaUS90SRr0OF0zEOmf+b7xK27vgFwvQAFHq51rYNpq0H+YYb2206",
"l9hxbcqGmy9zjORWt0uEoLaebURhE8r7n9vkOEIkptL88Afn+OfDeRKPof1wKe/9PJBdLZPYY35dhq/8",
"C3M7w6upr2NMhml0zeZ1903ZlJ6SXFvWehUEJXJFcHpi+D3Yqla4jAlLNB4n6D88GbXIGsOzKfBXhCU3",
"JwEohIHH/fYe2x7vg5Dng2xbzYojR2Y4BP7D4Q/2HwcvvjeiDbXqAHnKYV9F+h13p31uTCvxMBB30juf",
"x8kumTbH2wHjNspImE/8djsT86xOLDkeCMP4iU5vuhEoUq0Uvez3KhOLE12eYyJ8+ANH2IlbLke61C/z",
"S4QbsEl+MDujCM29c2xSQEbLKDvIKCWCVaxyOapklAgb2EQaLpq3yWy60HnlkbjEIo3vxl7M/ujaHQG8",
"/spSngANhpO3b3NAHK/DBponMf0HDFodtkOsaTtEsgz0HpjPJbWX1RpvU+BHAsYhPAzABB+q5NXWQyNm",
"p0bWziNTQLwxDONoor96V4mSwaR8pPzz+BywGoE3ou5tvbtM1kTMEojwpMaMZf6dwmSR8UwAJncoqFZz",
"m3rB4CR3CvC+1MHHmXrXVrj4HExUwWdjTqcKOUSnlLd/bNbX7SXsdt5uS/jRUyiazUM4gxEp2QbMeSHp",
"QF2dA/xglDCs4eEP+p+a6yWeq3+84HxTFCB0AkdXOy8kbVP6FNAtq/x8xWyLUJA1t3VYSm91NunHL1Ql",
"aOR6Y1h97fz5Gz/7bH7WG71oMrUU7uOUJxHaERGR8XNJRNjPDMRFhByG8aTOVgnjiReiCMrMPAKOokS5",
"iCcXKOIVJXZcqmyW7XVENFDK4qVXe3eX14yK+jTSv4gnq1M+/f9e9rzOfsOjlbuxEr+qZrMP5N+tSMJF",
"Yg8/oLlFqcb39xjmdar+IofVTSw/iK2ejiWr88YLy5Tsc8MZN6/Ws71e4pK+Nb1b1Z6TcSYJs7qaZy00",
"N6EPw8MAjtOJ3VHY53XSoQeK5bfBBKAIZ1VyRJXFABBwYJCHZzA8Z1Pty7Xm+qPq/zw+618wJNQE0TNM",
"YioKWdVFUq59LpC/1Vh6HXyZmqxG1Ikq+0LU5dfQ2jX6bcA4nZRYTOP5s/6FneWdeN3BruFOyLzoUbUh",
"i/zczLbZxXuCn8m+6ZaT80qH4gNcMFHC86Pap6XtOkaHbm1sonhNW+e5PYsjjAKYSBJjju7YZ1kQAg/c",
"E5anAWFPJEQzQYkRD7UwIKcil1pTWMbwPk5gLTBpRFC4BmA+8K0hcQ4akLAqQLGPmAR9QmSq3wcUi2wa",
"4MuejFt2dsOuevd15VJdezNA/Cli1x8+TAhAUfbUt2qdKmMVXIKSS+VznRentkSscryg6g4lHr8yMUEs",
"klq96LaMFx4w1FWPI/1cYnGllpNsGhdiSHgup3mAix6vtTEHKMHeLwFkgo9y38ID3r/e/evXotiqvIh1",
"uznCfjyHTvKQt3RdF2u9GrybPaO6n09bD1SdB0rxhmPoeAMD7ZCpYUcrjet2J0vtM1zsi7G28acUEhdN",
"GYGhu2UGEzN4wnpcJ0NwSerCDKJlLSdwxdceWnb10FKqd+egpmtt3MopSoYoM/n5nAerJ31uZpHgdIwh",
"8XwQBYi9qJd0vVYbpWrF3i2GAWMjDguhRngZHkCkZwdRa9GSvnqr5o3G2g3EuhQxrUzPy3SJl0ygc/xW",
"SfSuxYPMi5B7wIvgkxjYKpp529ftImYo4OhwcRMzL7EiZV5jm/sOt+kZFuRRx3qiAIQGcHvxta2Lr8vs",
"rivH8Io/FW+687y7FXf44/G4x/92ecgB6iRF4xRlu2XGCW5F7AVaINdiAE9hbW+DXxxFg3y10oqFlxQL",
"rqzf1QiTqv6KoFNlwHuIYHPoKZ/NNfh0p/n5lXPxJCatcremnVhCxxYZrTIhYr3a3POnXTm1qdIJviTD",
"beIIwDdp6SPAC6RZdJYPMrNiKx/2T8s7GPssgnaWlY2pMAuEZJRPnrwkjTzRszpDI7+nvUCY8LtaWaVm",
"X2UaKwzHnGiUjyc8Z41EQ00UhAOgjQIR2MPAKGgIzbrCIIq+Web9jQJVA7jGAyxeKL7Mi0ReP4aR8v/C",
"+uNKC9Ci3gxtfydb37HWGyW27LEnv7ZhARmqBl720sjyrJI3RNHkjlfT2RLkp4YwhYfeo4gccLgkyOIV",
"7maVAQsv68Smgm2YRlKiNX8hpkvR9jXn7jzVYnszU4rKLZLbXePOYxQRR707Q1FKID2Oy78SCB6C+ClS",
"qriBGv4IyTWdfN+VMFN4MgJReyAgHNadrlYv9uTo5Lh3RP93c3T0jv3v/1jkjix4fM9PIutQkAxSFZ+o",
"gxpT+FYAVtYjfs8Gbw7u5mVjjtSWkI6MT1r5uKPyMb87a5eS+NBnRTntb1140U718t4k73iT131ByVDA",
"TJWaUhA8m0ns+RJpW32rwiYNYcAzotTeTMrmbTqM9k1eSUYVJMPaJVMC5yFYVJWxoN8rJRNv8qolE0dB",
"E8mUSKRtUzJxMF0FUyJat3KplUsluVSQC2uUSyLJmUv0rUwkWxd9K/LUtuG3uxx+y8mFVQd3eyXD2l/S",
"5ss8uRI0MVKjuPpbJdE5Ayo6VEBaPcmLR7jq7NMgxFUxcnsXn49xVYjJ5KZA8cpRrrZ03WoT2zhXEecq",
"8NHkllsy5QtFukoaaRLquotpXl93rGs5h6sD7zcwm1i4q/iHW7xrrczY84hXOrkqsi5YuD72NcOKHdjt",
"+qFd+V/Gs7a8vxOhLrXs3dXJrSakVdKviGkV5qGFb/c5rLVgAP9sPCqjVVsetYSr1qhJGFEt2EsAgT12",
"AqWbK22wukjVWoW357Gqm+WdzcWd/rwmuV7Wu2X5HTHJDfJgeZ1tPptfx5gV10WRH89QNFH0OoMYg0mF",
"7h5CH6LHVgY1kUFRGoYlyo8W3hwswhgEHoo8EC08sdpuh8Dv5HAeAlSgtOKUq8qQLAbwOqHbTRAdhy9U",
"zBWP/4Z+lXcth6N7EGLYmgyWuimc6Qystix3u5y+RSRwL0mjupuLfMax2ruLLMNYe3+x+zkPscgC53SD",
"sbWMcSy6HiQhgpjlyYVO4G0w1D8EpAko64rz351wbscMNHvyBoECoV7HueTNgcmGA/e/TiGZcgGAIj9M",
"A1ZkClPtFUfhQv9d1T0yCaQoXNzJBrVGyjiOQwgih5cauSJYDjh7oUcbhlJd1tcbDllBX+wVh3cfgglT",
"tU+CLuKEhVXoZKDOliAKvDgl9E9hOmJqO9IG0g488M7hPUhDniv7X5Qe/uWhey+NMGRq3LR8MdOdHLRT",
"SUJbqwfU9F63DQXatZz9OYtSN3Tl70P6+4r3S7qFexggPA/BoseCIGrsXdGWDiuCJuL7CiO42gY+54Ox",
"YIq9toc10arqtOeRIl5BCvQJ1NkNAU2Wvki5xA071o0k0IquVnQ1FV3CCLGHPN/wBjKiJm/WVIimNrTm",
"WKBOQ0qNM1/HLnuwIXG4VS++JlsgASjEzWJsdAppvXLFkJcCA62BwfP8zOJdtF/qSpHmSI6a+ojgzBtA",
"YqVwRTr6/9sJGFH83443t7jsM/pxvHLPwcBPgBPW0+In15a3t3mcluCyVnPvUZlgR4bulgh6CRY/FEU5",
"qjid8AwnJGXmdZ7vD2q5eCSrfizJy/r0ms3+c7K27oxuWXpHr8HP4jQM+FsiFJktlx16l53jKlWC50Vk",
"jXNxVhCKh5rCn+F+dFBlPJ19Gq8nG38mVo3XIT+vRF2qbk4rVFs7qSi7CJqhaFJvLYl2jaXXR0huxBR7",
"e/YxyqAAzsmUv9bmGV08f4rCIIG2Cy7WYecKRPPNaSXJ3kuSKv5ct3iBcyFT5J/PhyDxp+gR1llBopUA",
"k3Y3ipARgXMR1HQqB3YQH3I8q/dUwtsGOO1m0Xqx72LP27r1e5GLQnFdIR9FWUjl2F9jfimf6PZT2VQl",
"mhQL18skl3NZrq60izzqyyqLrTR6JdLI/azVyqL9kUUa429eEoXxpC4SJownXoiikm1UdkdfxJMLFEFX",
"b1Arhl426juEjzB0CiDmLXMzVzGDpAPa6wOCYWDNngOp4vXYbBocFYncWYemgIx4L2PALWDhlHESVK2f",
"fX6/4GtpOPmV3teCBz59gBLoi/eAFVCca82WgSTrv1klpUuDtob2qul3lBTWdMFFPGmuBkSgUUVaVxYB",
"gUUkkSW88Yb9fKYHvqw7MIcPzieqS1DIQ5NeJhSHQ9go+EYg9eem8SWibhSxqcx8Ip6mSOQmilahc7Uu",
"Yx4aI27YKwm8aTIKFf4qZrBe+ex1VXhHipfpIlpq3+5pgxNjEEN+0IDfuQYuJRF3ZbZcNrfqLBURnw1F",
"k2q+2p9cFRuKOuUIaKLc5uoVdq6YW6vn9knPCT5ZgvUq9N0hCClhRJMenAEU9iZJnM4rL06pcSdPgYK8",
"2BgeG8ATAxRZ95Q26dMWH2mDfXnIsnlNaEJMw3Ib1k1oeSd/m1hBrY30mPPRpzxXHWO8+icV+smtgBs3",
"XVdCeaOj3fFm2XsJDWigoZavjWc/I7etV0seYkhIXWgRZrsnu3iyS/WbT41cUDQZiT57ktBwS2pSQ8wK",
"OlLfk5aVDMc6A5rWxkdz1CPxA6xJGeSdXg883q6aa07n6IY2a+1JfMjiiq4HDB94KGZpyCcyPqr1oReN",
"R0qRHLUaM6gfV0ljH2XU7kbsrY3IECBpXTMLN+nCKE7a8tean81mzNSQwaoUjkO0FK+skwuZsiWny4Jm",
"2qR0Ox2e8AAXTsEJtF3zZHSMDD7DhUuysAwmFb48OMeuWcO4rGgMoAyJHpwvCWL2Bm2FxH4uEA7TiL+j",
"FI6vFwn1YPv5MoEebOodCPPQ4dCDPCqIJcsnCBfeIwhTaM4qqAok/0XZ7fgda3rc6dJ/nfB/nVDxXp19",
"8Mt6kw9my+Dp3VT+wWo6Z40H28k7uMmzwlIv7dromsgec6kZLQy5q7uQ2bgWG6Q9AjAEMFzUuIVF+sYX",
"Ce/hlNDE5wt5j9ceXX3y39uZdSj4U5in8LsPYQAtpaz43jTg8/qDyeE4DR/s4XTv01BUeoA4kwm4UijQ",
"Pq9YMNDlNxQO+CWlA24uHtrXFzsmHxib6kICr1lKiKr+9rBb9p07MrT0ojkT1yY1eFgJH+E1GxQMAe4G",
"hTgwbKiq+Vwrm/FDeQKGaUTPHhtMde5cnUOIJoY0mOUoaYXUzgopUb98I/KJudEcfazcN+fgZ/0MF+21",
"XuZsXOq0zpDdnthNJ3ZP+H7XyQdCG1j1NOdB3Ew1D6WKea2qmSNgV1TzetxqHLjWqn9tChNFj4jApgHW",
"spc5aGzAvra6UsaKafhYKkpMYruNDTOFT2e0uKGYaT5BJa237m8tSpqjxC04muP2RSOiObjLBEILwnjt",
"bwFPTrakJQFx84sX+dYkF+zlq1fSo/KHHv/3MxcxISSwLGzO2e9YHTtdBA3vs7fRPnmur4atp9Cx75q/",
"VrZwCtll2ZJjM06EGbnacjbk97H2xW0zTtjzCvE7yAmbfRi8nFXwYk+DHTlXLyu/B5wrnuw25twqzTeD",
"szFjvkYnSNnLzOJf2Nf2BCmpUcPHUidIie32BGk6QWa0uJ5HRWK8wx/8Dwcj0AMCCO8+iWd1j/I4Nfwc",
"pqBYtg02/nmrvPvbRnh3GRvwdXDtDuW2vLSkslRMmtuYBvKiKwnZIe1MaRK7CPg5bOCdEAGbNX75drkZ",
"vwIdO5Iix1F6GexgsW+t8Hph4WWVK0sIryqrZ57EM0imMMW9GbVB/fpyJ1kXT3RRMTt1meyuVdcvYrKf",
"4qBA4HdyOA8BKlBFcaQmZ4AyllumfGmmpBxg2Jd1nUD+ncIUOrMha92YA/+H9toj5tvvl5D79Lht8/6Q",
"HO0t9+Lde4QJRnHUysRdkolqd8oSUXLOsjIxu+pzCQ1N1GVjXWzoEBB4QRu27/B3uZrlOt5s12Jyky+z",
"FZ3twOvsIizbSsOf57UGwccaO7fRxwUvuI6bTNyyaIsL/uuyElf06M3jEPmL+hR1soPHO7gkqJOhk9es",
"R5ue7tCEluUujQq70V4ebT3LIw6B/1CdmG5Em3hPcDyN44fydSr7/JV/ba9TeU46HSdNTg8FVO8SO2yp",
"QuptBFIyjRP0Hxjwid9uZ+IvkExjXjofhGH8ZK7OyjeI2YGcBXR9xj6uxIiHmICEWNlxRL9yPXZ1mpKp",
"xw4rRYa8xfLahgF0RRHKeu4jZ745OjHgQecehjKhVnJYmUIQiKiRMOYEU+PxZBsO/TRBZMHw48fxA4J0",
"UFZE5ZtODwyl+RklIdAdWJoO6vKEji5HRQIsCOQIt3JYyOHL0UBHVQNJXMRyK4t3ThaXGUFJ4svRCulJ",
"CwObGKx9jcEQkOevyqyk66PZ/KTOryqKu9oy9A4xtJXzHDm6UqOKun69bVxZiVLD+3ZztXl3gQkxzXwG",
"qv5tbmfaS5VduFRRe7Pua2ZTFeZK1s0KLnvjBWcoYwn4PfHjdXe1EvQW6rUvKR9aibBzhdp1EbGW4uxO",
"cqI2h9gpIXA2F8nwWFtNfNgEx74lD2slSFVIPMIsVlqIEE4E4e4dEF74Eq+OUbbF0AmkHStyDbGkbK48",
"zJq3LLyL2Y+SNBJbVRPRjqJ5yuIh+OWuabnPO2GptLmPKuQL2/CXECjZmip9AbyZCBaoEy4fIRnxYVvR",
"8nLWQbOsnhZPgxiuPVDs8oFC7tJGpIa4i+89xclD1RP0LKzTGijRxkhkIeocFV8ZUilCqmqLUWSoMHre",
"0ZPb0Trxd+1WTiP/5VOjiUFsLPTqb99y/MOxsaWSgIaZg0aJzeTWtpy7e9dvOuMt46znUrnaPU81JBfe",
"1bG3mW549coyw0RbeXPlo6Z8ApTPxsJxvOwllUQ0P142z4it1yA0JMbWCge26bG19NgaXnCNmyhX5fHl",
"kmWb4HYuqqt5kHIE0x5PdzKJdn6Pyo8Mqw+oTQTOD/2fdbfjOU6o1cCCTPf5srzA+mbQdAzusZkgtmvZ",
"98rt5bn9tXDeL13/Uribp6nl+fmQXXHUuqj5RQhnaB3ogxq+HrDRW+Z+eebOciNca6WwOIyreLPzOGLb",
"3Tq0t+TQ/qrjPnLJSpBtUlOTYX0SB0/BHG7IjhixsVt5szfGBN+w1qL4iSwKFREvIhEq35uJerGMxcNQ",
"3bphg61RxfrsORa/IO/L8kKtDFg7gBcAE29wztJgT6EXArmDtuQnAJNBYM1+8ubElP1kC5F7TcqK6ZKn",
"ja3Z0Rv7JWSJ+3W+myzETjcTrKWbRfMq0zEF8B6kIem8O+rmRMU2EjOpud8uM/mI52caLzw2gXlS8cn+",
"SnwbZld72bN+e2udid7UmI5lyj3gjQHxp6XLniqL6dXXJ9fvSTgyXIOBRYx6+arkVRctD9vbo5qkS5xs",
"tnFzgw/9JI7qLRLayvs7HmdAkQRNJrXhE2dJHL1qM2VvskaqjUUBnXYCiTKJD2qSA9sObhs469KZm4J3",
"WWdKGadkFN9kOtqh+VT7mfe4IhPneOHdi2yfa0sIqksR7J4UdLzYXF5QzSjYcmbQHDJWsNBbtWuw0kt6",
"bkPmOlW6hz/of3ryV7fiWWVF7HzxQQlnz0tpqdXbwMphdPvFtByrXhk3sc06WqxCZUZTs7uKPEF8e+5W",
"XSauyFz7HJ60w5y1IdXZqs19cOw3UtZrkQ91RevYrGpGZ+Gw5xXsdks+bKqGnS4gbriDw8nXR6mAF4Zz",
"8e3VmQp6ibnWVKiWA4ItNyEK3Ex5pg5cL/T0W8b6MKXWYbbLDjN2idzAW8bab9FVtot+vDlIKNIsoSsF",
"sHjjr/plxpbgMySeMMImgkQ2C9ep8X2WhwkgKYZOVdxk22W8WyPWV/iZXIB7QFHgBBVr2BikzygK6qHZ",
"e2cqQTPogXsKaCl4+glg+ZZZX0Ln5OjkuHdE/3dzdPSO/e//WJ3VrPspncBMvFSt9igUHdcaxxTiMbyP",
"E7hJkN+zGdYJcwWW71GE8HR5mGX/reJ5XUCvFdObuxwoe+Jf7dVA0XZsPRwbCZfezJ0Ai5B2yQoOPAEa",
"VXR59tfThDs+hNjnuratGd6a4ds3w1vbsrUtX+QJFF6xDjQTQG29gnr9voGazJmep6AGaUjVY43XULVc",
"xn84kp1bL+IuexE3dy5SBLBXkVOtMdUaU3tjTGXLyET1WnyzCiQnBldeWgPMG30jWZIwrddhvVaJxQLY",
"rF1y+EP92SuldKoNUDSD3NBm2fMwRQMOrCnMjaje2chF8+628QjF0EULnpoFJFhooyaIcS0MuNdlyfaK",
"+zapjltVvO8hjpuVI26Ggcra8pw9FqwsXAy8CD7Znwy6vxgUIXj7k2e9/vFadZqWStC2WlJ52UhIUQLJ",
"uvlbzXPbLN5bTw9vh78Vi9uv87pzuXWFoKui8s281tZkcc6PbJbH0iIQEtndHiyZEsM0aqXwNqWw3AFt",
"A5rIX6vdsMWadM3NUV0Cv8qTZit+ncSvMEjqbOK1i1xesKHnx2lEakJ0WBuZ/k5WGgGPAIVgHEImfTVx",
"Yz6Nf4SEF4TAZ2zGvRe9dVkK9zxLaW6zljx6c1Lh5NN6wy139DkkLZe7NM/+KYYJPvTTJIHVnI356YA3",
"9Gi3EvfeYph8hORMDLZBuqMzNaQzBnFb8+rla15BP00QWTAx7sfxA4KnKZVdf32joqrwzjVPbpLc2fYb",
"yHiCyDQdH/ogDMfAf7CS81k8m4eQQE7TV3R+z6iP6ET8nd1HNvQVxeWZHL5A4G+OTmruE3wxb1CedwpB",
"IMpbhjHfDGM5VSXWnwvIzOFOLjA/hyP6MAGJXRSM6NflEMe6Nscag2fzOGPQNURYHE9CuBl6Y0P/5PTG",
"0bdmessQ99PRG4oeEYEuNXClNcw7MKPbSX3TEW5Y34GYa4NaXJ/IKX4iRFhuTH6Brb3orFZZGugC9jLK",
"uzGcEHO0dwh8H86J3fN2yr5j5WETk5SoTd983qezGX8SH5xPVF+jtYL6+MpN9NdGASjy4tgu7b07fSWQ",
"JVStKN5IvzejL96ns6lSiHTwNdAXX3lLX5X0xbG9BH2F8QRFdrK6iCfYQ5EHmG48qDAwLthAG8rFQlUw",
"HX9LxaSdztFhPJnAwENRe3x+4eNzt/Pbycm21j1PYkoDzGnbjwgiC6/nPYIQBWwyuimiCYomHpQj2Q1e",
"Rtjmo3y3870HIzpVLwEE9pgPnNrQ/K7GxMxxSmq4OU6JGzvH6cs7qwSTxTtWG651UtVY04x6XP1TMzgb",
"wwRP0bzBGU7r5HaO4zrwS9ZNvIPaKIGbJ21+oNNR1B7qljnU6RisJ8k5wPgpTipCKVT6P9rBk+2rROq1",
"HHNzRtLZFEQTNdEuWUs+gyxQiGrFeWs0NTOaqlmdU36eGVe2pxI4oZI4qTp28xa40qRSkVKb4nsJxi5x",
"vERee9HYMv16TkqSytdzWMIh8B82ckk1oiPv8B1VjSRteGn1CBMsQKgsBC/ayRAoDJNHg5U+iO7jj5D8",
"KQZdaxksDdIsKcjxwdHBkSntiBZ59Jfq+s2hwtVNxWIL0ZYVxP4VegkkaRLlkFc46VAxm0YR5R81xfee",
"HLIXz/kr5zILPMHxNI4feiIQ7fCH+MHhSSdVdaJ1OVCN/+7+WlMMZA8EUxNtOQ7M8fmjhK9VbC/vnCg+",
"udTJ1Br9JVp8c2KOQ4FnFzeFbCpLyFZzjDDcsGtulp3lm/XET3LoefikQA3FzFBMaJO6KvWswI7arpY9",
"d4g9mVemtEVNeVTxJvvjuSb6mrcyBlaz4EwnnuNBplUxywYdvz8Ry41jR8WKW39kKSi59OBLHlDsMcjM",
"rK4vNlJJyO4lRXaCljdVtCOnN2y6QmAglSjb3jsoR17Ti360nGYp87EKsxW0SfFxj1Nym2Zlfxqci3by",
"hUyTxDAKwPaB3vYf6JmOQxrFLPk+pltnYblzQgOT6zU8FFvycVjLWy/NW/ortFUYy8Xsc+euZnbgTjDY",
"+m3BPDJc38pzqyvPZds2Dp0kQtE8bOWB1UBcjTlrzESnCg10k/KlGBTjPaqbDqumbFCRYRf42ZAVlec0",
"XUPJquULVpkBmyRxOmepZjMQ5EZZQWGdPsNFpzYNyIaFxIrp3+WlUpsBfgetiaVSzjcSXDI1kTW4RWbV",
"aJosaKkcQTspuW4M7HLgDe6ZdxunlDpg0GVcFQICMVE8hbB3D4k/hYEtIXkm+HfckBJksGTioRdLN6TB",
"2yjPUJtdqM0utIHsQo1Es5AN2OFWK6fJncSyiK3ZIxfMzyCXNyzlZMDUaqZgK+92ygTMSHFZE7AY+DeG",
"IIGJCvzrGkMBWSQZlwdpEnbedTrP357//wAAAP//5E41HlD/AgA=",
"k59c9RM3PdGfjj6LP8+GV5dalB97qXp29YWefb/233+6uvpcsRU5o8pkV4JkVvGGin332LMTs7zmr71I",
"7D2BhGUlKVlbvLf5TVKz52Xml2XreSzGx7Yv0Qz/ahkvFInUc7MiKLenYnUb1vyF2AwSmMh3YlKt8rG8",
"X9ABPPCOvQAsut6x9wThA/3vLI7I9NclYxsUeozvxuxSWCLqOg6Rb8g6xQ8AVWdiVV6CNzXYEA2kcJ79",
"6iLvBXD21Qm3mKt8tcqnzI+hCag/jzrdzp/HFcKkaScegreFiHDrI4NbVp6jSar2iqymz2rA15gFWl95",
"zROrtSRgtppbOiCi/8sDYqfsvXYNt96hl/QObdBrs5EaIA089y/ieLdw8FcW6mJ/2YavQYpNSSR0FuPx",
"Mh7C3py19kAUeD6Ioph4gFUwYqURZQLEkm4xQYdNB/BaBxQIggRirDuicnay9GyU/VH0wyeApyZVNQV4",
"qg/5v3BhOqG8uKnJKwuOeJE+72wKiHXCP2GC7lEdepk7jcqvR9FcVLfMwWDmoinA9hqaxjmAKprpYUgs",
"vLmJa68A4XkIFjkmkvvX2HOVx+43C4Hli4xamSCCT3YkMr6HTxnWpM1shn0Jm0UVMX1mkYZVgCggKvG3",
"GgylVF2qxKqOJxvKL+IJipYvobEcf69UUWPnMC7XOK/DtUyQsFfodtOuFsGwg7sly/y5bppukuMpmuN9",
"9aqWvMxb1Oab0DJ8MtO2/Xl81r84h+N0su6CXl1hy2I0S0NAIM7qFrDrMT9Ow8AbQ3Yjyq0PEIl093Hi",
"gZy1baptAHMV18roOutfeFkbdrZ4BGFKqd8YhRsSmFyDRRgDCwfyJt6ctymvD8hP1Prw4oj+kMBHFKe4",
"J6JKxRidqofY5YnZp/J8pPRwTrxrr/ayaHiTs9ZRhi2nhQqENnCBXgffQ7ImGtsAVvqQJ3o37EQWtWx6",
"6YPTUD3BKexwNjorR8+yl2N8n4ZGQ9DtaUAZC/KVQCmu2Bojbx3D8oKTfsstUa2LVW3hDjwWHMfKWtoz",
"Xv55zMt/3gD8UFGrksAkAqHIxWJ1dYlm3uAcS1L0QeQl8F4c3BE3yAF+oPybI0y9s+4jW2t6GsnD9VtK",
"8fGBtzUe4CTeQhjQptiUmwnb7iM4uhga1LJRgLnQe4IJzAojbAwVz3wRTObwhVaVKq2Uohp/ydNBUYbp",
"FkyF+JTC0TaM9hiorhyi8YaEj3fg3dJTPJ0Ep2PMI7MoygNm+IhW2ANEl0ZuOR0qc3GtmkHBks2HP8Bh",
"CMmpPIugYVsunjdrex5H8Oq+8+6vWmFn6P8eYOSzUv/P3WX6n14PeM2WZTp/+nJ61nn+Zl2cGJw5bMNV",
"lggZgAXLhy66VpqIoTgkAk+s683CRsV055gfNyVTGBHkCyqMmSdGMoh47awJ/dPrwd3n/j8Nwr6YoE5O",
"zyExUIsdpQwZ5nxkn+Gi39jq0pfEzbsHuDjwblh4FPaY043EonB+vpV3n8QzHRdSiByskM5OYbUclszc",
"PTJEyWnXR1kXWwnArEXXhEV3Rs4YcQfIXZcKG6L296ejwdlmaZ2Jlx3AJoVjs8hkK10bLs/B5ExLKlBM",
"omFIN1BvkakKa2XDLgAT1zSdBl56RVX3nAwwvUC1bsqPoQeihfeP0dVlD8MEgRD9h13G8ZUdLGWqVUwm",
"Jb84V8eJ5wMCJ3GC/qMXhCqLaQijqoQ1mIDZXFwdKm3CY695AXnH5307VcFQ5EViiehsNbS0U5acjN1U",
"ZkcPNYo3XhRmdORUxkw3GjDGMjr8O4omQr5dNlHNInxagZrByc71YD4PkU8Jc02lHsWiVir2aJz3WyZ+",
"dsATKgWh5bRY3tgSuJxR6ylcMHRpGxk5GvawNkuVQwYpjfgVo6mpkzQ62ND5zJ7j2U5WP0lNxbbyYUWe",
"gURVgvi3rA+Rza72REtRIaTFB+XDWtpPU+UdMXEXwufQD0ECiEjhYb9qF5yNsBdkXbxfSJLCX6kCnyfx",
"JAGzGSDI9365ByGGv677Gt5q42jGmjR1mMFWxsd+OJ/WYVBUbHsD31bd2OsUrNWpOm3usIwsdDbaCa0r",
"Pc0u2Tb/PLYWBgOEwNncYvaKj5oEK9YFM+TP2UqlsVCW7apGUrHE1ssVKCvmxjFdQpFk4bG0Gi6Ybl7x",
"rICOFWqeZSPtAidUVif785hn028vq5peVnG8beauKhFjb/iqioJuc9M0l3t0QWaZx26lv1QkKWLHTOl9",
"q9+Yvmq+ZFIk5xRdlZYwJAmCuH759Ms5jyKx5uimbZycbTwNEXO2NMt+JI+QzXKp8iYcOH1qfc8yXJsP",
"ZGrLdkIcZkRfxxWSIDec6ahhaiM5Vi6lUTGNkTkHUjG10ah/eXN3oy9GreGOa7dSHqazYf/0plBQ4fPg",
"+tqS5ygnSB19we45VTCK+JPAJrm/YVNiyRJUFudPI8LD0ZqWKsiDUM/xVc/gOBLsnHcdo4jw52/lHRAE",
"ZxSgWSIo8yNjNINLVgIRjQyZppyWYdC2PEKp6c7qqHE8KDA7KY1s+PQrczs6RT7pJGeOdqpKLleAsClG",
"sqUZyD0HmyYXlSTIkoidXX25vujflHKHVaREy19HLVdXQDud57VxNs2q90/MbBOezRL212o16Rd6djNS",
"tmIDYfcbhZq7v5pjanbho3DyBLCIJmjw0DzIm0Vu0beGLdBGTLMSWYbhxNfiUF0PRd4MhSHC0I+jALsZ",
"snUBmIVZvF/UW3BAICb0t1/rK0A6oZ8OL7u5478u/LUC5YLqRTC3/HEOIzBHB5dxdJmGIRiH8B8jlpBB",
"teqh2TxO2KQiArzceA7oOaYzQWSajg/8eHY4BcSfQtIL4KP8+xDM0eHj8SGGySNMDmPAdPT3XiTG6rxj",
"ntAVXx+ls9EcPEUwOKtkR82JzZuXGbMqU3B5QP6tIQXt0Z7wBOjM1lbeBec7Jt5Zyc5aA2oDhzqHWjUG",
"Dt1QvZqioZplR7fUqikrylXdC8tt5Bpnd/DYV57QBxGGSXOVh0S3phEOrhcM+XLg2yxAWhtEJXNECEeM",
"PN6cxdE9mhgzUuQvP5wvoF0qmy1BfIWnLs7g5CqglWcSD64NE61SyUZ3YOtWU5e7aOQjFIO+UnpGq5Vc",
"YFfdxZNnhXwJHe7tyd0KmbfgW9Gg36zrp9rLui6ruPSOVQEvILEfyG7QTNytb9DNGsA5mVrsXvopZ0zI",
"GviAwOQehKF5yK0ZoisXN9qMJdFQcPKgg4bIolqEd3RH12szaAwu9DWcFVuj5ScyWpaLVtNtgJUqzXHh",
"W1Cx5zlFvYzS/VZQIS+pRyk1sYTbjdSpUH1r06ZbS5OWL3lsCOwWX22kZK4SpduzNXG5onX9Q/NC9dws",
"odufxzxnT/sacemAMPM1gEiFVHrxt2evt4pv1F3fSVkrueqvaNze78kOz919eOy28fIdm3snt57IRnst",
"WKeAQsfXedpzrG86wWmvPcukN0efbS+ATq8HbK81Usm/5DLhewpBABM3Wc3bFklRTFuLK22mrlxHJeOd",
"amyWf7fXVa9Vu7bHZ9o4ubeNRZPCKS0OXeqYjsIQasxdh6nBY0Oi/Fo7UAFlatSaBDn5t37hhGrs6UzH",
"2+jT6XGnS/9z8vZ3/sfb45NOt/Pl/G019tTzQUMaS20i96eIqhfLoOjHgfAZOI/Ql51YcMQkAiRN4KeV",
"6ZgO7anxjLIJTSJWksVPoOUogtk3xoZSHtNeThMU30sqRGl4Mq+4CFotjfQ1vKtXnP3/j5W6GvXZGwT+",
"x+3wopo8diLaSWpqx/CGst7Q0PCxf9kfMhnzcXDz6fY9i2IaDq77LADp9Oxzp9u5GFz2T21RsZrRvv6H",
"oZUX882vs6Vnpr3Sbq+0f64r7fbWuewrXtH3tNu+071x3TW8Fqy5hzM4+cTV3EqOPhTkvHzZ2SZ/K5e7",
"JFMXcLprRtOG55DIZOeFuMb6MvZ5tUqpZArqD+D6+1ja/kOcGOCRPvJHWau+7jkDa5jlw8hfsK4eoM3B",
"wetLcVF7Z11+X9rJ4USiW0JW3tq8OZDf3qDmVcAGSsXpU1YB+1KOZt06auBptmB8XV7nr6Zbboki+2K2",
"9KipEIuhv1w7/SjKIxlNcmHu85Tua61e1sjbJdLBm1PfJ5aEr7JvmoSNvFHCa0DHNe11DiU88ZA9zfe6",
"Fondzs1UropcwlSLeYN7L4qJN0/iRxTAoOsBLwFREM9kpycUht4YehMYwUQeY3TqOtkYxpujOdhNAlxu",
"b7ZNygrOWmRTqWXPLbtV90Re/Di5KHJdrIwpDu13wLJv7BYIREFWLS3hQy135J9BMo2DRqsVoH/hPZVt",
"fxYHFqr9dHNzLbPU+nGgKDgRyHd/A30H+CNoNnNu4m+OCK8mIYHKGj0vaV62dk6GZKSApWnni9q6zMt1",
"0+l2rq9G7D+3N8xKsmlI/u4EVz1KweLihFc88UHkzWFC6eqgUf1w8AgQO8zaM1HlkrSUp4XfoZ8S6Plx",
"JCrfhQtLIBnCc3ayNmYdolSHVD4zgDGaRDDwsk7M83R7Ozj3BPts/0QZgjEMcXXZP9aGsVTuyp2rATdS",
"5AKVjmPashBg8gmChIwhIFW+gdxWsSqOLDs78Kayd/5UfnJ0ctI7Pukdv7k5fvvu6Pd3v/1x8Mcff7x5",
"+0fv6O27oyP3NBGAMzM1D/qYgHHInG07COkMfLcT/gx8R7N0tj4G2LzdYbc3EuhDVbsQ23Jh0DY8lJ9X",
"xYqTZQh4mJ/LQMOJqAgCA3nEwLV5a7CX9fLiSN+HBpAV5zVCl0aUYAbRfezGq0OtA1W6YWzTUxjOwHwa",
"J9CjjYSYWBLNIznWiM1nesnsnJo+m1qlJz67GfzJC92qP69Pb0eWd5Yuwf0cWSqwn+tNa8Yiocm5vC8A",
"We/M471v62zj2+GFYfimpjJrbzRzNFFe0vKVmUVlLhradd0xIxXFa3nR2prJqxMpVuDh5a8urYcCBeQw",
"z/yFyrUgmqTiSstZLIzOP2OuFnlnreheOYWI2WwTEqn/nSTA2AAHD/ZhS4tjEOnG6dXFKXtgff3Pm0/s",
"guTmn9f90dlwcH1j9vBknKzHPPQvPny6GvH32V9OL095coeqAtlftWrgBSemTpvmxwfqF4fYxm6DEoJc",
"4cgigubSc3/HY4tgpV9MADnR5z/isUmQb8VysGJOFpwyGG9gsvxalXcRGI8m1RdMIposOzFUrkDc0DST",
"E9plkERmpdfXoBdUvLVFJopLAm43mkplTyDRvrMSx4b4hUhmKuDZsSaQ8IJCftbVm9C+StdpnucDa6n2",
"EUkAgZPanLsahBe5fs0t7MyIzhdoLWbtfHNS75iQUxdX0zVitWqLBuemBGUKwMG5EYey92cU5VwBH24v",
"z24GTMye3w5P319Q0+r89GOlgKSDSP3ZiILZ7Ab2kt/NSnmlh1Nb1udmM/65Yj+tCV8Yk3yGVW+gSExA",
"aKJYxWMPcGGJipHDU7J0e2YlT2HAw3Poo3vkZ5N4v8wBxjDwHhEQwee/mrnCiogGIVPZr9daa5Kk0DB+",
"3Q2kHnukjvXHR0dH1lgi4zD56J+GgTyNFvR3PJZizFWPW9Kpr/wkkWvEbbu++NziTP8yIOTCYdYZ2qJH",
"LRjjW+wJ/N8vGgx+o/UqB5w0NEmsISurZOTNBtKDUTSwv1ULkx054WlhK+5KYZhGV0kAk/eLc5RAX4kn",
"6Q8ZnVE13R+dVerpbJQPCIY5va+/tc9oOSfFNMlYM8lIhuO0sruV3a3sfinZbZnjJxTtFfF8S4hmNtqA",
"wJk9QtByXqnvbK1hNWL5i6qzZK4YM5WlSFp75qM1DGiR6cU8msUH5WJR3RIitVHrqKeU3vG6f3nOszpm",
"+R0NqTvziR5VTsj3p2efrz58qNWSbNqlzs15gWInxpu8OClGjMTRtSb5S7DSBiN/CoM0rMhhbem8sjr6",
"Wkxu4ChgajYb84q71jiaXE6FDbJjVZUfXLsIq5OApUltQkdyqDPesc4KLTQvzZ8xhDEjbFXyXcl0xo+C",
"uYzfJI82T+lbtdgbMDGhN7TVIW/q8o/WnBFBuHU5hFX0I4TCWUIPMvdmuWBkac6Xd8jCjXUTsvBx44xM",
"jtyJK8d1T4vNK2xuGRTwZpC8UD0aWGZghZ/1Gvfc3DKjL7PA7sQtRHM087wQVnm6zputKjA0a7bIsrkr",
"DJcN0W89WJKxe5CG5LoyNYpoZE2R4nRJkF3dvdCFXJwEPObPAVQsTIMbNIOxpbQEJsh/WNiCPOg3D4ur",
"D7fbPo2nG7AW1u7ZqjMYugDxpN0Lu/r/G2eKdD5OyWXJzcsN9K2eY9jWr/OOpQkN7cSebAvhPDAhu1wp",
"1LxMIAuWOrPn05+B7zUtnpoZzbak+vwNQErlGD0AzDiEYwgSmMj0DwyjTDyzn7NNmRIyZ8eHOH5AUDZH",
"dFf5T/IO+l1HPGbN+opMILR3ikk8c5zsmfnfeFCMIY6cz+KdXg9YQRfCfEv5XxUhdo4Pjg6OGB3z57yd",
"d503B8cHR+JlLsMEe30biiKGE9NTiY/ympu2iiDGnvJr0E0HMrN/50J8/8jQIEPb2SwnR0flgT9BEJIp",
"Q9Fb0/fLmKg5cxvZeffXt24Hy2IDFMKsoYyj+EuM70+h/9D5RvuztSYQBIv6xdJmqGq1Q9lgnctlwHkk",
"9oDvwznxSALu75Ffu3oFbe3yH48PQUhZNZr04AygsMcuOvHhD/az/tszhzGExGDdn7PfsQdUwiHa3WPd",
"+d1pCWOntEWfNmChAHwERosJmEHCdOFfFUEopRk8kTy3846/NFfMWFpKRxcW3H/NxejKh+Hnb6W9/62M",
"rVHq+xDj+zQMFx5HaZDL1lRC3nO38xunEj+OiKjyJUoA00EP/8Zc2WTrqFFu/SSJqfnwzOzAfIzFDIQU",
"CzDwWCKfQD7s4GC8WTsYJig+xMkYBQHk1nFG35xOqshMUryoK/6t2/neS4QqZx9EWfKugTC+sWMZ8Q2p",
"d/lxYBUS5yP8HCTO6OF9zGXnWoiBY4dvWgFx6mVQmUwqsUViL5U4z2Pj2Syi17IQ4xJMsOfEAAe0FQOO",
"YoBTy+bEgK4g56hH4gcYUa0o/2bacB6bHo0P4WP8AD0QsZxxrLWIJlIzFsTEHN3QVtLhQLu7SAk1vEUm",
"SFh3St0lbHmCzhl0PzdR4yZULUiHbuyN2DlJxtlvVZSstjxHwX4Yp8GhfvK1W7ulXFzyOMEG8VCECYh8",
"WCLiM/pZhj/YjeDN45YB4qWRemS5MwRWY7VzBOv3yWLrv2g3QN97cohePOfBGEKjafvN3bWHP9h/n6v2",
"m0op1uqgtKHMa8s3slYS8QysNuOEfd2qEFrfZovkNTXKmyfmfxRijWOD7Vgr23IkrmEmI2+O4gqpxunn",
"m53CD+vEGtsWJdVqaP5cCbDXTvfnjIRb2t8t2p/BpXW4VXtvT3GLnFZNaEqpxD1R5OtQ4XSMQ+b/5ruE",
"rTt+gTA9AIVerrVtg2nrQb7hxnabziV2XJuy4ebLHCO51e0SIaitZxtR2ITy/uc2OY4Qiak0P/zBOf75",
"cJ7EY2g/XMp7Pw9kV8sk9phfl+Er/8LczvBq6usYk2EaXbN53X1TNqWnJNeWtV4FQYlcEZyeGH4PtqoV",
"LmPCEqHHCfoPT5YtssbwbAr8FWHJzUkACmHgcb+9x7bH+yDk+SDbVrPiyJEZDoH/cPiD/cfBi++NaEOt",
"ekGecthXkX7H3WmfG9NKPAzEnfTO53GyS6bN8XbAuI0yEuYTv93OxDyrE0uOB8IwfqLTm24EilQrRS/7",
"vcrE4kSX55gIH/7AEXbilsuRLvXL/BLhBmySH8zOKEJz7xybFJDRMsoOMkqJYBWrXI4qGSXCBjaRhovm",
"bTKbLnReeSQusUjju7EXsz+6dkcArw+zlCdAg+Hk7dscEMfrsIHmSUz/AYNWh+0Qa9oOkSxDvgfmc0nt",
"ZbXG2xT4kYBxCA8DMMGHKrm29dCI2amRtfPIFBBvDMM4muiv3lUiZzApHyn/PD4HrIbhjajLW+8ukymU",
"swQiPKkxY5l/pzBZZDwTgMkdCqrV3KZeMDjJnQK8L3XwcabetRVWPgcTVZDamNOpQg7RKeXtH5v1dXsJ",
"u5232xJ+9BSKZvMQzmBESrYBc15IOlBX5wA/GCUMa3j4g/6n5nqJ1xIYLzjfFAUIncDR1c4LXduUPgV0",
"yyo/X9HbIhRkTXAdltJbnU368QtVExq53hhWXzt//sbPPpuf9UYv6kwthfs45UmEdkREZPxcEhH2MwNx",
"ESGHYTyps1XCeOKFKIIyM4+AoyhRLuLJBYp4xYsdlyqbZXsdEQ2Usnjp1d7d5TWjoj6N9C/iyeqUT/+/",
"lz2vs9/waOV4rMSvqu3sA/l3K5JwkdjDD2huUarx/T2GeZ2qv8hhdR3LD2Krp2PJ6rzxwjIl+9xwxs2r",
"9Wyvl7ikb03vVrXnZJxJwqyu5lkLzU3ow/AwgON0YncU9nkdd+iBYnlwMAEowlmVHFEFMgAEHBjk4RkM",
"z9lU+3Ktuf6o+j+Pz/oXDAk1QfQMk5iKQlYVkpRrswvkbzWWXgdfpiarEXVQUE9gWENr1+i3AeN0UmIx",
"jefP+hd2lnfidQe7hjsh86JH1a4s8nMz22YX7wl+JvumW07OKx2KD3DBRAnPj2qflrbrGB26tbGJ4jVt",
"nef2LI4wCmAiSYw5umOfZUEIPHBPWJ4GhD2REM0EJUY81MKAnIpcak1hGcP7OIG1wKQRQeEagPnAt4bE",
"OWhAwqoAxT5iEvQJkal+H1AsAmqAL3sybtnZDbvq3deVS3XtzQDxp4hdf/gwIQBF2VPfqnWqjFVwCUou",
"lfd1XpzaErHK8YKqO5R4/MrEBLFIavWi2zJeeMBQ950VllHnEosrtZxk07gQQ8JzOc0DXPR4rY05QAn2",
"fgkgE3yU+xYe8P717l+/FsVW5UWs280R9uM5dJKHvKXruljr1eDd7BnV/XzaeqDqPFCKNxxDxxsYaIdM",
"DTtaaVy3O1lqn+FiX4y1jT+lkLhoyggM3S0zmJjBE9bjOhmCS1IXZhAtazmBK7720LKrh5ZSvTsHNV1r",
"41ZOUTJEmcnP5zxYPelzM4sEp2MMieeDKEDsRb2k67XaKFUr9m4xDBgbcVgINcLL8AAiPTuIWouW9NVb",
"NW801m4g1qWIaWV6XqZLvGQCneO3SqJ3LR5kXoTcA14En8TAVtHM275uFzFDAUeHi5uYeYkVKfMa29x3",
"uE3PsCCPOtYTBSA0gNuLr21dfF1md105hlf8qXjTnefdrbjDH4/HPf63y0MOUCcpGqco2y0zTnArYi/Q",
"ArkWA3gKa3sb/OIoGuSrlVYsvKRYcGX9rkaYVPVXBJ0qA95DBJtDT/lsrsGnO83Pr5yLJzFplbs17cQS",
"OrbIaJUJEevV5p4/7cqpTZVO8CUZbhNHAL5JSx8BXiDNorN8kJkVW/mwf1rewdhnEbSzrGxMhVkgJKN8",
"8uQlaeSJntUZGvk97QXChN/Vyio1+yrTWGE45kSjfDzhOWskGmqiIBwAbRSIwB4GRkFDaNYVBlH0zTLv",
"bxSoGsA1HmDxQvFlXiTy+jGMlP8X1h9XWoAW9WZo+zvZ+o613iixZY89+bUNC8hQNfCyl0aWZ5W8IYom",
"d7yazpYgPzWEKTz0HkXkgMMlQRavcDerDFh4WSc2FWzDNJISrfkLMV2Ktq85d+epFtubmVJUbpHc7hp3",
"HqOIOOrdGYpSAulxXP6VQPAQxE+RUsUN1PBHSK7p5PuuhJnCkxGI2gMB4bDudLV6sSdHJ8e9I/q/m6Oj",
"d+x//8cid2TB43t+ElmHgmSQqvhEHdSYwrcCsLIe8Xs2eHNwNy8bc6S2hHRkfNLKxx2Vj/ndWbuUxIc+",
"K8ppf+vCi3aql/cmecebvO4LSoYCZqrUlILg2Uxiz5dI2+pbFTZpCAOeEaX2ZlI2b9NhtG/ySjKqIBnW",
"LpkSOA/BoqqMBf1eKZl4k1ctmTgKmkimRCJtm5KJg+kqmBLRupVLrVwqyaWCXFijXBJJzlyib2Ui2bro",
"W5Gntg2/3eXwW04urDq42ysZ1v6SNl/myZWgiZEaxdXfKonOGVDRoQLS6klePMJVZ58GIa6Kkdu7+HyM",
"q0JMJjcFileOcrWl61ab2Ma5ijhXgY8mt9ySKV8o0lXSSJNQ111M8/q6Y13LOVwdeL+B2cTCXcU/3OJd",
"a2XGnke80slVkXXBwvWxrxlW7MBu1w/tyv8ynrXl/Z0Idall765ObjUhrZJ+RUyrMA8tfLvPYa0FA/hn",
"41EZrdryqCVctUZNwohqwV4CCOyxEyjdXGmD1UWq1iq8PY9V3SzvbC7u9Oc1yfWy3i3L74hJbpAHy+ts",
"89n8OsasuC6K/HiGoomi1xnEGEwqdPcQ+hA9tjKoiQyK0jAsUX608OZgEcYg8FDkgWjhidV2OwR+J4fz",
"EKACpRWnXFWGZDGA1wndboLoOHyhYq54/Df0q7xrORzdgxDD1mSw1E3hTGdgtWW52+X0LSKBe0ka1d1c",
"5DOO1d5dZBnG2vuL3c95iEUWOKcbjK1ljGPR9SAJEcQsTy50Am+Dof4hIE1AWVec/+6EcztmoNmTNwgU",
"CPU6ziVvDkw2HLj/dQrJlAsAFPlhGrAiU5hqrzgKF/rvqu6RSSBF4eJONqg1UsZxHEIQObzUyBXBcsDZ",
"Cz3aMJTqsr7ecMgK+mKvOLz7EEyYqn0SdBEnLKxCJwN1tgRR4MUpoX8K0xFT25E2kHbggXcO70Ea8lzZ",
"/6L08C8P3XtphCFT46bli5nu5KCdShLaWj2gpve6bSjQruXsz1mUuqErfx/S31e8X9It3MMA4XkIFj0W",
"BFFj74q2dFgRNBHfVxjB1TbwOR+MBVPstT2siVZVpz2PFPEKUqBPoM5uCGiy9EXKJW7YsW4kgVZ0taKr",
"qeiSfNKjfFItuXI8yqwHc7LwLGldheTqi8EGwf4Krvac255zX8k5d2vqLJMLrTb7mbRZTntsRbOJ47X9",
"Mc8NbyBjRfMH9grV1QaNHgvUaUipuabOkQKJRVTmtu+nNasZEoBC3Cx6VKeQ9r6pGMxZYKA1MHien1kk",
"p/ZLXZHtHMmBKGAhYkr/k1gdJUWhlf/bCRhR/N+ON7dcRmf04xhMloOB+zYnrKflBlhb3t5mKFyCy1ot",
"vkcF8B0Zulsi6CVY/FCUm6ridMJzd5GUOY7yfH9Qy8UjWc9qSV7Wp9fM9Z+TtfXjZ8vSOxrgdRanYcBf",
"ydKDpMly2aGMIzmuUsXlXkTWOJcdB6FIQSA89e5HB1Wg2tnp9XrqzGRi1egA+Xkl6lIV4Vqh2tpJRdlF",
"0AxFk3prSbRrLL0+QnIjptjbs49RBgVwTqY8DwnPVeb5UxQGCbSFbrAODaXf5gUJ35xWkuy9JKniz3WL",
"FzgXMkX++XwIEn+KHmGdFSRaCTBpd6MIGRE4F+G6p3JgB/Ehx7N6TyW8beju8hbZJmWS2Hex505SKZ8q",
"si16u/0sS4rrCpmWykIqx/4a80v5RLefyqYq0aRYuF4muZzLRNlvd3nUl/WDW2n0SqSR+1mrlUX7I4s0",
"xt+8JArjSV2kVBhPvBBFJduo7I6+iCcXKIKu3qBWDL3se6YQPsLQKWSIt8zNXMUMkg5orw8IhoE1Lxyk",
"itdjs2lwVJQoYR2aAjLivYxPSQB7KBAnQdX62ef3C76WhpNf6X0teODTByiBvnjpXgHFudZsGUiy/ptV",
"Uro0aFoUv00sV9QKSgpruuAinjRXAyLQqCJhOYuAwCKSyBK4f8N+PtMDX9YdmMMH5xPVpd7loUkvE4rD",
"IWwUfCOQ+nPT+BJRN4rYVM5ZEU9TJHITRavQuVqXMQ+NETfslQTeNM2SetghZrBe+WwnWu5lKV4mQmqp",
"fbunDU6MQQz5QQN+5xq4VB7DldlyeUqr8y9FfDYUTar5an+yMG0o6pQjoIlym6v8Irkypa2e2yc9J/hk",
"Cdar0HeHIKSEEU16cAZQ2JskcTqvvDilxp08BQryYmN4bABPDFBk3VPapE9bfKQN9uWl0+Y1oQkxDQtJ",
"WTeh5Z38bWIFtTbSY85Hn/JcdYzx6p9U6Ce3Am7cdF0J5Y2OdsebZe8lNKCBhlq+Np79jNy2Xi15iCEh",
"daFFmO2e7OLJLtXZDDRyQdFkJPrsSareLalJDTEr6Eh9T1pWMhzrDGhaGx/NUY/ED7AmGZ53ej3weLtq",
"rjmdoxvarLUn8SGLK7oeMHzgoZilIZ/I+KjWh140HilFctRqzKB+XKVAS5RRuxuxtzYiQ4Ckdc0s3KQL",
"ozhpy19rfjabMVNDBqtSOA7RUrxmXC5kypZ2NQuaadOt7nR4wgNcOAUn0HbN088wMvgMFy55TTKYVPjy",
"4By75sPksqIxgDIkenC+JIjZG7QVUvm4QDhMI/6OUji+XiTUg+3nywR6sKl3IMxDh0MP8qggliyDEFx4",
"jyBMoTmPkCr9/xdlt+N3rOlxp0v/dcL/dULFe3W+oS/rTTeULYMnLlUZh6rpnDUe7H+moaVe2rXRNZE9",
"5lIzWhhyV3chs3EtNkh7BGAIYLiocQuLxMQvEt7DKaGJzxfyHq89uvrkv7cz61DwpzBP4XcfwgBaijTy",
"vWnA5/UHk8NxGj7Yw+nep6GoYQRxJhNwpVCgfV6xYKDLbygc8EtKB9xcPLSvL3ZMPjA21YUEXrOU8EHk",
"w7Ai7JZ9544MLXF2zsS1SQ0eVsJHeM0GBUOAu0EhDgwJnIdgsXaxMdcKQv1QnoBhGg14cuJNFfFwrjsl",
"RBNDGsxylLRCameF1JBR6mbkE3OjOfpYuW/Owc/6GS7aa73M2bjUaZ0huz2xm07snvD9rpMPhDaw6mnO",
"g7iZah5KFfNaVTNHwK6o5vW41ThwrVX/2hQmih4RgU0DrGUvc9DYgH1tdaWMFdPwsVSUmMR2GxtmCp/O",
"aHFDMdN8gkpab93fWpQ0R4lbcDTH7YtGRHNwlwmEFoTx2t8CnpxsSUsC4uYXL/KtSS7ACIxD2EsAgT02",
"JmUPwWsr6FH5Q4//+5mLmBASWBY25+x3rI6dLoKG99nbaJ8811fD1lPo2HfNXytbOIXssmzJsRknwoxc",
"bTkb8vtY++K2GSfsz6vbfeGEzT4MXs4qeLGnwY6cy+HbG84VT3Ybc26V5pvB2ZgxX6MTpOxlZvEv7Gt7",
"gpTUqOFjqROkxHZ7gjSdIDNaXM+jIjHe4Q/+h4MR6AEBhHefxLO6R3mcGn4OU1As2wYb/7xV3v1tI7y7",
"jA34Orh2h3JbXlpSWSomzW1MA3nRlYTskHamNIldBPwcNvBOiIDNGr98u9yMX4GOHUmR4yi9DHaw2LdW",
"eL2w8LLKlSWEV5XVM0/iGSRTmOLejNqgfn25k6yLJ7qomJ26THbXqusXMdlPcVAg8Ds5nIcAFaiiOFKT",
"M0AZyy1TvjRTUg4w7Mu6TiD/TmEKndmQtW7Mgf9De+0R8+33S8h9ety2eX9IjvaWe/HuPcIEozhqZeIu",
"yUS1O2WJKDlnWZmYXfW5hIYm6rKxLjZ0CAi8oA3bd/i7XM1yHW+2azG5yZfZis524HV2EZZtpeHP81qD",
"4GONndvo44IXXMdNJm5ZtMUF/3VZiSt69OZxiPxFfYo62cHjHVwS1MnQyWvWo01Pd2hCy3KXRoXdaC+P",
"tp7lEYfAf6hOTDeiTbwnOJ7G8UP5OpV9/sq/ttepPCedjpMmp4cCqneJHbZUIfU2AimZxgn6Dwz4xG+3",
"M/EXSKYxL50PwjB+Mldn5RvE7EDOAro+Yx9XYsRDTEBCrOw4ol+5Hrs6TcnUY4eVIkPeYnltwwC6oghl",
"PfeRM98cnRjwoHMPQ5lQKzmsTCEIRNRIGHOCqfF4sg2HfpogsmD48eP4AUE6KCui8k2nB4bS/IySEOgO",
"LE0HdXlCR5ejIgEWBHKEWzks5PDlaKCjqoEkLmK5lcU7J4vLjKAk8eVohfSkhYFNDNa+xmAIyPNXZVbS",
"9dFsflLnVxXFXW0ZeocY2sp5jhxdqVFFXb/eNq6sRKnhfbu52ry7wISYZj4DVf82tzPtpcouXKqovVn3",
"NbOpCnMl62YFl73xgjOUsQT8nvjxurtaCXoL9dqXlA+tRNi5Qu26iFhLcXYnOVGbQ+yUEDibi2R4rK0m",
"PmyCY9+Sh7USpCokHmEWKy1ECCeCcPcOCC98iVfHKNti6ATSjhW5hlhSNlceZs1bFt7F7EdJGomtqolo",
"R9E8ZfEQ/HLXtNznnbBU2txHFfKFbfhLCJRsTZW+AN5MBAvUCZePkIz4sK1oeTnroFlWT4unQQzXHih2",
"+UAhd2kjUkPcxfee4uSh6gl6FtZpDZRoYySyEHWOiq8MqRQhVbXFKDJUGD3v6MntaJ34u3Yrp5H/8qnR",
"xCA2Fnr1t285/uHY2FJJQMPMQaPEZnJrW87dves3nfGWcdZzqVztnqcakgvv6tjbTDe8emWZYaKtvLny",
"UVM+AcpnY+E4XvaSSiKaHy+bZ8TWaxAaEmNrhQPb9NhaemwNL7jGTZSr8vhyybJNcDsX1dU8SDmCaY+n",
"O5lEO79H5UeG1QfUJgLnh/7PutvxHCfUamBBpvt8WV5gfTNoOgb32EwQ27Xse+X28tz+Wjjvl65/KdzN",
"09Ty/HzIrjhqXdT8IoQztA70QQ1fD9joLXO/PHNnuRGutVJYHMZVvNl5HLHtbh3aW3Jof9VxH7lkJcg2",
"qanJsD6Jg6dgDjdkR4zY2K282Rtjgm9Ya1H8RBaFiogXkQiV781EvVjG4mGobt2wwdaoYn32HItfkPdl",
"eaFWBqwdwAuAiTc4Z2mwp9ALgdxBW/ITgMkgsGY/eXNiyn6yhci9JmXFdMnTxtbs6I39ErLE/TrfTRZi",
"p5sJ1tLNonmV6ZgCeA/SkHTeHXVzomIbiZnU3G+XmXzE8zONFx6bwDyp+GR/Jb4Ns6u97Fm/vbXORG9q",
"TMcy5R7wxoD409JlT5XF9Orrk+v3JBwZrsHAIka9fFXyqouWh+3tUU3SJU4227i5wYd+Ekf1Fglt5f0d",
"jzOgSIImk9rwibMkjl61mbI3WSPVxqKATjuBRJnEBzXJgW0Htw2cdenMTcG7rDOljFMyim8yHe3QfKr9",
"zHtckYlzvPDuRbbPtSUE1aUIdk8KOl5sLi+oZhRsOTNoDhkrWOit2jVY6SU9tyFznSrdwx/0Pz35q1vx",
"rLIidr74oISz56W01OptYOUwuv1iWo5Vr4yb2GYdLVahMqOp2V1FniC+PXerLhNXZK59Dk/aYc7akOps",
"1eY+OPYbKeu1yIe6onVsVjWjs3DY8wp2uyUfNlXDThcQN9zB4eTro1TAC8O5+PbqTAW9xFxrKlTLAcGW",
"mxAFbqY8UweuF3r6LWN9mFLrMNtlhxm7RG7gLWPtt+gq20U/3hwkFGmW0JUCWLzxV/0yY0vwGRJPGGET",
"QSKbhevU+D7LwwSQFEOnKm6y7TLerRHrK/xMLsA9oChwgoo1bAzSZxQF9dDsvTOVoBn0wD0FtBQ8/QSw",
"fMusL6FzcnRy3Dui/7s5OnrH/vd/rM5q1v2UTmAmXqpWexSKjmuNYwrxGN7HCdwkyO/ZDOuEuQLL9yhC",
"eLo8zLL/VvG8LqDXiunNXQ6UPfGv9mqgaDu2Ho6NhEtv5k6ARUi7ZAUHngCNKro8++tpwh0fQuxzXdvW",
"DG/N8O2b4a1t2dqWL/IECq9YB5oJoLZeQb1+30BN5kzPU1CDNKTqscZrqFou4z8cyc6tF3GXvYibOxcp",
"AtiryKnWmGqNqb0xprJlZKJ6Lb5ZBZITgysvrQHmjb6RLEmY1uuwXqvEYgFs1i45/KH+7JVSOtUGKJpB",
"bmiz7HmYogEH1hTmRlTvbOSieXfbeIRi6KIFT80CEiy0URPEuBYG3OuyZHvFfZtUx60q3vcQx83KETfD",
"QGVtec4eC1YWLgZeBJ/sTwbdXwyKELz9ybNe/3itOk1LJWhbLam8bCSkKIFk3fyt5rltFu+tp4e3w9+K",
"xe3Xed253LpC0FVR+WZea2uyOOdHNstjaREIiexuD5ZMiWEatVJ4m1JY7oC2AU3kr9Vu2GJNuubmqC6B",
"X+VJsxW/TuJXGCR1NvHaRS4v2NDz4zQiNSE6rI1MfycrjYBHgEIwDiGTvpq4MZ/GP0LCC0LgMzbj3ove",
"uiyFe56lNLdZSx69Oalw8mm94ZY7+hySlstdmmf/FMMEH/ppksBqzsb8dMAberRbiXtvMUw+QnImBtsg",
"3dGZGtIZg7itefXyNa+gnyaILJgY9+P4AcHTlMquv75RUVV455onN0nubPsNZDxBZJqOD30QhmPgP1jJ",
"+SyezUNIIKfpKzq/Z9RHdCL+zu4jG/qK4vJMDl8g8DdHJzX3Cb6YNyjPO4UgEOUtw5hvhrGcqhLrzwVk",
"5nAnF5ifwxF9mIDELgpG9OtyiGNdm2ONwbN5nDHoGiIsjich3Ay9saF/cnrj6FszvWWI++noDUWPiECX",
"GrjSGuYdmNHtpL7pCDes70DMtUEtrk/kFD8RIiw3Jr/A1l50VqssDXQBexnl3RhOiDnaOwS+D+fE7nk7",
"Zd+x8rCJSUrUpm8+79PZjD+JD84nqq/RWkF9fOUm+mujABR5cWyX9t6dvhLIEqpWFG+k35vRF+/T2VQp",
"RDr4GuiLr7ylr0r64thegr7CeIIiO1ldxBPsocgDTDceVBgYF2ygDeVioSqYjr+lYtJO5+gwnkxg4KGo",
"PT6/8PG52/nt5GRb654nMaUB5rTtRwSRhdfzHkGIAjYZ3RTRBEUTD8qR7AYvI2zzUb7b+d6DEZ2qlwAC",
"e8wHTm1ofldjYuY4JTXcHKfEjZ3j9OWdVYLJ4h2rDdc6qWqsaUY9rv6pGZyNYYKnaN7gDKd1cjvHcR34",
"Jesm3kFtlMDNkzY/0Okoag91yxzqdAzWk+QcYPwUJxWhFCr9H+3gyfZVIvVajrk5I+lsCqKJmmiXrCWf",
"QRYoRLXivDWamhlN1azOKT/PjCvbUwmcUEmcVB27eQtcaVKpSKlN8b0EY5c4XiKvvWhsmX49JyVJ5es5",
"LOEQ+A8buaQa0ZF3+I6qRpI2vLR6hAkWIFQWghftZAgUhsmjwUofRPfxR0j+FIOutQyWBmmWFOT44Ojg",
"yJR2RIs8+kt1/eZQ4eqmYrGFaMsKYv8KvQSSNIlyyCucdKiYTaOI8o+a4ntPDtmL5/yVc5kFnuB4GscP",
"PRGIdvhD/ODwpJOqOtG6HKjGf3d/rSkGsgeCqYm2HAfm+PxRwtcqtpd3ThSfXOpkao3+Ei2+OTHHocCz",
"i5tCNpUlZKs5Rhhu2DU3y87yzXriJzn0PHxSoIZiZigmtEldlXpWYEdtV8ueO8SezCtT2qKmPKp4k/3x",
"XBN9zVsZA6tZcKYTz/Eg06qYZYOO35+I5caxo2LFrT+yFJRcevAlDyj2GGRmVtcXG6kkZPeSIjtBy5sq",
"2pHTGzZdITCQSpRt7x2UI6/pRT9aTrOU+ViF2QrapPi4xym5TbOyPw3ORTv5QqZJYhgFYPtAb/sP9EzH",
"IY1ilnwf062zsNw5oYHJ9Roeii35OKzlrZfmLf0V2iqM5WL2uXNXMztwJxhs/bZgHhmub+W51ZXnsm0b",
"h04SoWgetvLAaiCuxpw1ZqJThQa6SflSDIrxHtVNh1VTNqjIsAv8bMiKynOarqFk1fIFq8yATZI4nbNU",
"sxkIcqOsoLBOn+GiU5sGZMNCYsX07/JSqc0Av4PWxFIp5xsJLpmayBrcIrNqNE0WtFSOoJ2UXDcGdjnw",
"BvfMu41TSh0w6DKuCgGBmCieQti7h8SfwsCWkDwT/DtuSAkyWDLx0IulG9LgbZRnqM0u1GYX2kB2oUai",
"WcgG7HCrldPkTmJZxNbskQvmZ5DLG5ZyMmBqNVOwlXc7ZQJmpLisCVgM/BtDkMBEBf51jaGALJKMy4M0",
"CTvvOp3nb8//fwAAAP//VQx7VN0GAwA=",
}
// GetSwagger returns the content of the embedded swagger specification file
@@ -261,3 +261,15 @@ func ToWorkflowRunDisplayNamesList(
},
}
}
// ToWorkflowRunExternalIds converts a slice of Postgres UUIDs into the
// generated V1WorkflowRunExternalIdList API type, round-tripping each value
// through its string form so that invalid UUID bytes surface immediately
// (uuid.MustParse panics on malformed input).
func ToWorkflowRunExternalIds(
	externalIds []pgtype.UUID,
) gen.V1WorkflowRunExternalIdList {
	ids := make([]uuid.UUID, 0, len(externalIds))

	for _, externalId := range externalIds {
		ids = append(ids, uuid.MustParse(sqlchelpers.UUIDToStr(externalId)))
	}

	return gen.V1WorkflowRunExternalIdList(ids)
}
+35 -34
View File
@@ -101,46 +101,47 @@ export function SidePanel() {
onMouseDown={handleMouseDown}
/>
<div className="sticky top-0 z-20 flex flex-row w-full justify-between items-center bg-background px-4 pt-4 pb-2">
<div className="flex flex-row gap-x-2 items-center">
<Button
variant="ghost"
size="sm"
onClick={goBack}
disabled={!canGoBack}
className="rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 flex-shrink-0 border"
>
<ChevronLeftIcon className="h-4 w-4" />
<span className="sr-only">Go Back</span>
</Button>
<Button
variant="ghost"
size="sm"
onClick={goForward}
disabled={!canGoForward}
className="rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 flex-shrink-0 border"
>
<ChevronRightIcon className="h-4 w-4" />
<span className="sr-only">Go Forward</span>
</Button>
</div>
<div>
<Button
variant="ghost"
onClick={close}
className="rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 flex-shrink-0"
>
<Cross2Icon className="h-4 w-4" />
<span className="sr-only">Close</span>
</Button>
</div>
</div>
<div
className={cn(
'flex-1 p-4 overflow-auto side-panel-content',
isResizing && 'pointer-events-none',
)}
>
<div className="flex flex-row w-full justify-between items-center bg-background h-4 pt-2 pb-8">
<div className="flex flex-row gap-x-2 items-center">
<Button
variant="ghost"
size="sm"
onClick={goBack}
disabled={!canGoBack}
className="rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 flex-shrink-0 border"
>
<ChevronLeftIcon className="h-4 w-4" />
<span className="sr-only">Go Back</span>
</Button>
<Button
variant="ghost"
size="sm"
onClick={goForward}
disabled={!canGoForward}
className="rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 flex-shrink-0 border"
>
<ChevronRightIcon className="h-4 w-4" />
<span className="sr-only">Go Forward</span>
</Button>{' '}
</div>
<div>
<Button
variant="ghost"
onClick={close}
className="rounded-sm opacity-70 ring-offset-background transition-opacity hover:opacity-100 focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 flex-shrink-0"
>
<Cross2Icon className="h-4 w-4" />
<span className="sr-only">Close</span>
</Button>
</div>
</div>
{maybeContent.component}
</div>
</>
@@ -122,37 +122,44 @@ function OrganizationGroup({
</CommandItem>
{isExpanded &&
tenants.map((membership) => (
<CommandItem
key={membership.metadata.id}
value={`tenant-${membership.tenant?.metadata.id}`}
onSelect={() => {
invariant(membership.tenant);
onTenantSelect(membership.tenant);
onClose();
}}
className="text-sm cursor-pointer pl-6 hover:bg-accent focus:bg-accent"
>
<div className="flex items-center justify-between w-full">
<div className="flex items-center gap-2">
<div className="w-5 h-5 flex items-center justify-center">
<div className="w-2 h-2 rounded-full bg-green-500" />
tenants
.sort(
(a, b) =>
a.tenant?.name
?.toLowerCase()
.localeCompare(b.tenant?.name?.toLowerCase() ?? '') ?? 0,
)
.map((membership) => (
<CommandItem
key={membership.metadata.id}
value={`tenant-${membership.tenant?.metadata.id}`}
onSelect={() => {
invariant(membership.tenant);
onTenantSelect(membership.tenant);
onClose();
}}
className="text-sm cursor-pointer pl-6 hover:bg-accent focus:bg-accent"
>
<div className="flex items-center justify-between w-full">
<div className="flex items-center gap-2">
<div className="w-5 h-5 flex items-center justify-center">
<div className="w-2 h-2 rounded-full bg-green-500" />
</div>
<span className="text-muted-foreground">
{membership.tenant?.name}
</span>
</div>
<span className="text-muted-foreground">
{membership.tenant?.name}
</span>
<CheckIcon
className={cn(
'h-4 w-4',
currentTenant?.slug === membership.tenant?.slug
? 'opacity-100'
: 'opacity-0',
)}
/>
</div>
<CheckIcon
className={cn(
'h-4 w-4',
currentTenant?.slug === membership.tenant?.slug
? 'opacity-100'
: 'opacity-0',
)}
/>
</div>
</CommandItem>
))}
</CommandItem>
))}
</>
);
}
@@ -263,7 +270,12 @@ export function OrganizationSelector({
.map((org) => ({
organization: org,
tenants: orgMap.get(org.metadata.id) || [],
}));
}))
.sort((a, b) =>
a.organization.name
.toLowerCase()
.localeCompare(b.organization.name.toLowerCase()),
);
return {
currentOrgData: currentOrg
+17 -1
View File
@@ -12,9 +12,10 @@ import {
TabOption,
} from '@/pages/main/v1/workflow-runs-v1/$run/v2components/step-run-detail/step-run-detail';
import { DocPage } from '@/components/v1/docs/docs-button';
import { V1Event, V1Filter } from '@/lib/api';
import { V1Event, V1Filter, ScheduledWorkflows } from '@/lib/api';
import { FilterDetailView } from '@/pages/main/v1/filters/components/filter-detail-view';
import { ExpandedEventContent } from '@/pages/main/v1/events';
import { ExpandedScheduledRunContent } from '@/pages/main/v1/scheduled-runs/components/expanded-scheduled-run-content';
import { useTheme } from '@/components/theme-provider';
type SidePanelContent =
@@ -66,6 +67,12 @@ type UseSidePanelProps =
content: {
filter: V1Filter;
};
}
| {
type: 'scheduled-run-details';
content: {
scheduledRun: ScheduledWorkflows;
};
};
export function useSidePanelData(): SidePanelData {
@@ -112,6 +119,15 @@ export function useSidePanelData(): SidePanelData {
<FilterDetailView filterId={props.content.filter.metadata.id} />
),
};
case 'scheduled-run-details':
return {
isDocs: false,
component: (
<ExpandedScheduledRunContent
scheduledRun={props.content.scheduledRun}
/>
),
};
case 'docs':
const query = props.queryParams ?? {};
query.theme = theme;
+40
View File
@@ -114,6 +114,7 @@ import {
V1WebhookSourceName,
V1WorkflowRunDetails,
V1WorkflowRunDisplayNameList,
V1WorkflowRunExternalIdList,
WebhookWorkerCreateRequest,
WebhookWorkerCreated,
WebhookWorkerListResponse,
@@ -394,6 +395,45 @@ export class Api<
format: "json",
...params,
});
  /**
   * @description Lists external ids for workflow runs matching filters
   *
   * NOTE(review): this client method appears to be auto-generated from the
   * OpenAPI spec (the `/workflow-runs/external-ids` path) — prefer editing
   * the spec and regenerating over hand-editing this file. Of the query
   * parameters, only `since` is required; all others are optional.
   *
   * @tags Workflow Runs
   * @name V1WorkflowRunExternalIdsList
   * @summary List workflow run external ids
   * @request GET:/api/v1/stable/tenants/{tenant}/workflow-runs/external-ids
   * @secure
   */
  v1WorkflowRunExternalIdsList = (
    tenant: string,
    query: {
      /** A list of statuses to filter by */
      statuses?: V1TaskStatus[];
      /**
       * The earliest date to filter by
       * @format date-time
       */
      since: string;
      /**
       * The latest date to filter by
       * @format date-time
       */
      until?: string;
      /** Additional metadata k-v pairs to filter by */
      additional_metadata?: string[];
      /** The workflow ids to find runs for */
      workflow_ids?: string[];
    },
    params: RequestParams = {},
  ) =>
    this.request<V1WorkflowRunExternalIdList, APIErrors>({
      path: `/api/v1/stable/tenants/${tenant}/workflow-runs/external-ids`,
      method: "GET",
      query: query,
      secure: true,
      format: "json",
      ...params,
    });
/**
* @description Trigger a new workflow run
*
@@ -203,6 +203,7 @@ export enum TenantResource {
TASK_RUN = "TASK_RUN",
CRON = "CRON",
SCHEDULE = "SCHEDULE",
INCOMING_WEBHOOK = "INCOMING_WEBHOOK",
}
/** The status of the CEL evaluation */
@@ -563,6 +564,9 @@ export interface V1WorkflowRunDisplayNameList {
rows: V1WorkflowRunDisplayName[];
}
/** The list of external IDs */
export type V1WorkflowRunExternalIdList = string[];
export interface V1TriggerWorkflowRunRequest {
/** The name of the workflow. */
workflowName: string;
@@ -12,6 +12,7 @@ const resources: Record<TenantResource, string> = {
[TenantResource.TASK_RUN]: 'Task Runs',
[TenantResource.CRON]: 'Cron Triggers',
[TenantResource.SCHEDULE]: 'Schedule Triggers',
[TenantResource.INCOMING_WEBHOOK]: 'Incoming Webhooks',
};
const indicatorVariants = {
@@ -0,0 +1,99 @@
import { ScheduledWorkflows } from '@/lib/api';
import { Separator } from '@/components/v1/ui/separator';
import RelativeDate from '@/components/v1/molecules/relative-date';
import { CodeHighlighter } from '@/components/v1/ui/code-highlighter';
import { RunStatus } from '../../../workflow-runs/components/run-statuses';
import { Link } from 'react-router-dom';
import { useCurrentTenantId } from '@/hooks/use-tenant';
/**
 * Read-only detail view for a single scheduled workflow run, rendered inside
 * the side panel.
 *
 * Shows: a link to the workflow, the trigger time, run status (falling back
 * to 'SCHEDULED' when no status exists yet), a link to the spawned workflow
 * run once one exists, the creation time, and — when present — the JSON
 * input payload and additional metadata as highlighted code blocks.
 */
export function ExpandedScheduledRunContent({
  scheduledRun,
}: {
  scheduledRun: ScheduledWorkflows;
}) {
  // Needed to build tenant-scoped links to the workflow and the run.
  const { tenantId } = useCurrentTenantId();

  return (
    <div className="w-full">
      <div className="space-y-6">
        <div className="grid grid-cols-[auto_1fr] gap-x-4 gap-y-3 pb-4 border-b text-sm">
          <span className="text-muted-foreground font-medium">Workflow</span>
          <Link
            to={`/tenants/${tenantId}/workflows/${scheduledRun.workflowId}`}
            className="font-medium hover:underline truncate"
          >
            {scheduledRun.workflowName}
          </Link>

          <span className="text-muted-foreground font-medium">Trigger At</span>
          <span className="font-medium">
            <RelativeDate date={scheduledRun.triggerAt} />
          </span>

          <span className="text-muted-foreground font-medium">Status</span>
          <div>
            <RunStatus status={scheduledRun.workflowRunStatus || 'SCHEDULED'} />
          </div>

          {scheduledRun.workflowRunId && (
            <>
              <span className="text-muted-foreground font-medium">
                Workflow Run
              </span>
              <Link
                to={`/tenants/${tenantId}/runs/${scheduledRun.workflowRunId}`}
                className="font-medium hover:underline truncate"
              >
                {scheduledRun.workflowRunName || scheduledRun.workflowRunId}
              </Link>
            </>
          )}

          <span className="text-muted-foreground font-medium">Created At</span>
          <span className="font-medium">
            <RelativeDate date={scheduledRun.metadata.createdAt} />
          </span>
        </div>

        <div className="space-y-4">
          {scheduledRun.input && (
            <div>
              <h3 className="text-sm font-semibold text-foreground mb-2">
                Payload
              </h3>
              <Separator className="mb-3" />
              <div className="max-h-96 overflow-y-auto rounded-lg">
                <CodeHighlighter
                  language="json"
                  className="text-xs"
                  code={JSON.stringify(scheduledRun.input, null, 2)}
                />
              </div>
            </div>
          )}

          {scheduledRun.additionalMetadata &&
            Object.keys(scheduledRun.additionalMetadata).length > 0 && (
              <div>
                <h3 className="text-sm font-semibold text-foreground mb-2">
                  Metadata
                </h3>
                <Separator className="mb-3" />
                <div className="max-h-96 overflow-y-auto rounded-lg">
                  <CodeHighlighter
                    language="json"
                    className="text-xs"
                    code={JSON.stringify(
                      scheduledRun.additionalMetadata,
                      null,
                      2,
                    )}
                  />
                </div>
              </div>
            )}
        </div>
      </div>
    </div>
  );
}
@@ -14,6 +14,7 @@ export type RateLimitRow = RateLimit & {
};
export const ScheduledRunColumn = {
id: 'ID',
runId: 'Run ID',
status: 'Status',
triggerAt: 'Trigger At',
@@ -25,7 +26,8 @@ export const ScheduledRunColumn = {
export type ScheduledRunColumnKeys = keyof typeof ScheduledRunColumn;
export const idKey: ScheduledRunColumnKeys = 'runId';
export const idKey: ScheduledRunColumnKeys = 'id';
export const runIdKey: ScheduledRunColumnKeys = 'runId';
export const statusKey: ScheduledRunColumnKeys = 'status';
export const triggerAtKey: ScheduledRunColumnKeys = 'triggerAt';
export const workflowKey: ScheduledRunColumnKeys = 'workflow';
@@ -38,15 +40,33 @@ export const columns = ({
onDeleteClick,
selectedAdditionalMetaJobId,
handleSetSelectedAdditionalMetaJobId,
onRowClick,
}: {
tenantId: string;
onDeleteClick: (row: ScheduledWorkflows) => void;
selectedAdditionalMetaJobId: string | null;
handleSetSelectedAdditionalMetaJobId: (runId: string | null) => void;
onRowClick?: (row: ScheduledWorkflows) => void;
}): ColumnDef<ScheduledWorkflows>[] => {
return [
{
accessorKey: idKey,
header: ({ column }) => (
<DataTableColumnHeader column={column} title={ScheduledRunColumn.id} />
),
cell: ({ row }) => (
<div
className="cursor-pointer hover:underline min-w-fit whitespace-nowrap"
onClick={() => onRowClick?.(row.original)}
>
{row.original.metadata.id}
</div>
),
enableSorting: false,
enableHiding: true,
},
{
accessorKey: runIdKey,
header: ({ column }) => (
<DataTableColumnHeader
column={column}
@@ -61,6 +81,8 @@ export const columns = ({
</div>
</Link>
) : null,
enableSorting: false,
enableHiding: true,
},
{
accessorKey: statusKey,
@@ -71,8 +93,15 @@ export const columns = ({
/>
),
cell: ({ row }) => (
<RunStatus status={row.original.workflowRunStatus || 'SCHEDULED'} />
<div
className="cursor-pointer"
onClick={() => onRowClick?.(row.original)}
>
<RunStatus status={row.original.workflowRunStatus || 'SCHEDULED'} />
</div>
),
enableSorting: false,
enableHiding: true,
},
{
accessorKey: triggerAtKey,
@@ -83,10 +112,15 @@ export const columns = ({
/>
),
cell: ({ row }) => (
<div className="flex flex-row items-center gap-4">
<div
className="flex flex-row items-center gap-4 cursor-pointer"
onClick={() => onRowClick?.(row.original)}
>
<RelativeDate date={row.original.triggerAt} />
</div>
),
enableSorting: false,
enableHiding: true,
},
{
accessorKey: workflowKey,
@@ -138,6 +172,7 @@ export const columns = ({
);
},
enableSorting: false,
enableHiding: true,
},
{
accessorKey: createdAtKey,
@@ -152,7 +187,7 @@ export const columns = ({
<RelativeDate date={row.original.metadata.createdAt} />
</div>
),
enableSorting: true,
enableSorting: false,
enableHiding: true,
},
{
@@ -180,8 +215,8 @@ export const columns = ({
/>
</div>
),
enableHiding: true,
enableSorting: false,
enableHiding: true,
},
];
};
@@ -1,171 +0,0 @@
import { DataTable } from '@/components/v1/molecules/data-table/data-table.tsx';
import { useState } from 'react';
import { VisibilityState } from '@tanstack/react-table';
import { ScheduledWorkflows } from '@/lib/api';
import {
ToolbarFilters,
ToolbarType,
} from '@/components/v1/molecules/data-table/data-table-toolbar';
import { Button } from '@/components/v1/ui/button';
import { columns } from './scheduled-runs-columns';
import { DeleteScheduledRun } from './delete-scheduled-runs';
import { useCurrentTenantId } from '@/hooks/use-tenant';
import { TriggerWorkflowForm } from '../../workflows/$workflow/components/trigger-workflow-form';
import { DocsButton } from '@/components/v1/docs/docs-button';
import { docsPages } from '@/lib/generated/docs';
import { useScheduledRuns } from '../hooks/use-scheduled-runs';
import {
ScheduledRunColumn,
workflowKey,
statusKey,
metadataKey,
} from './scheduled-runs-columns';
import { workflowRunStatusFilters } from '../../workflow-runs-v1/hooks/use-toolbar-filters';
/**
 * Props accepted by the scheduled-runs table.
 *
 * NOTE(review): `createdAfter`, `createdBefore`, and `showMetrics` are
 * declared but never destructured by the component below — confirm whether
 * they are still needed.
 */
export interface ScheduledWorkflowRunsTableProps {
  // Only list runs created after this timestamp (currently unused).
  createdAfter?: string;
  // Only list runs created before this timestamp (currently unused).
  createdBefore?: string;
  // Restrict the listing to a single workflow.
  workflowId?: string;
  // Restrict the listing to children of this parent workflow run.
  parentWorkflowRunId?: string;
  // Restrict the listing to children of this parent step run.
  parentStepRunId?: string;
  // Initial column visibility map handed to the data table.
  initColumnVisibility?: VisibilityState;
  // Per-column toggle controlling which toolbar filters render.
  filterVisibility?: { [key: string]: boolean };
  // Whether to show run metrics (currently unused).
  showMetrics?: boolean;
}
/**
 * Table of scheduled workflow runs for the current tenant.
 *
 * Wires the `useScheduledRuns` hook into the shared DataTable with toolbar
 * filters (workflow, status, metadata key/value), pagination, column
 * visibility, a "Schedule Run" action that opens the trigger-workflow form,
 * and a confirmation dialog for revoking a scheduled run.
 */
export function ScheduledRunsTable({
  workflowId,
  initColumnVisibility = {
    createdAt: false,
  },
  filterVisibility = {},
  parentWorkflowRunId,
  parentStepRunId,
}: ScheduledWorkflowRunsTableProps) {
  const { tenantId } = useCurrentTenantId();
  // Controls visibility of the "Schedule Run" (trigger workflow) modal.
  const [triggerWorkflow, setTriggerWorkflow] = useState(false);
  // Run id whose additional-metadata view is currently expanded, if any.
  const [selectedAdditionalMetaJobId, setSelectedAdditionalMetaJobId] =
    useState<string | null>(null);
  const [columnVisibility, setColumnVisibility] =
    useState<VisibilityState>(initColumnVisibility);

  const {
    scheduledRuns,
    numPages,
    isLoading,
    refetch,
    error,
    pagination,
    setPagination,
    setPageSize,
    columnFilters,
    setColumnFilters,
    workflowKeyFilters,
    isRefetching,
    resetFilters,
  } = useScheduledRuns({
    key: 'table',
    workflowId,
    parentWorkflowRunId,
    parentStepRunId,
  });

  // Toolbar filters; individual entries can be hidden via `filterVisibility`.
  const filters: ToolbarFilters = [
    {
      columnId: workflowKey,
      title: ScheduledRunColumn.workflow,
      options: workflowKeyFilters,
      type: ToolbarType.Radio,
    },
    {
      columnId: statusKey,
      title: ScheduledRunColumn.status,
      options: workflowRunStatusFilters,
      type: ToolbarType.Checkbox,
    },
    {
      columnId: metadataKey,
      title: ScheduledRunColumn.metadata,
      type: ToolbarType.KeyValue,
    },
  ].filter((filter) => filterVisibility[filter.columnId] != false);

  const actions = [
    <Button
      key="schedule-run"
      onClick={() => setTriggerWorkflow(true)}
      className="h-8 border px-3"
    >
      Schedule Run
    </Button>,
  ];

  // The scheduled run pending delete confirmation, if any.
  const [showScheduledRunRevoke, setShowScheduledRunRevoke] = useState<
    ScheduledWorkflows | undefined
  >(undefined);

  return (
    <>
      <DeleteScheduledRun
        scheduledRun={showScheduledRunRevoke}
        setShowScheduledRunRevoke={setShowScheduledRunRevoke}
        onSuccess={() => {
          refetch();
          setShowScheduledRunRevoke(undefined);
        }}
      />
      <TriggerWorkflowForm
        defaultTimingOption="schedule"
        defaultWorkflow={undefined}
        show={triggerWorkflow}
        onClose={() => setTriggerWorkflow(false)}
      />
      <DataTable
        emptyState={
          <div className="w-full h-full flex flex-col gap-y-4 text-foreground py-8 justify-center items-center">
            <p className="text-lg font-semibold">No runs found</p>
            <div className="w-fit">
              <DocsButton
                doc={docsPages.home['scheduled-runs']}
                size="full"
                variant="outline"
                label="Learn about scheduled runs"
              />
            </div>
          </div>
        }
        error={error}
        isLoading={isLoading}
        columns={columns({
          tenantId,
          onDeleteClick: (row) => {
            setShowScheduledRunRevoke(row);
          },
          selectedAdditionalMetaJobId,
          handleSetSelectedAdditionalMetaJobId: setSelectedAdditionalMetaJobId,
        })}
        columnVisibility={columnVisibility}
        setColumnVisibility={setColumnVisibility}
        data={scheduledRuns}
        filters={filters}
        rightActions={actions}
        columnFilters={columnFilters}
        setColumnFilters={setColumnFilters}
        pagination={pagination}
        setPagination={setPagination}
        onSetPageSize={setPageSize}
        pageCount={numPages}
        showColumnToggle={true}
        columnKeyToName={ScheduledRunColumn}
        refetchProps={{
          isRefetching,
          onRefetch: refetch,
        }}
        onResetFilters={resetFilters}
        showSelectedRows={false}
      />
    </>
  );
}
@@ -1,5 +1,181 @@
import { ScheduledRunsTable } from './components/scheduled-runs-table';
import { DataTable } from '@/components/v1/molecules/data-table/data-table.tsx';
import { useState } from 'react';
import { VisibilityState } from '@tanstack/react-table';
import { ScheduledWorkflows } from '@/lib/api';
import {
ToolbarFilters,
ToolbarType,
} from '@/components/v1/molecules/data-table/data-table-toolbar';
import { Button } from '@/components/v1/ui/button';
import { columns } from './components/scheduled-runs-columns';
import { DeleteScheduledRun } from './components/delete-scheduled-runs';
import { useCurrentTenantId } from '@/hooks/use-tenant';
import { TriggerWorkflowForm } from '../workflows/$workflow/components/trigger-workflow-form';
import { DocsButton } from '@/components/v1/docs/docs-button';
import { docsPages } from '@/lib/generated/docs';
import { useScheduledRuns } from './hooks/use-scheduled-runs';
import {
ScheduledRunColumn,
workflowKey,
statusKey,
metadataKey,
} from './components/scheduled-runs-columns';
import { workflowRunStatusFilters } from '../workflow-runs-v1/hooks/use-toolbar-filters';
import { useSidePanel } from '@/hooks/use-side-panel';
export default function ScheduledRuns() {
return <ScheduledRunsTable />;
/**
 * Props accepted by the scheduled-runs table page.
 *
 * NOTE(review): `createdAfter`, `createdBefore`, and `showMetrics` are
 * declared but never destructured by the component below — confirm whether
 * they are still needed.
 */
export interface ScheduledWorkflowRunsTableProps {
  // Only list runs created after this timestamp (currently unused).
  createdAfter?: string;
  // Only list runs created before this timestamp (currently unused).
  createdBefore?: string;
  // Restrict the listing to a single workflow.
  workflowId?: string;
  // Restrict the listing to children of this parent workflow run.
  parentWorkflowRunId?: string;
  // Restrict the listing to children of this parent step run.
  parentStepRunId?: string;
  // Initial column visibility map handed to the data table.
  initColumnVisibility?: VisibilityState;
  // Per-column toggle controlling which toolbar filters render.
  filterVisibility?: { [key: string]: boolean };
  // Whether to show run metrics (currently unused).
  showMetrics?: boolean;
}
/**
 * Page-level table of scheduled workflow runs for the current tenant.
 *
 * Wires the `useScheduledRuns` hook into the shared DataTable with toolbar
 * filters (workflow, status, metadata key/value), pagination, column
 * visibility, a "Schedule Run" action that opens the trigger-workflow form,
 * a delete confirmation dialog, and a row-click handler that opens the
 * scheduled-run detail side panel.
 */
export default function ScheduledRunsTable({
  workflowId,
  initColumnVisibility = {
    createdAt: false,
  },
  filterVisibility = {},
  parentWorkflowRunId,
  parentStepRunId,
}: ScheduledWorkflowRunsTableProps) {
  const { tenantId } = useCurrentTenantId();
  // Side-panel controller used to show scheduled-run details on row click.
  const { open } = useSidePanel();
  // Controls visibility of the "Schedule Run" (trigger workflow) modal.
  const [triggerWorkflow, setTriggerWorkflow] = useState(false);
  // Run id whose additional-metadata view is currently expanded, if any.
  const [selectedAdditionalMetaJobId, setSelectedAdditionalMetaJobId] =
    useState<string | null>(null);
  const [columnVisibility, setColumnVisibility] =
    useState<VisibilityState>(initColumnVisibility);

  const {
    scheduledRuns,
    numPages,
    isLoading,
    refetch,
    error,
    pagination,
    setPagination,
    setPageSize,
    columnFilters,
    setColumnFilters,
    workflowKeyFilters,
    isRefetching,
    resetFilters,
  } = useScheduledRuns({
    key: 'table',
    workflowId,
    parentWorkflowRunId,
    parentStepRunId,
  });

  // Toolbar filters; individual entries can be hidden via `filterVisibility`.
  const filters: ToolbarFilters = [
    {
      columnId: workflowKey,
      title: ScheduledRunColumn.workflow,
      options: workflowKeyFilters,
      type: ToolbarType.Radio,
    },
    {
      columnId: statusKey,
      title: ScheduledRunColumn.status,
      options: workflowRunStatusFilters,
      type: ToolbarType.Checkbox,
    },
    {
      columnId: metadataKey,
      title: ScheduledRunColumn.metadata,
      type: ToolbarType.KeyValue,
    },
  ].filter((filter) => filterVisibility[filter.columnId] != false);

  const actions = [
    <Button
      key="schedule-run"
      onClick={() => setTriggerWorkflow(true)}
      className="h-8 border px-3"
    >
      Schedule Run
    </Button>,
  ];

  // The scheduled run pending delete confirmation, if any.
  const [showScheduledRunRevoke, setShowScheduledRunRevoke] = useState<
    ScheduledWorkflows | undefined
  >(undefined);

  return (
    <>
      <DeleteScheduledRun
        scheduledRun={showScheduledRunRevoke}
        setShowScheduledRunRevoke={setShowScheduledRunRevoke}
        onSuccess={() => {
          refetch();
          setShowScheduledRunRevoke(undefined);
        }}
      />
      <TriggerWorkflowForm
        defaultTimingOption="schedule"
        defaultWorkflow={undefined}
        show={triggerWorkflow}
        onClose={() => setTriggerWorkflow(false)}
      />
      <DataTable
        emptyState={
          <div className="w-full h-full flex flex-col gap-y-4 text-foreground py-8 justify-center items-center">
            <p className="text-lg font-semibold">No runs found</p>
            <div className="w-fit">
              <DocsButton
                doc={docsPages.home['scheduled-runs']}
                size="full"
                variant="outline"
                label="Learn about scheduled runs"
              />
            </div>
          </div>
        }
        error={error}
        isLoading={isLoading}
        columns={columns({
          tenantId,
          onDeleteClick: (row) => {
            setShowScheduledRunRevoke(row);
          },
          selectedAdditionalMetaJobId,
          handleSetSelectedAdditionalMetaJobId: setSelectedAdditionalMetaJobId,
          // Clicking a row opens the scheduled-run detail side panel.
          onRowClick: (row) => {
            open({
              type: 'scheduled-run-details',
              content: {
                scheduledRun: row,
              },
            });
          },
        })}
        columnVisibility={columnVisibility}
        setColumnVisibility={setColumnVisibility}
        data={scheduledRuns}
        filters={filters}
        rightActions={actions}
        columnFilters={columnFilters}
        setColumnFilters={setColumnFilters}
        pagination={pagination}
        setPagination={setPagination}
        onSetPageSize={setPageSize}
        pageCount={numPages}
        showColumnToggle={true}
        columnKeyToName={ScheduledRunColumn}
        refetchProps={{
          isRefetching,
          onRefetch: refetch,
        }}
        onResetFilters={resetFilters}
        showSelectedRows={false}
      />
    </>
  );
}
@@ -12,6 +12,7 @@ const resources: Record<TenantResource, string> = {
[TenantResource.TASK_RUN]: 'Task Runs',
[TenantResource.CRON]: 'Cron Triggers',
[TenantResource.SCHEDULE]: 'Schedule Triggers',
[TenantResource.INCOMING_WEBHOOK]: 'Incoming Webhooks',
};
const indicatorVariants = {
@@ -6,29 +6,34 @@ The runs client is a client for interacting with task and workflow runs within H
Methods:
| Name | Description |
| ------------------ | -------------------------------------------------------------------- |
| `get` | Get workflow run details for a given workflow run ID. |
| `aio_get` | Get workflow run details for a given workflow run ID. |
| `get_status` | Get workflow run status for a given workflow run ID. |
| `aio_get_status` | Get workflow run status for a given workflow run ID. |
| `list` | List task runs according to a set of filters. |
| `aio_list` | List task runs according to a set of filters. |
| `create` | Trigger a new workflow run. |
| `aio_create` | Trigger a new workflow run. |
| `replay` | Replay a task or workflow run. |
| `aio_replay` | Replay a task or workflow run. |
| `bulk_replay` | Replay task or workflow runs in bulk, according to a set of filters. |
| `aio_bulk_replay` | Replay task or workflow runs in bulk, according to a set of filters. |
| `cancel` | Cancel a task or workflow run. |
| `aio_cancel` | Cancel a task or workflow run. |
| `bulk_cancel` | Cancel task or workflow runs in bulk, according to a set of filters. |
| `aio_bulk_cancel` | Cancel task or workflow runs in bulk, according to a set of filters. |
| `get_result` | Get the result of a workflow run by its external ID. |
| `aio_get_result` | Get the result of a workflow run by its external ID. |
| `get_run_ref` | Get a reference to a workflow run. |
| `get_task_run` | Get task run details for a given task run ID. |
| `aio_get_task_run` | Get task run details for a given task run ID. |
| Name | Description |
| -------------------------------------------- | -------------------------------------------------------------------- |
| `get` | Get workflow run details for a given workflow run ID. |
| `aio_get` | Get workflow run details for a given workflow run ID. |
| `get_status` | Get workflow run status for a given workflow run ID. |
| `aio_get_status` | Get workflow run status for a given workflow run ID. |
| `list` | List task runs according to a set of filters. |
| `aio_list` | List task runs according to a set of filters. |
| `create` | Trigger a new workflow run. |
| `aio_create` | Trigger a new workflow run. |
| `replay` | Replay a task or workflow run. |
| `aio_replay` | Replay a task or workflow run. |
| `bulk_replay` | Replay task or workflow runs in bulk, according to a set of filters. |
| `aio_bulk_replay` | Replay task or workflow runs in bulk, according to a set of filters. |
| `cancel` | Cancel a task or workflow run. |
| `aio_cancel` | Cancel a task or workflow run. |
| `bulk_cancel` | Cancel task or workflow runs in bulk, according to a set of filters. |
| `aio_bulk_cancel` | Cancel task or workflow runs in bulk, according to a set of filters. |
| `get_result` | Get the result of a workflow run by its external ID. |
| `aio_get_result` | Get the result of a workflow run by its external ID. |
| `get_run_ref` | Get a reference to a workflow run. |
| `get_task_run` | Get task run details for a given task run ID. |
| `aio_get_task_run` | Get task run details for a given task run ID. |
| `bulk_cancel_by_filters_with_pagination` | Cancel runs matching the specified filters in chunks. |
| `bulk_replay_by_filters_with_pagination` | Replay runs matching the specified filters in chunks. |
| `aio_bulk_cancel_by_filters_with_pagination` | Cancel runs matching the specified filters in chunks. |
| `aio_bulk_replay_by_filters_with_pagination` | Replay runs matching the specified filters in chunks. |
| `subscribe_to_stream` | |
### Functions
@@ -397,3 +402,85 @@ Returns:
| Type | Description |
| --------------- | ----------------------------------------------- |
| `V1TaskSummary` | Task run details for the specified task run ID. |
#### `bulk_cancel_by_filters_with_pagination`
Cancel runs matching the specified filters in chunks.
The motivation for this method is to provide an easy way to perform bulk operations by filters over a larger number of runs than the API would normally be able to handle, with automatic pagination and chunking to help limit the pressure on the API.
This method first pulls the IDs of the runs from the API, and then feeds them back to the API in chunks.
Parameters:
| Name | Type | Description | Default |
| --------------------- | ---------------------------- | -------------------------------------------------------- | ------- |
| `sleep_time` | `int` | The time to sleep between processing chunks, in seconds. | `3` |
| `chunk_size` | `int` | The maximum number of run IDs to process in each chunk. | `500` |
| `since` | `datetime \| None` | The start time for filtering runs. | `None` |
| `until` | `datetime \| None` | The end time for filtering runs. | `None` |
| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter runs by. | `None` |
| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter runs by. | `None` |
| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter runs by. | `None` |
#### `bulk_replay_by_filters_with_pagination`
Replay runs matching the specified filters in chunks.
The motivation for this method is to provide an easy way to perform bulk operations by filters over a larger number of runs than the API would normally be able to handle, with automatic pagination and chunking to help limit the pressure on the API.
This method first pulls the IDs of the runs from the API, and then feeds them back to the API in chunks.
Parameters:
| Name | Type | Description | Default |
| --------------------- | ---------------------------- | -------------------------------------------------------- | ------- |
| `sleep_time` | `int` | The time to sleep between processing chunks, in seconds. | `3` |
| `chunk_size` | `int` | The maximum number of run IDs to process in each chunk. | `500` |
| `since` | `datetime \| None` | The start time for filtering runs. | `None` |
| `until` | `datetime \| None` | The end time for filtering runs. | `None` |
| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter runs by. | `None` |
| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter runs by. | `None` |
| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter runs by. | `None` |
#### `aio_bulk_cancel_by_filters_with_pagination`
Cancel runs matching the specified filters in chunks.
The motivation for this method is to provide an easy way to perform bulk operations by filters over a larger number of runs than the API would normally be able to handle, with automatic pagination and chunking to help limit the pressure on the API.
This method first pulls the IDs of the runs from the API, and then feeds them back to the API in chunks.
Parameters:
| Name | Type | Description | Default |
| --------------------- | ---------------------------- | -------------------------------------------------------- | ------- |
| `sleep_time` | `int` | The time to sleep between processing chunks, in seconds. | `3` |
| `chunk_size` | `int` | The maximum number of run IDs to process in each chunk. | `500` |
| `since` | `datetime \| None` | The start time for filtering runs. | `None` |
| `until` | `datetime \| None` | The end time for filtering runs. | `None` |
| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter runs by. | `None` |
| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter runs by. | `None` |
| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter runs by. | `None` |
#### `aio_bulk_replay_by_filters_with_pagination`
Replay runs matching the specified filters in chunks.
The motivation for this method is to provide an easy way to perform bulk operations by filters over a larger number of runs than the API would normally be able to handle, with automatic pagination and chunking to help limit the pressure on the API.
This method first pulls the IDs of the runs from the API, and then feeds them back to the API in chunks.
Parameters:
| Name | Type | Description | Default |
| --------------------- | ---------------------------- | -------------------------------------------------------- | ------- |
| `sleep_time` | `int` | The time to sleep between processing chunks, in seconds. | `3` |
| `chunk_size` | `int` | The maximum number of run IDs to process in each chunk. | `500` |
| `since` | `datetime \| None` | The start time for filtering runs. | `None` |
| `until` | `datetime \| None` | The end time for filtering runs. | `None` |
| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter runs by. | `None` |
| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter runs by. | `None` |
| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter runs by. | `None` |
#### `subscribe_to_stream`
+244 -7
View File
@@ -177,13 +177,14 @@ const (
// Defines values for TenantResource.
const (
CRON TenantResource = "CRON"
EVENT TenantResource = "EVENT"
SCHEDULE TenantResource = "SCHEDULE"
TASKRUN TenantResource = "TASK_RUN"
WORKER TenantResource = "WORKER"
WORKERSLOT TenantResource = "WORKER_SLOT"
WORKFLOWRUN TenantResource = "WORKFLOW_RUN"
CRON TenantResource = "CRON"
EVENT TenantResource = "EVENT"
INCOMINGWEBHOOK TenantResource = "INCOMING_WEBHOOK"
SCHEDULE TenantResource = "SCHEDULE"
TASKRUN TenantResource = "TASK_RUN"
WORKER TenantResource = "WORKER"
WORKERSLOT TenantResource = "WORKER_SLOT"
WORKFLOWRUN TenantResource = "WORKFLOW_RUN"
)
// Defines values for TenantUIVersion.
@@ -1906,6 +1907,9 @@ type V1WorkflowRunDisplayNameList struct {
Rows []V1WorkflowRunDisplayName `json:"rows"`
}
// V1WorkflowRunExternalIdList The list of external IDs
type V1WorkflowRunExternalIdList = []openapi_types.UUID
// V1WorkflowType defines model for V1WorkflowType.
type V1WorkflowType string
@@ -2448,6 +2452,24 @@ type V1WorkflowRunDisplayNamesListParams struct {
ExternalIds []openapi_types.UUID `form:"external_ids" json:"external_ids"`
}
// V1WorkflowRunExternalIdsListParams defines parameters for V1WorkflowRunExternalIdsList.
type V1WorkflowRunExternalIdsListParams struct {
// Statuses A list of statuses to filter by
Statuses *[]V1TaskStatus `form:"statuses,omitempty" json:"statuses,omitempty"`
// Since The earliest date to filter by
Since time.Time `form:"since" json:"since"`
// Until The latest date to filter by
Until *time.Time `form:"until,omitempty" json:"until,omitempty"`
// AdditionalMetadata Additional metadata k-v pairs to filter by
AdditionalMetadata *[]string `form:"additional_metadata,omitempty" json:"additional_metadata,omitempty"`
// WorkflowIds The workflow ids to find runs for
WorkflowIds *[]openapi_types.UUID `form:"workflow_ids,omitempty" json:"workflow_ids,omitempty"`
}
// V1WorkflowRunTaskEventsListParams defines parameters for V1WorkflowRunTaskEventsList.
type V1WorkflowRunTaskEventsListParams struct {
// Offset The number to skip
@@ -3129,6 +3151,9 @@ type ClientInterface interface {
// V1WorkflowRunDisplayNamesList request
V1WorkflowRunDisplayNamesList(ctx context.Context, tenant openapi_types.UUID, params *V1WorkflowRunDisplayNamesListParams, reqEditors ...RequestEditorFn) (*http.Response, error)
// V1WorkflowRunExternalIdsList request
V1WorkflowRunExternalIdsList(ctx context.Context, tenant openapi_types.UUID, params *V1WorkflowRunExternalIdsListParams, reqEditors ...RequestEditorFn) (*http.Response, error)
// V1WorkflowRunCreateWithBody request with any body
V1WorkflowRunCreateWithBody(ctx context.Context, tenant openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
@@ -4005,6 +4030,18 @@ func (c *Client) V1WorkflowRunDisplayNamesList(ctx context.Context, tenant opena
return c.Client.Do(req)
}
// V1WorkflowRunExternalIdsList performs the external-ids list request for the
// given tenant and returns the raw *http.Response; request editors are applied
// before dispatch. Callers that want a decoded payload should use
// V1WorkflowRunExternalIdsListWithResponse. (Generated code — comments may be
// regenerated.)
func (c *Client) V1WorkflowRunExternalIdsList(ctx context.Context, tenant openapi_types.UUID, params *V1WorkflowRunExternalIdsListParams, reqEditors ...RequestEditorFn) (*http.Response, error) {
	req, err := NewV1WorkflowRunExternalIdsListRequest(c.Server, tenant, params)
	if err != nil {
		return nil, err
	}
	req = req.WithContext(ctx)
	if err := c.applyEditors(ctx, req, reqEditors); err != nil {
		return nil, err
	}
	return c.Client.Do(req)
}
func (c *Client) V1WorkflowRunCreateWithBody(ctx context.Context, tenant openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) {
req, err := NewV1WorkflowRunCreateRequestWithBody(c.Server, tenant, contentType, body)
if err != nil {
@@ -7561,6 +7598,122 @@ func NewV1WorkflowRunDisplayNamesListRequest(server string, tenant openapi_types
return req, nil
}
// NewV1WorkflowRunExternalIdsListRequest generates requests for V1WorkflowRunExternalIdsList.
// It builds GET /api/v1/stable/tenants/{tenant}/workflow-runs/external-ids,
// encoding `since` as a required query parameter and `statuses`, `until`,
// `additional_metadata`, and `workflow_ids` only when set.
func NewV1WorkflowRunExternalIdsListRequest(server string, tenant openapi_types.UUID, params *V1WorkflowRunExternalIdsListParams) (*http.Request, error) {
	var err error

	// Encode the tenant path parameter (simple style, not exploded).
	var pathParam0 string

	pathParam0, err = runtime.StyleParamWithLocation("simple", false, "tenant", runtime.ParamLocationPath, tenant)
	if err != nil {
		return nil, err
	}

	serverURL, err := url.Parse(server)
	if err != nil {
		return nil, err
	}

	operationPath := fmt.Sprintf("/api/v1/stable/tenants/%s/workflow-runs/external-ids", pathParam0)
	// Make the path relative so it resolves against a server URL that has a
	// base path.
	if operationPath[0] == '/' {
		operationPath = "." + operationPath
	}

	queryURL, err := serverURL.Parse(operationPath)
	if err != nil {
		return nil, err
	}

	if params != nil {
		queryValues := queryURL.Query()

		// Optional: statuses filter (form style, exploded).
		if params.Statuses != nil {

			if queryFrag, err := runtime.StyleParamWithLocation("form", true, "statuses", runtime.ParamLocationQuery, *params.Statuses); err != nil {
				return nil, err
			} else if parsed, err := url.ParseQuery(queryFrag); err != nil {
				return nil, err
			} else {
				for k, v := range parsed {
					for _, v2 := range v {
						queryValues.Add(k, v2)
					}
				}
			}

		}

		// Required: since (earliest date to filter by) — always encoded.
		if queryFrag, err := runtime.StyleParamWithLocation("form", true, "since", runtime.ParamLocationQuery, params.Since); err != nil {
			return nil, err
		} else if parsed, err := url.ParseQuery(queryFrag); err != nil {
			return nil, err
		} else {
			for k, v := range parsed {
				for _, v2 := range v {
					queryValues.Add(k, v2)
				}
			}
		}

		// Optional: until (latest date to filter by).
		if params.Until != nil {

			if queryFrag, err := runtime.StyleParamWithLocation("form", true, "until", runtime.ParamLocationQuery, *params.Until); err != nil {
				return nil, err
			} else if parsed, err := url.ParseQuery(queryFrag); err != nil {
				return nil, err
			} else {
				for k, v := range parsed {
					for _, v2 := range v {
						queryValues.Add(k, v2)
					}
				}
			}

		}

		// Optional: additional_metadata k-v pairs.
		if params.AdditionalMetadata != nil {

			if queryFrag, err := runtime.StyleParamWithLocation("form", true, "additional_metadata", runtime.ParamLocationQuery, *params.AdditionalMetadata); err != nil {
				return nil, err
			} else if parsed, err := url.ParseQuery(queryFrag); err != nil {
				return nil, err
			} else {
				for k, v := range parsed {
					for _, v2 := range v {
						queryValues.Add(k, v2)
					}
				}
			}

		}

		// Optional: workflow_ids filter.
		if params.WorkflowIds != nil {

			if queryFrag, err := runtime.StyleParamWithLocation("form", true, "workflow_ids", runtime.ParamLocationQuery, *params.WorkflowIds); err != nil {
				return nil, err
			} else if parsed, err := url.ParseQuery(queryFrag); err != nil {
				return nil, err
			} else {
				for k, v := range parsed {
					for _, v2 := range v {
						queryValues.Add(k, v2)
					}
				}
			}

		}

		queryURL.RawQuery = queryValues.Encode()
	}

	req, err := http.NewRequest("GET", queryURL.String(), nil)
	if err != nil {
		return nil, err
	}

	return req, nil
}
// NewV1WorkflowRunCreateRequest calls the generic V1WorkflowRunCreate builder with application/json body
func NewV1WorkflowRunCreateRequest(server string, tenant openapi_types.UUID, body V1WorkflowRunCreateJSONRequestBody) (*http.Request, error) {
var bodyReader io.Reader
@@ -12486,6 +12639,9 @@ type ClientWithResponsesInterface interface {
// V1WorkflowRunDisplayNamesListWithResponse request
V1WorkflowRunDisplayNamesListWithResponse(ctx context.Context, tenant openapi_types.UUID, params *V1WorkflowRunDisplayNamesListParams, reqEditors ...RequestEditorFn) (*V1WorkflowRunDisplayNamesListResponse, error)
// V1WorkflowRunExternalIdsListWithResponse request
V1WorkflowRunExternalIdsListWithResponse(ctx context.Context, tenant openapi_types.UUID, params *V1WorkflowRunExternalIdsListParams, reqEditors ...RequestEditorFn) (*V1WorkflowRunExternalIdsListResponse, error)
// V1WorkflowRunCreateWithBodyWithResponse request with any body
V1WorkflowRunCreateWithBodyWithResponse(ctx context.Context, tenant openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1WorkflowRunCreateResponse, error)
@@ -13728,6 +13884,31 @@ func (r V1WorkflowRunDisplayNamesListResponse) StatusCode() int {
return 0
}
// V1WorkflowRunExternalIdsListResponse wraps the raw HTTP response of the
// external-ids list call. Body always holds the raw payload; exactly one of
// the JSONxxx fields is populated when the response status and Content-Type
// match (200 success, 400/403/501 API errors).
type V1WorkflowRunExternalIdsListResponse struct {
	Body         []byte
	HTTPResponse *http.Response
	JSON200      *V1WorkflowRunExternalIdList
	JSON400      *APIErrors
	JSON403      *APIErrors
	JSON501      *APIErrors
}

// Status returns HTTPResponse.Status, or the text for status 0 when no
// response was received.
func (r V1WorkflowRunExternalIdsListResponse) Status() string {
	if r.HTTPResponse != nil {
		return r.HTTPResponse.Status
	}
	return http.StatusText(0)
}

// StatusCode returns HTTPResponse.StatusCode, or 0 when no response was
// received.
func (r V1WorkflowRunExternalIdsListResponse) StatusCode() int {
	if r.HTTPResponse != nil {
		return r.HTTPResponse.StatusCode
	}
	return 0
}
type V1WorkflowRunCreateResponse struct {
Body []byte
HTTPResponse *http.Response
@@ -16296,6 +16477,15 @@ func (c *ClientWithResponses) V1WorkflowRunDisplayNamesListWithResponse(ctx cont
return ParseV1WorkflowRunDisplayNamesListResponse(rsp)
}
// V1WorkflowRunExternalIdsListWithResponse issues the external-ids list
// request and parses the body into a typed
// *V1WorkflowRunExternalIdsListResponse.
func (c *ClientWithResponses) V1WorkflowRunExternalIdsListWithResponse(ctx context.Context, tenant openapi_types.UUID, params *V1WorkflowRunExternalIdsListParams, reqEditors ...RequestEditorFn) (*V1WorkflowRunExternalIdsListResponse, error) {
	rsp, err := c.V1WorkflowRunExternalIdsList(ctx, tenant, params, reqEditors...)
	if err != nil {
		return nil, err
	}
	return ParseV1WorkflowRunExternalIdsListResponse(rsp)
}
// V1WorkflowRunCreateWithBodyWithResponse request with arbitrary body returning *V1WorkflowRunCreateResponse
func (c *ClientWithResponses) V1WorkflowRunCreateWithBodyWithResponse(ctx context.Context, tenant openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1WorkflowRunCreateResponse, error) {
rsp, err := c.V1WorkflowRunCreateWithBody(ctx, tenant, contentType, body, reqEditors...)
@@ -18877,6 +19067,53 @@ func ParseV1WorkflowRunDisplayNamesListResponse(rsp *http.Response) (*V1Workflow
return response, nil
}
// ParseV1WorkflowRunExternalIdsListResponse parses an HTTP response from a
// V1WorkflowRunExternalIdsListWithResponse call. The body is fully read and
// closed; on a JSON Content-Type the payload is decoded into the field
// matching the status code (200/400/403/501). Other statuses or content types
// leave only Body and HTTPResponse populated.
func ParseV1WorkflowRunExternalIdsListResponse(rsp *http.Response) (*V1WorkflowRunExternalIdsListResponse, error) {
	bodyBytes, err := io.ReadAll(rsp.Body)
	defer func() { _ = rsp.Body.Close() }()
	if err != nil {
		return nil, err
	}

	response := &V1WorkflowRunExternalIdsListResponse{
		Body:         bodyBytes,
		HTTPResponse: rsp,
	}

	// Dispatch on Content-Type + status code; unmatched combinations fall
	// through with only the raw body retained.
	switch {
	case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200:
		var dest V1WorkflowRunExternalIdList
		if err := json.Unmarshal(bodyBytes, &dest); err != nil {
			return nil, err
		}
		response.JSON200 = &dest

	case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400:
		var dest APIErrors
		if err := json.Unmarshal(bodyBytes, &dest); err != nil {
			return nil, err
		}
		response.JSON400 = &dest

	case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403:
		var dest APIErrors
		if err := json.Unmarshal(bodyBytes, &dest); err != nil {
			return nil, err
		}
		response.JSON403 = &dest

	case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 501:
		var dest APIErrors
		if err := json.Unmarshal(bodyBytes, &dest); err != nil {
			return nil, err
		}
		response.JSON501 = &dest

	}

	return response, nil
}
// ParseV1WorkflowRunCreateResponse parses an HTTP response from a V1WorkflowRunCreateWithResponse call
func ParseV1WorkflowRunCreateResponse(rsp *http.Response) (*V1WorkflowRunCreateResponse, error) {
bodyBytes, err := io.ReadAll(rsp.Body)
+71
View File
@@ -244,6 +244,8 @@ type OLAPRepository interface {
StoreCELEvaluationFailures(ctx context.Context, tenantId string, failures []CELEvaluationFailure) error
AnalyzeOLAPTables(ctx context.Context) error
ListWorkflowRunExternalIds(ctx context.Context, tenantId string, opts ListWorkflowRunOpts) ([]pgtype.UUID, error)
}
type OLAPRepositoryImpl struct {
@@ -943,6 +945,75 @@ func (r *OLAPRepositoryImpl) ListWorkflowRuns(ctx context.Context, tenantId stri
return res, int(count), nil
}
// ListWorkflowRunExternalIds returns the external IDs of every workflow run in
// the tenant that matches the time-window, status, workflow-id, and
// additional-metadata filters in opts. When opts.Statuses is empty, all
// readable statuses are included.
func (r *OLAPRepositoryImpl) ListWorkflowRunExternalIds(ctx context.Context, tenantId string, opts ListWorkflowRunOpts) ([]pgtype.UUID, error) {
	ctx, span := telemetry.NewSpan(ctx, "list-workflow-run-external-ids-olap")
	defer span.End()

	tx, commit, rollback, err := sqlchelpers.PrepareTx(ctx, r.readPool, r.l, 30000)
	if err != nil {
		return nil, err
	}

	// releases the tx on any early return; no-op after a successful commit
	defer rollback()

	params := sqlcv1.ListWorkflowRunExternalIdsParams{
		Tenantid: sqlchelpers.UUIDFromStr(tenantId),
		Since:    sqlchelpers.TimestamptzFromTime(opts.CreatedAfter),
	}

	statuses := make([]string, 0, len(opts.Statuses))

	for _, status := range opts.Statuses {
		statuses = append(statuses, string(status))
	}

	// Default to every readable status when the caller does not filter.
	if len(statuses) == 0 {
		statuses = []string{
			string(sqlcv1.V1ReadableStatusOlapQUEUED),
			string(sqlcv1.V1ReadableStatusOlapRUNNING),
			string(sqlcv1.V1ReadableStatusOlapCOMPLETED),
			string(sqlcv1.V1ReadableStatusOlapCANCELLED),
			string(sqlcv1.V1ReadableStatusOlapFAILED),
		}
	}

	params.Statuses = statuses

	if len(opts.WorkflowIds) > 0 {
		workflowIdParams := make([]pgtype.UUID, 0, len(opts.WorkflowIds))

		for _, id := range opts.WorkflowIds {
			workflowIdParams = append(workflowIdParams, sqlchelpers.UUIDFromStr(id.String()))
		}

		params.WorkflowIds = workflowIdParams
	}

	if until := opts.FinishedBefore; until != nil {
		params.Until = sqlchelpers.TimestamptzFromTime(*until)
	}

	for key, value := range opts.AdditionalMetadata {
		// BUG FIX: this previously used an unchecked value.(string) assertion,
		// which panics when a caller supplies a non-string metadata value.
		// Skip pairs that cannot be represented as text filters instead.
		strValue, ok := value.(string)
		if !ok {
			continue
		}

		params.AdditionalMetaKeys = append(params.AdditionalMetaKeys, key)
		params.AdditionalMetaValues = append(params.AdditionalMetaValues, strValue)
	}

	externalIds, err := r.queries.ListWorkflowRunExternalIds(ctx, tx, params)
	if err != nil {
		return nil, err
	}

	if err := commit(ctx); err != nil {
		return nil, err
	}

	return externalIds, nil
}
func (r *OLAPRepositoryImpl) ListTaskRunEvents(ctx context.Context, tenantId string, taskId int64, taskInsertedAt pgtype.Timestamptz, limit, offset int64) ([]*sqlcv1.ListTaskEventsRow, error) {
rows, err := r.queries.ListTaskEvents(ctx, r.readPool, sqlcv1.ListTaskEventsParams{
Tenantid: sqlchelpers.UUIDFromStr(tenantId),
+27
View File
@@ -1582,3 +1582,30 @@ INSERT INTO v1_cel_evaluation_failures_olap (
SELECT @tenantId::UUID, source, error
FROM inputs
;
-- name: ListWorkflowRunExternalIds :many
-- Returns the external_id of every run in the tenant that falls inside the
-- (since, until] insertion window, has one of the given readable statuses,
-- and optionally matches a workflow-id filter and additional-metadata
-- key/value pairs.
SELECT external_id
FROM v1_runs_olap
WHERE
    tenant_id = @tenantId::UUID
    AND inserted_at > @since::TIMESTAMPTZ
    -- upper bound is optional; NULL disables it
    AND (
        sqlc.narg('until')::TIMESTAMPTZ IS NULL
        OR inserted_at <= sqlc.narg('until')::TIMESTAMPTZ
    )
    AND readable_status = ANY(CAST(@statuses::TEXT[] AS v1_readable_status_olap[]))
    -- metadata filter: keys/values arrive as two parallel arrays which are
    -- zipped positionally; a run matches if any of its additional_metadata
    -- entries equals one of the (k, v) pairs
    AND (
        sqlc.narg('additionalMetaKeys')::text[] IS NULL
        OR sqlc.narg('additionalMetaValues')::text[] IS NULL
        OR EXISTS (
            SELECT 1 FROM jsonb_each_text(additional_metadata) kv
            JOIN LATERAL (
                SELECT unnest(sqlc.narg('additionalMetaKeys')::text[]) AS k,
                       unnest(sqlc.narg('additionalMetaValues')::text[]) AS v
            ) AS u ON kv.key = u.k AND kv.value = u.v
        )
    )
    -- workflow-id filter is optional; NULL disables it
    AND (
        sqlc.narg('workflowIds')::UUID[] IS NULL OR workflow_id = ANY(sqlc.narg('workflowIds')::UUID[])
    )
;
+65
View File
@@ -1495,6 +1495,71 @@ func (q *Queries) ListWorkflowRunDisplayNames(ctx context.Context, db DBTX, arg
return items, nil
}
// listWorkflowRunExternalIds is the sqlc-generated SQL backing
// Queries.ListWorkflowRunExternalIds. Positional parameters:
// $1 tenant, $2 since, $3 until (nullable), $4 statuses,
// $5/$6 parallel metadata key/value arrays (nullable), $7 workflow ids (nullable).
const listWorkflowRunExternalIds = `-- name: ListWorkflowRunExternalIds :many
SELECT external_id
FROM v1_runs_olap
WHERE
    tenant_id = $1::UUID
    AND inserted_at > $2::TIMESTAMPTZ
    AND (
        $3::TIMESTAMPTZ IS NULL
        OR inserted_at <= $3::TIMESTAMPTZ
    )
    AND readable_status = ANY(CAST($4::TEXT[] AS v1_readable_status_olap[]))
    AND (
        $5::text[] IS NULL
        OR $6::text[] IS NULL
        OR EXISTS (
            SELECT 1 FROM jsonb_each_text(additional_metadata) kv
            JOIN LATERAL (
                SELECT unnest($5::text[]) AS k,
                       unnest($6::text[]) AS v
            ) AS u ON kv.key = u.k AND kv.value = u.v
        )
    )
    AND (
        $7::UUID[] IS NULL OR workflow_id = ANY($7::UUID[])
    )
`
// ListWorkflowRunExternalIdsParams bundles the bind parameters for the
// ListWorkflowRunExternalIds query, in positional order ($1..$7).
type ListWorkflowRunExternalIdsParams struct {
	Tenantid             pgtype.UUID        `json:"tenantid"`             // tenant scope
	Since                pgtype.Timestamptz `json:"since"`                // lower bound on inserted_at (exclusive)
	Until                pgtype.Timestamptz `json:"until"`                // optional upper bound on inserted_at (inclusive)
	Statuses             []string           `json:"statuses"`             // readable statuses to include
	AdditionalMetaKeys   []string           `json:"additionalMetaKeys"`   // metadata keys; zipped positionally with AdditionalMetaValues
	AdditionalMetaValues []string           `json:"additionalMetaValues"` // metadata values; must parallel AdditionalMetaKeys
	WorkflowIds          []pgtype.UUID      `json:"workflowIds"`          // optional workflow-id filter
}
// ListWorkflowRunExternalIds runs the listWorkflowRunExternalIds query and
// collects every returned external_id into a slice of UUIDs.
func (q *Queries) ListWorkflowRunExternalIds(ctx context.Context, db DBTX, arg ListWorkflowRunExternalIdsParams) ([]pgtype.UUID, error) {
	rows, err := db.Query(ctx, listWorkflowRunExternalIds,
		arg.Tenantid,
		arg.Since,
		arg.Until,
		arg.Statuses,
		arg.AdditionalMetaKeys,
		arg.AdditionalMetaValues,
		arg.WorkflowIds,
	)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var ids []pgtype.UUID
	for rows.Next() {
		var id pgtype.UUID
		if scanErr := rows.Scan(&id); scanErr != nil {
			return nil, scanErr
		}
		ids = append(ids, id)
	}

	// surface any iteration error (e.g. a connection failure mid-stream)
	if rowsErr := rows.Err(); rowsErr != nil {
		return nil, rowsErr
	}

	return ids, nil
}
const populateDAGMetadata = `-- name: PopulateDAGMetadata :one
WITH run AS (
SELECT
+6
View File
@@ -5,6 +5,12 @@ All notable changes to Hatchet's Python SDK will be documented in this changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [1.20.1] - 2025-10-14
### Added
- Adds wrapper methods for bulk cancelling / replaying large numbers of runs with pagination.
## [1.20.0] - 2025-10-03
### Removed
+5
View File
@@ -24,3 +24,8 @@
- get_run_ref
- get_task_run
- aio_get_task_run
- bulk_cancel_by_filters_with_pagination
- bulk_replay_by_filters_with_pagination
- aio_bulk_cancel_by_filters_with_pagination
- aio_bulk_replay_by_filters_with_pagination
- subscribe_to_stream
@@ -150,6 +150,7 @@ from hatchet_sdk.clients.rest.models.rate_limit_order_by_field import (
RateLimitOrderByField,
)
from hatchet_sdk.clients.rest.models.recent_step_runs import RecentStepRuns
from hatchet_sdk.clients.rest.models.registered_workflow import RegisteredWorkflow
from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest
from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest
from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import (
@@ -196,6 +197,7 @@ from hatchet_sdk.clients.rest.models.tenant_alert_email_group_list import (
from hatchet_sdk.clients.rest.models.tenant_alerting_settings import (
TenantAlertingSettings,
)
from hatchet_sdk.clients.rest.models.tenant_environment import TenantEnvironment
from hatchet_sdk.clients.rest.models.tenant_invite import TenantInvite
from hatchet_sdk.clients.rest.models.tenant_invite_list import TenantInviteList
from hatchet_sdk.clients.rest.models.tenant_list import TenantList
@@ -214,12 +216,18 @@ from hatchet_sdk.clients.rest.models.tenant_version import TenantVersion
from hatchet_sdk.clients.rest.models.trigger_workflow_run_request import (
TriggerWorkflowRunRequest,
)
from hatchet_sdk.clients.rest.models.update_cron_workflow_trigger_request import (
UpdateCronWorkflowTriggerRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_alert_email_group_request import (
UpdateTenantAlertEmailGroupRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_invite_request import (
UpdateTenantInviteRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_member_request import (
UpdateTenantMemberRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_request import UpdateTenantRequest
from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest
from hatchet_sdk.clients.rest.models.user import User
@@ -290,6 +298,9 @@ from hatchet_sdk.clients.rest.models.v1_trigger_workflow_run_request import (
from hatchet_sdk.clients.rest.models.v1_update_filter_request import (
V1UpdateFilterRequest,
)
from hatchet_sdk.clients.rest.models.v1_update_webhook_request import (
V1UpdateWebhookRequest,
)
from hatchet_sdk.clients.rest.models.v1_webhook import V1Webhook
from hatchet_sdk.clients.rest.models.v1_webhook_api_key_auth import V1WebhookAPIKeyAuth
from hatchet_sdk.clients.rest.models.v1_webhook_auth_type import V1WebhookAuthType
@@ -302,9 +313,6 @@ from hatchet_sdk.clients.rest.models.v1_webhook_hmac_encoding import (
V1WebhookHMACEncoding,
)
from hatchet_sdk.clients.rest.models.v1_webhook_list import V1WebhookList
from hatchet_sdk.clients.rest.models.v1_webhook_receive200_response import (
V1WebhookReceive200Response,
)
from hatchet_sdk.clients.rest.models.v1_webhook_source_name import V1WebhookSourceName
from hatchet_sdk.clients.rest.models.v1_workflow_run import V1WorkflowRun
from hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails
@@ -1597,6 +1597,10 @@ class TaskApi:
Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
Field(description="The id of the event that triggered the task"),
] = None,
additional_metadata: Annotated[
Optional[List[StrictStr]],
Field(description="Additional metadata k-v pairs to filter by"),
] = None,
_request_timeout: Union[
None,
Annotated[StrictFloat, Field(gt=0)],
@@ -1625,6 +1629,8 @@ class TaskApi:
:type parent_task_external_id: str
:param triggering_event_external_id: The id of the event that triggered the task
:type triggering_event_external_id: str
:param additional_metadata: Additional metadata k-v pairs to filter by
:type additional_metadata: List[str]
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
@@ -1654,6 +1660,7 @@ class TaskApi:
workflow_ids=workflow_ids,
parent_task_external_id=parent_task_external_id,
triggering_event_external_id=triggering_event_external_id,
additional_metadata=additional_metadata,
_request_auth=_request_auth,
_content_type=_content_type,
_headers=_headers,
@@ -1704,6 +1711,10 @@ class TaskApi:
Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
Field(description="The id of the event that triggered the task"),
] = None,
additional_metadata: Annotated[
Optional[List[StrictStr]],
Field(description="Additional metadata k-v pairs to filter by"),
] = None,
_request_timeout: Union[
None,
Annotated[StrictFloat, Field(gt=0)],
@@ -1732,6 +1743,8 @@ class TaskApi:
:type parent_task_external_id: str
:param triggering_event_external_id: The id of the event that triggered the task
:type triggering_event_external_id: str
:param additional_metadata: Additional metadata k-v pairs to filter by
:type additional_metadata: List[str]
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
@@ -1761,6 +1774,7 @@ class TaskApi:
workflow_ids=workflow_ids,
parent_task_external_id=parent_task_external_id,
triggering_event_external_id=triggering_event_external_id,
additional_metadata=additional_metadata,
_request_auth=_request_auth,
_content_type=_content_type,
_headers=_headers,
@@ -1811,6 +1825,10 @@ class TaskApi:
Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]],
Field(description="The id of the event that triggered the task"),
] = None,
additional_metadata: Annotated[
Optional[List[StrictStr]],
Field(description="Additional metadata k-v pairs to filter by"),
] = None,
_request_timeout: Union[
None,
Annotated[StrictFloat, Field(gt=0)],
@@ -1839,6 +1857,8 @@ class TaskApi:
:type parent_task_external_id: str
:param triggering_event_external_id: The id of the event that triggered the task
:type triggering_event_external_id: str
:param additional_metadata: Additional metadata k-v pairs to filter by
:type additional_metadata: List[str]
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
@@ -1868,6 +1888,7 @@ class TaskApi:
workflow_ids=workflow_ids,
parent_task_external_id=parent_task_external_id,
triggering_event_external_id=triggering_event_external_id,
additional_metadata=additional_metadata,
_request_auth=_request_auth,
_content_type=_content_type,
_headers=_headers,
@@ -1893,6 +1914,7 @@ class TaskApi:
workflow_ids,
parent_task_external_id,
triggering_event_external_id,
additional_metadata,
_request_auth,
_content_type,
_headers,
@@ -1903,6 +1925,7 @@ class TaskApi:
_collection_formats: Dict[str, str] = {
"workflow_ids": "multi",
"additional_metadata": "multi",
}
_path_params: Dict[str, str] = {}
@@ -1954,6 +1977,10 @@ class TaskApi:
("triggering_event_external_id", triggering_event_external_id)
)
if additional_metadata is not None:
_query_params.append(("additional_metadata", additional_metadata))
# process the header parameters
# process the form parameters
# process the body parameter
@@ -49,6 +49,9 @@ from hatchet_sdk.clients.rest.models.tenant_step_run_queue_metrics import (
from hatchet_sdk.clients.rest.models.update_tenant_alert_email_group_request import (
UpdateTenantAlertEmailGroupRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_member_request import (
UpdateTenantMemberRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_request import UpdateTenantRequest
from hatchet_sdk.clients.rest.rest import RESTResponseType
@@ -2862,6 +2865,7 @@ class TenantApi:
"201": "TenantInvite",
"400": "APIErrors",
"403": "APIError",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -2939,6 +2943,7 @@ class TenantApi:
"201": "TenantInvite",
"400": "APIErrors",
"403": "APIError",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -3016,6 +3021,7 @@ class TenantApi:
"201": "TenantInvite",
"400": "APIErrors",
"403": "APIError",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -4178,6 +4184,345 @@ class TenantApi:
_request_auth=_request_auth,
)
@validate_call
def tenant_member_update(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    member: Annotated[
        str,
        Field(
            min_length=36,
            strict=True,
            max_length=36,
            description="The tenant member id",
        ),
    ],
    update_tenant_member_request: Annotated[
        UpdateTenantMemberRequest,
        Field(description="The tenant member properties to update"),
    ],
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> TenantMember:
    """Update a tenant member
    Update a tenant member
    :param tenant: The tenant id (required)
    :type tenant: str
    :param member: The tenant member id (required)
    :type member: str
    :param update_tenant_member_request: The tenant member properties to update (required)
    :type update_tenant_member_request: UpdateTenantMemberRequest
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :param _content_type: force content-type for the request.
    :type _content_type: str, Optional
    :param _headers: set to override the headers for a single
                     request; this effectively ignores the headers
                     in the spec for a single request.
    :type _headers: dict, optional
    :param _host_index: set to override the host_index for a single
                        request; this effectively ignores the host_index
                        in the spec for a single request.
    :type _host_index: int, optional
    :return: Returns the result object.
    """  # noqa: E501
    # Build the serialized request (path params + JSON body) for the
    # PATCH /tenants/{tenant}/members/{member} endpoint.
    _param = self._tenant_member_update_serialize(
        tenant=tenant,
        member=member,
        update_tenant_member_request=update_tenant_member_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )
    # Status-code -> model-name map used by response_deserialize below.
    _response_types_map: Dict[str, Optional[str]] = {
        "200": "TenantMember",
        "400": "APIErrors",
        "403": "APIErrors",
        "404": "APIErrors",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    # Fully read the body so it can be deserialized into the mapped model.
    response_data.read()
    # Return only the deserialized model (`.data`), not the full ApiResponse.
    return self.api_client.response_deserialize(
        response_data=response_data,
        response_types_map=_response_types_map,
    ).data
@validate_call
def tenant_member_update_with_http_info(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    member: Annotated[
        str,
        Field(
            min_length=36,
            strict=True,
            max_length=36,
            description="The tenant member id",
        ),
    ],
    update_tenant_member_request: Annotated[
        UpdateTenantMemberRequest,
        Field(description="The tenant member properties to update"),
    ],
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> ApiResponse[TenantMember]:
    """Update a tenant member
    Update a tenant member
    :param tenant: The tenant id (required)
    :type tenant: str
    :param member: The tenant member id (required)
    :type member: str
    :param update_tenant_member_request: The tenant member properties to update (required)
    :type update_tenant_member_request: UpdateTenantMemberRequest
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :param _content_type: force content-type for the request.
    :type _content_type: str, Optional
    :param _headers: set to override the headers for a single
                     request; this effectively ignores the headers
                     in the spec for a single request.
    :type _headers: dict, optional
    :param _host_index: set to override the host_index for a single
                        request; this effectively ignores the host_index
                        in the spec for a single request.
    :type _host_index: int, optional
    :return: Returns the result object.
    """  # noqa: E501
    # Same call as tenant_member_update, but returns the full deserialized
    # ApiResponse wrapper instead of just its `.data` model.
    _param = self._tenant_member_update_serialize(
        tenant=tenant,
        member=member,
        update_tenant_member_request=update_tenant_member_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )
    # Status-code -> model-name map used by response_deserialize below.
    _response_types_map: Dict[str, Optional[str]] = {
        "200": "TenantMember",
        "400": "APIErrors",
        "403": "APIErrors",
        "404": "APIErrors",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    # Fully read the body so it can be deserialized into the mapped model.
    response_data.read()
    return self.api_client.response_deserialize(
        response_data=response_data,
        response_types_map=_response_types_map,
    )
@validate_call
def tenant_member_update_without_preload_content(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    member: Annotated[
        str,
        Field(
            min_length=36,
            strict=True,
            max_length=36,
            description="The tenant member id",
        ),
    ],
    update_tenant_member_request: Annotated[
        UpdateTenantMemberRequest,
        Field(description="The tenant member properties to update"),
    ],
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> RESTResponseType:
    """Update a tenant member
    Update a tenant member
    :param tenant: The tenant id (required)
    :type tenant: str
    :param member: The tenant member id (required)
    :type member: str
    :param update_tenant_member_request: The tenant member properties to update (required)
    :type update_tenant_member_request: UpdateTenantMemberRequest
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :param _content_type: force content-type for the request.
    :type _content_type: str, Optional
    :param _headers: set to override the headers for a single
                     request; this effectively ignores the headers
                     in the spec for a single request.
    :type _headers: dict, optional
    :param _host_index: set to override the host_index for a single
                        request; this effectively ignores the host_index
                        in the spec for a single request.
    :type _host_index: int, optional
    :return: Returns the result object.
    """  # noqa: E501
    # Same call as tenant_member_update, but the body is NOT read or
    # deserialized here; the caller consumes the raw REST response.
    _param = self._tenant_member_update_serialize(
        tenant=tenant,
        member=member,
        update_tenant_member_request=update_tenant_member_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )
    # Kept for parity with the other variants; unused in this one since no
    # deserialization happens.
    _response_types_map: Dict[str, Optional[str]] = {
        "200": "TenantMember",
        "400": "APIErrors",
        "403": "APIErrors",
        "404": "APIErrors",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    return response_data.response
def _tenant_member_update_serialize(
    self,
    tenant,
    member,
    update_tenant_member_request,
    _request_auth,
    _content_type,
    _headers,
    _host_index,
) -> RequestSerialized:
    """Build the serialized request tuple for
    PATCH /api/v1/tenants/{tenant}/members/{member}.

    Shared by tenant_member_update and its *_with_http_info /
    *_without_preload_content variants.
    """
    _host = None
    _collection_formats: Dict[str, str] = {}
    _path_params: Dict[str, str] = {}
    _query_params: List[Tuple[str, str]] = []
    # caller-supplied header overrides seed the header dict
    _header_params: Dict[str, Optional[str]] = _headers or {}
    _form_params: List[Tuple[str, str]] = []
    _files: Dict[
        str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
    ] = {}
    _body_params: Optional[bytes] = None
    # process the path parameters
    if tenant is not None:
        _path_params["tenant"] = tenant
    if member is not None:
        _path_params["member"] = member
    # process the query parameters
    # process the header parameters
    # process the form parameters
    # process the body parameter
    if update_tenant_member_request is not None:
        _body_params = update_tenant_member_request
    # set the HTTP header `Accept`
    if "Accept" not in _header_params:
        _header_params["Accept"] = self.api_client.select_header_accept(
            ["application/json"]
        )
    # set the HTTP header `Content-Type`
    if _content_type:
        _header_params["Content-Type"] = _content_type
    else:
        _default_content_type = self.api_client.select_header_content_type(
            ["application/json"]
        )
        if _default_content_type is not None:
            _header_params["Content-Type"] = _default_content_type
    # authentication setting
    _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]
    return self.api_client.param_serialize(
        method="PATCH",
        resource_path="/api/v1/tenants/{tenant}/members/{member}",
        path_params=_path_params,
        query_params=_query_params,
        header_params=_header_params,
        body=_body_params,
        post_params=_form_params,
        files=_files,
        auth_settings=_auth_settings,
        collection_formats=_collection_formats,
        _host=_host,
        _request_auth=_request_auth,
    )
@validate_call
def tenant_resource_policy_get(
self,
@@ -333,6 +333,7 @@ class UserApi:
"400": "APIErrors",
"401": "APIErrors",
"405": "APIErrors",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -400,6 +401,7 @@ class UserApi:
"400": "APIErrors",
"401": "APIErrors",
"405": "APIErrors",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -467,6 +469,7 @@ class UserApi:
"400": "APIErrors",
"401": "APIErrors",
"405": "APIErrors",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -1714,6 +1717,7 @@ class UserApi:
"400": "APIErrors",
"401": "APIErrors",
"405": "APIErrors",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -1781,6 +1785,7 @@ class UserApi:
"400": "APIErrors",
"401": "APIErrors",
"405": "APIErrors",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -1848,6 +1853,7 @@ class UserApi:
"400": "APIErrors",
"401": "APIErrors",
"405": "APIErrors",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -2211,6 +2217,7 @@ class UserApi:
"400": "APIErrors",
"401": "APIErrors",
"405": "APIErrors",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -2278,6 +2285,7 @@ class UserApi:
"400": "APIErrors",
"401": "APIErrors",
"405": "APIErrors",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -2345,6 +2353,7 @@ class UserApi:
"400": "APIErrors",
"401": "APIErrors",
"405": "APIErrors",
"422": "APIErrors",
}
response_data = self.api_client.call_api(
*_param, _request_timeout=_request_timeout
@@ -22,11 +22,11 @@ from hatchet_sdk.clients.rest.api_response import ApiResponse
from hatchet_sdk.clients.rest.models.v1_create_webhook_request import (
V1CreateWebhookRequest,
)
from hatchet_sdk.clients.rest.models.v1_update_webhook_request import (
V1UpdateWebhookRequest,
)
from hatchet_sdk.clients.rest.models.v1_webhook import V1Webhook
from hatchet_sdk.clients.rest.models.v1_webhook_list import V1WebhookList
from hatchet_sdk.clients.rest.models.v1_webhook_receive200_response import (
V1WebhookReceive200Response,
)
from hatchet_sdk.clients.rest.models.v1_webhook_source_name import V1WebhookSourceName
from hatchet_sdk.clients.rest.rest import RESTResponseType
@@ -1293,7 +1293,7 @@ class WebhookApi:
_content_type: Optional[StrictStr] = None,
_headers: Optional[Dict[StrictStr, Any]] = None,
_host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> V1WebhookReceive200Response:
) -> Dict[str, object]:
"""Post a webhook message
Post an incoming webhook message
@@ -1334,7 +1334,7 @@ class WebhookApi:
)
_response_types_map: Dict[str, Optional[str]] = {
"200": "V1WebhookReceive200Response",
"200": "Dict[str, object]",
"400": "APIErrors",
"403": "APIErrors",
}
@@ -1368,7 +1368,7 @@ class WebhookApi:
_content_type: Optional[StrictStr] = None,
_headers: Optional[Dict[StrictStr, Any]] = None,
_host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> ApiResponse[V1WebhookReceive200Response]:
) -> ApiResponse[Dict[str, object]]:
"""Post a webhook message
Post an incoming webhook message
@@ -1409,7 +1409,7 @@ class WebhookApi:
)
_response_types_map: Dict[str, Optional[str]] = {
"200": "V1WebhookReceive200Response",
"200": "Dict[str, object]",
"400": "APIErrors",
"403": "APIErrors",
}
@@ -1484,7 +1484,7 @@ class WebhookApi:
)
_response_types_map: Dict[str, Optional[str]] = {
"200": "V1WebhookReceive200Response",
"200": "Dict[str, object]",
"400": "APIErrors",
"403": "APIErrors",
}
@@ -1549,3 +1549,318 @@ class WebhookApi:
_host=_host,
_request_auth=_request_auth,
)
@validate_call
def v1_webhook_update(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    v1_webhook: Annotated[StrictStr, Field(description="The webhook name")],
    v1_update_webhook_request: Annotated[
        V1UpdateWebhookRequest,
        Field(description="The input to the webhook creation"),
    ],
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> V1Webhook:
    """Update a webhook
    Update a webhook
    :param tenant: The tenant id (required)
    :type tenant: str
    :param v1_webhook: The webhook name (required)
    :type v1_webhook: str
    :param v1_update_webhook_request: The input to the webhook creation (required)
    :type v1_update_webhook_request: V1UpdateWebhookRequest
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :param _content_type: force content-type for the request.
    :type _content_type: str, Optional
    :param _headers: set to override the headers for a single
                     request; this effectively ignores the headers
                     in the spec for a single request.
    :type _headers: dict, optional
    :param _host_index: set to override the host_index for a single
                        request; this effectively ignores the host_index
                        in the spec for a single request.
    :type _host_index: int, optional
    :return: Returns the result object.
    """  # noqa: E501
    # Serialize the update request (path params + JSON body).
    _param = self._v1_webhook_update_serialize(
        tenant=tenant,
        v1_webhook=v1_webhook,
        v1_update_webhook_request=v1_update_webhook_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )
    # Status-code -> model-name map used by response_deserialize below.
    _response_types_map: Dict[str, Optional[str]] = {
        "200": "V1Webhook",
        "400": "APIErrors",
        "403": "APIErrors",
        "404": "APIErrors",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    # Fully read the body so it can be deserialized into the mapped model.
    response_data.read()
    # Return only the deserialized model (`.data`), not the full ApiResponse.
    return self.api_client.response_deserialize(
        response_data=response_data,
        response_types_map=_response_types_map,
    ).data
@validate_call
def v1_webhook_update_with_http_info(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    v1_webhook: Annotated[StrictStr, Field(description="The webhook name")],
    v1_update_webhook_request: Annotated[
        V1UpdateWebhookRequest,
        Field(description="The input to the webhook creation"),
    ],
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> ApiResponse[V1Webhook]:
    """Update a webhook
    Update a webhook
    :param tenant: The tenant id (required)
    :type tenant: str
    :param v1_webhook: The webhook name (required)
    :type v1_webhook: str
    :param v1_update_webhook_request: The input to the webhook creation (required)
    :type v1_update_webhook_request: V1UpdateWebhookRequest
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: set to override the auth_settings for an a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :param _content_type: force content-type for the request.
    :type _content_type: str, Optional
    :param _headers: set to override the headers for a single
                     request; this effectively ignores the headers
                     in the spec for a single request.
    :type _headers: dict, optional
    :param _host_index: set to override the host_index for a single
                        request; this effectively ignores the host_index
                        in the spec for a single request.
    :type _host_index: int, optional
    :return: Returns the result object.
    """  # noqa: E501
    # Same call as v1_webhook_update, but returns the full deserialized
    # ApiResponse wrapper instead of just its `.data` model.
    _param = self._v1_webhook_update_serialize(
        tenant=tenant,
        v1_webhook=v1_webhook,
        v1_update_webhook_request=v1_update_webhook_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )
    # Status-code -> model-name map used by response_deserialize below.
    _response_types_map: Dict[str, Optional[str]] = {
        "200": "V1Webhook",
        "400": "APIErrors",
        "403": "APIErrors",
        "404": "APIErrors",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    # Fully read the body so it can be deserialized into the mapped model.
    response_data.read()
    return self.api_client.response_deserialize(
        response_data=response_data,
        response_types_map=_response_types_map,
    )
@validate_call
def v1_webhook_update_without_preload_content(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    v1_webhook: Annotated[StrictStr, Field(description="The webhook name")],
    v1_update_webhook_request: Annotated[
        V1UpdateWebhookRequest,
        Field(description="The input to the webhook creation"),
    ],
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> RESTResponseType:
    """Update a webhook

    Issues PATCH /api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook} and
    returns the raw HTTP response WITHOUT reading or deserializing the body
    (unlike the ``_with_http_info`` variant); the caller is responsible for
    consuming/closing the response.

    NOTE(review): this appears to be OpenAPI-generated client code — prefer
    regenerating from the spec over hand-editing.

    :param tenant: The tenant id (required)
    :type tenant: str
    :param v1_webhook: The webhook name (required)
    :type v1_webhook: str
    :param v1_update_webhook_request: The input to the webhook creation (required)
    :type v1_update_webhook_request: V1UpdateWebhookRequest
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :param _content_type: force content-type for the request.
    :type _content_type: str, Optional
    :param _headers: set to override the headers for a single
                     request; this effectively ignores the headers
                     in the spec for a single request.
    :type _headers: dict, optional
    :param _host_index: set to override the host_index for a single
                        request; this effectively ignores the host_index
                        in the spec for a single request.
    :type _host_index: int, optional
    :return: The raw response object; the body has not been read.
    """  # noqa: E501

    _param = self._v1_webhook_update_serialize(
        tenant=tenant,
        v1_webhook=v1_webhook,
        v1_update_webhook_request=v1_update_webhook_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Kept for parity with the other variants; not used to deserialize here.
    _response_types_map: Dict[str, Optional[str]] = {
        "200": "V1Webhook",
        "400": "APIErrors",
        "403": "APIErrors",
        "404": "APIErrors",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    # Return the raw response; no .read()/deserialization on purpose.
    return response_data.response
def _v1_webhook_update_serialize(
    self,
    tenant,
    v1_webhook,
    v1_update_webhook_request,
    _request_auth,
    _content_type,
    _headers,
    _host_index,
) -> RequestSerialized:
    """Build the serialized request for the webhook-update endpoint.

    Fills the path parameters (``tenant``, ``v1-webhook``), places the
    request model in the JSON body, sets Accept/Content-Type headers, and
    delegates to ``api_client.param_serialize`` for
    PATCH /api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook}.
    Shared by all three ``v1_webhook_update*`` variants.
    """

    _host = None

    _collection_formats: Dict[str, str] = {}

    _path_params: Dict[str, str] = {}
    _query_params: List[Tuple[str, str]] = []
    _header_params: Dict[str, Optional[str]] = _headers or {}
    _form_params: List[Tuple[str, str]] = []
    _files: Dict[
        str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
    ] = {}
    # NOTE(review): annotated Optional[bytes] but assigned the request model
    # below; the actual byte serialization happens inside param_serialize.
    _body_params: Optional[bytes] = None

    # process the path parameters
    if tenant is not None:
        _path_params["tenant"] = tenant
    if v1_webhook is not None:
        _path_params["v1-webhook"] = v1_webhook
    # process the query parameters
    # process the header parameters
    # process the form parameters
    # process the body parameter
    if v1_update_webhook_request is not None:
        _body_params = v1_update_webhook_request

    # set the HTTP header `Accept`
    if "Accept" not in _header_params:
        _header_params["Accept"] = self.api_client.select_header_accept(
            ["application/json"]
        )

    # set the HTTP header `Content-Type` (caller override wins)
    if _content_type:
        _header_params["Content-Type"] = _content_type
    else:
        _default_content_type = self.api_client.select_header_content_type(
            ["application/json"]
        )
        if _default_content_type is not None:
            _header_params["Content-Type"] = _default_content_type

    # authentication setting: cookie session or bearer token, per the spec
    _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]

    return self.api_client.param_serialize(
        method="PATCH",
        resource_path="/api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook}",
        path_params=_path_params,
        query_params=_query_params,
        header_params=_header_params,
        body=_body_params,
        post_params=_form_params,
        files=_files,
        auth_settings=_auth_settings,
        collection_formats=_collection_formats,
        _host=_host,
        _request_auth=_request_auth,
    )
@@ -34,6 +34,9 @@ from hatchet_sdk.clients.rest.models.scheduled_workflows_order_by_field import (
ScheduledWorkflowsOrderByField,
)
from hatchet_sdk.clients.rest.models.tenant_queue_metrics import TenantQueueMetrics
from hatchet_sdk.clients.rest.models.update_cron_workflow_trigger_request import (
UpdateCronWorkflowTriggerRequest,
)
from hatchet_sdk.clients.rest.models.workflow import Workflow
from hatchet_sdk.clients.rest.models.workflow_kind import WorkflowKind
from hatchet_sdk.clients.rest.models.workflow_list import WorkflowList
@@ -1437,6 +1440,330 @@ class WorkflowApi:
_request_auth=_request_auth,
)
@validate_call
def workflow_cron_update(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    cron_workflow: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The cron job id"
        ),
    ],
    update_cron_workflow_trigger_request: Annotated[
        UpdateCronWorkflowTriggerRequest, Field(description="The input for updates")
    ],
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> None:
    """Update cron job workflow run

    Update a cron workflow trigger for a tenant via
    PATCH /api/v1/tenants/{tenant}/workflows/crons/{cron-workflow}.
    A successful response is 204 No Content, so this returns None.

    NOTE(review): this appears to be OpenAPI-generated client code — prefer
    regenerating from the spec over hand-editing.

    :param tenant: The tenant id (required)
    :type tenant: str
    :param cron_workflow: The cron job id (required)
    :type cron_workflow: str
    :param update_cron_workflow_trigger_request: The input for updates (required)
    :type update_cron_workflow_trigger_request: UpdateCronWorkflowTriggerRequest
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :param _content_type: force content-type for the request.
    :type _content_type: str, Optional
    :param _headers: set to override the headers for a single
                     request; this effectively ignores the headers
                     in the spec for a single request.
    :type _headers: dict, optional
    :param _host_index: set to override the host_index for a single
                        request; this effectively ignores the host_index
                        in the spec for a single request.
    :type _host_index: int, optional
    :return: None on success (204 No Content).
    """  # noqa: E501

    _param = self._workflow_cron_update_serialize(
        tenant=tenant,
        cron_workflow=cron_workflow,
        update_cron_workflow_trigger_request=update_cron_workflow_trigger_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Status-code -> response model name map used for deserialization.
    # NOTE(review): 403 maps to "APIError" (singular) while sibling
    # endpoints use "APIErrors" — verify against the OpenAPI spec.
    _response_types_map: Dict[str, Optional[str]] = {
        "204": None,
        "400": "APIErrors",
        "403": "APIError",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    response_data.read()
    return self.api_client.response_deserialize(
        response_data=response_data,
        response_types_map=_response_types_map,
    ).data
@validate_call
def workflow_cron_update_with_http_info(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    cron_workflow: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The cron job id"
        ),
    ],
    update_cron_workflow_trigger_request: Annotated[
        UpdateCronWorkflowTriggerRequest, Field(description="The input for updates")
    ],
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> ApiResponse[None]:
    """Update cron job workflow run

    Same request as ``workflow_cron_update`` but returns the full
    ``ApiResponse`` wrapper (status code, headers, data) instead of only
    the deserialized ``.data``. See ``workflow_cron_update`` for full
    parameter documentation.

    :return: ``ApiResponse[None]`` — the endpoint responds 204 No Content
             on success.
    """  # noqa: E501

    _param = self._workflow_cron_update_serialize(
        tenant=tenant,
        cron_workflow=cron_workflow,
        update_cron_workflow_trigger_request=update_cron_workflow_trigger_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Status-code -> response model name map used for deserialization.
    # NOTE(review): 403 maps to "APIError" (singular) while sibling
    # endpoints use "APIErrors" — verify against the OpenAPI spec.
    _response_types_map: Dict[str, Optional[str]] = {
        "204": None,
        "400": "APIErrors",
        "403": "APIError",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    response_data.read()
    return self.api_client.response_deserialize(
        response_data=response_data,
        response_types_map=_response_types_map,
    )
@validate_call
def workflow_cron_update_without_preload_content(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    cron_workflow: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The cron job id"
        ),
    ],
    update_cron_workflow_trigger_request: Annotated[
        UpdateCronWorkflowTriggerRequest, Field(description="The input for updates")
    ],
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> RESTResponseType:
    """Update cron job workflow run

    Same request as ``workflow_cron_update`` but returns the raw HTTP
    response WITHOUT reading or deserializing the body; the caller is
    responsible for consuming/closing it. See ``workflow_cron_update`` for
    full parameter documentation.

    :return: The raw response object; the body has not been read.
    """  # noqa: E501

    _param = self._workflow_cron_update_serialize(
        tenant=tenant,
        cron_workflow=cron_workflow,
        update_cron_workflow_trigger_request=update_cron_workflow_trigger_request,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Kept for parity with the other variants; not used to deserialize here.
    _response_types_map: Dict[str, Optional[str]] = {
        "204": None,
        "400": "APIErrors",
        "403": "APIError",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    # Return the raw response; no .read()/deserialization on purpose.
    return response_data.response
def _workflow_cron_update_serialize(
    self,
    tenant,
    cron_workflow,
    update_cron_workflow_trigger_request,
    _request_auth,
    _content_type,
    _headers,
    _host_index,
) -> RequestSerialized:
    """Build the serialized request for the cron-update endpoint.

    Fills the path parameters (``tenant``, ``cron-workflow``), places the
    request model in the JSON body, sets Accept/Content-Type headers, and
    delegates to ``api_client.param_serialize`` for
    PATCH /api/v1/tenants/{tenant}/workflows/crons/{cron-workflow}.
    Shared by all three ``workflow_cron_update*`` variants.
    """

    _host = None

    _collection_formats: Dict[str, str] = {}

    _path_params: Dict[str, str] = {}
    _query_params: List[Tuple[str, str]] = []
    _header_params: Dict[str, Optional[str]] = _headers or {}
    _form_params: List[Tuple[str, str]] = []
    _files: Dict[
        str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
    ] = {}
    # NOTE(review): annotated Optional[bytes] but assigned the request model
    # below; the actual byte serialization happens inside param_serialize.
    _body_params: Optional[bytes] = None

    # process the path parameters
    if tenant is not None:
        _path_params["tenant"] = tenant
    if cron_workflow is not None:
        _path_params["cron-workflow"] = cron_workflow
    # process the query parameters
    # process the header parameters
    # process the form parameters
    # process the body parameter
    if update_cron_workflow_trigger_request is not None:
        _body_params = update_cron_workflow_trigger_request

    # set the HTTP header `Accept`
    if "Accept" not in _header_params:
        _header_params["Accept"] = self.api_client.select_header_accept(
            ["application/json"]
        )

    # set the HTTP header `Content-Type` (caller override wins)
    if _content_type:
        _header_params["Content-Type"] = _content_type
    else:
        _default_content_type = self.api_client.select_header_content_type(
            ["application/json"]
        )
        if _default_content_type is not None:
            _header_params["Content-Type"] = _default_content_type

    # authentication setting: cookie session or bearer token, per the spec
    _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]

    return self.api_client.param_serialize(
        method="PATCH",
        resource_path="/api/v1/tenants/{tenant}/workflows/crons/{cron-workflow}",
        path_params=_path_params,
        query_params=_query_params,
        header_params=_header_params,
        body=_body_params,
        post_params=_form_params,
        files=_files,
        auth_settings=_auth_settings,
        collection_formats=_collection_formats,
        _host=_host,
        _request_auth=_request_auth,
    )
@validate_call
def workflow_delete(
self,
@@ -640,6 +640,414 @@ class WorkflowRunsApi:
_request_auth=_request_auth,
)
@validate_call
def v1_workflow_run_external_ids_list(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    since: Annotated[datetime, Field(description="The earliest date to filter by")],
    statuses: Annotated[
        Optional[List[V1TaskStatus]],
        Field(description="A list of statuses to filter by"),
    ] = None,
    until: Annotated[
        Optional[datetime], Field(description="The latest date to filter by")
    ] = None,
    additional_metadata: Annotated[
        Optional[List[StrictStr]],
        Field(description="Additional metadata k-v pairs to filter by"),
    ] = None,
    workflow_ids: Annotated[
        Optional[
            List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]]
        ],
        Field(description="The workflow ids to find runs for"),
    ] = None,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> List[str]:
    """List workflow run external ids

    Lists external ids (UUID strings) for workflow runs matching the given
    filters, via
    GET /api/v1/stable/tenants/{tenant}/workflow-runs/external-ids.

    NOTE(review): this appears to be OpenAPI-generated client code — prefer
    regenerating from the spec over hand-editing.

    :param tenant: The tenant id (required)
    :type tenant: str
    :param since: The earliest date to filter by (required)
    :type since: datetime
    :param statuses: A list of statuses to filter by
    :type statuses: List[V1TaskStatus]
    :param until: The latest date to filter by
    :type until: datetime
    :param additional_metadata: Additional metadata k-v pairs to filter by
    :type additional_metadata: List[str]
    :param workflow_ids: The workflow ids to find runs for
    :type workflow_ids: List[str]
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :type _request_timeout: int, tuple(int, int), optional
    :param _request_auth: set to override the auth_settings for a single
                          request; this effectively ignores the
                          authentication in the spec for a single request.
    :type _request_auth: dict, optional
    :param _content_type: force content-type for the request.
    :type _content_type: str, Optional
    :param _headers: set to override the headers for a single
                     request; this effectively ignores the headers
                     in the spec for a single request.
    :type _headers: dict, optional
    :param _host_index: set to override the host_index for a single
                        request; this effectively ignores the host_index
                        in the spec for a single request.
    :type _host_index: int, optional
    :return: The list of matching external ids (UUID strings).
    """  # noqa: E501

    _param = self._v1_workflow_run_external_ids_list_serialize(
        tenant=tenant,
        since=since,
        statuses=statuses,
        until=until,
        additional_metadata=additional_metadata,
        workflow_ids=workflow_ids,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Status-code -> response model name map used for deserialization.
    _response_types_map: Dict[str, Optional[str]] = {
        "200": "List[str]",
        "400": "APIErrors",
        "403": "APIErrors",
        "501": "APIErrors",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    response_data.read()
    return self.api_client.response_deserialize(
        response_data=response_data,
        response_types_map=_response_types_map,
    ).data
@validate_call
def v1_workflow_run_external_ids_list_with_http_info(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    since: Annotated[datetime, Field(description="The earliest date to filter by")],
    statuses: Annotated[
        Optional[List[V1TaskStatus]],
        Field(description="A list of statuses to filter by"),
    ] = None,
    until: Annotated[
        Optional[datetime], Field(description="The latest date to filter by")
    ] = None,
    additional_metadata: Annotated[
        Optional[List[StrictStr]],
        Field(description="Additional metadata k-v pairs to filter by"),
    ] = None,
    workflow_ids: Annotated[
        Optional[
            List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]]
        ],
        Field(description="The workflow ids to find runs for"),
    ] = None,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> ApiResponse[List[str]]:
    """List workflow run external ids

    Same request as ``v1_workflow_run_external_ids_list`` but returns the
    full ``ApiResponse`` wrapper (status code, headers, data) instead of
    only the deserialized list. See ``v1_workflow_run_external_ids_list``
    for full parameter documentation.

    :return: ``ApiResponse`` whose ``.data`` is the list of matching
             external ids (UUID strings).
    """  # noqa: E501

    _param = self._v1_workflow_run_external_ids_list_serialize(
        tenant=tenant,
        since=since,
        statuses=statuses,
        until=until,
        additional_metadata=additional_metadata,
        workflow_ids=workflow_ids,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Status-code -> response model name map used for deserialization.
    _response_types_map: Dict[str, Optional[str]] = {
        "200": "List[str]",
        "400": "APIErrors",
        "403": "APIErrors",
        "501": "APIErrors",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    response_data.read()
    return self.api_client.response_deserialize(
        response_data=response_data,
        response_types_map=_response_types_map,
    )
@validate_call
def v1_workflow_run_external_ids_list_without_preload_content(
    self,
    tenant: Annotated[
        str,
        Field(
            min_length=36, strict=True, max_length=36, description="The tenant id"
        ),
    ],
    since: Annotated[datetime, Field(description="The earliest date to filter by")],
    statuses: Annotated[
        Optional[List[V1TaskStatus]],
        Field(description="A list of statuses to filter by"),
    ] = None,
    until: Annotated[
        Optional[datetime], Field(description="The latest date to filter by")
    ] = None,
    additional_metadata: Annotated[
        Optional[List[StrictStr]],
        Field(description="Additional metadata k-v pairs to filter by"),
    ] = None,
    workflow_ids: Annotated[
        Optional[
            List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]]
        ],
        Field(description="The workflow ids to find runs for"),
    ] = None,
    _request_timeout: Union[
        None,
        Annotated[StrictFloat, Field(gt=0)],
        Tuple[
            Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)]
        ],
    ] = None,
    _request_auth: Optional[Dict[StrictStr, Any]] = None,
    _content_type: Optional[StrictStr] = None,
    _headers: Optional[Dict[StrictStr, Any]] = None,
    _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
) -> RESTResponseType:
    """List workflow run external ids

    Same request as ``v1_workflow_run_external_ids_list`` but returns the
    raw HTTP response WITHOUT reading or deserializing the body; the caller
    is responsible for consuming/closing it. See
    ``v1_workflow_run_external_ids_list`` for full parameter documentation.

    :return: The raw response object; the body has not been read.
    """  # noqa: E501

    _param = self._v1_workflow_run_external_ids_list_serialize(
        tenant=tenant,
        since=since,
        statuses=statuses,
        until=until,
        additional_metadata=additional_metadata,
        workflow_ids=workflow_ids,
        _request_auth=_request_auth,
        _content_type=_content_type,
        _headers=_headers,
        _host_index=_host_index,
    )

    # Kept for parity with the other variants; not used to deserialize here.
    _response_types_map: Dict[str, Optional[str]] = {
        "200": "List[str]",
        "400": "APIErrors",
        "403": "APIErrors",
        "501": "APIErrors",
    }
    response_data = self.api_client.call_api(
        *_param, _request_timeout=_request_timeout
    )
    # Return the raw response; no .read()/deserialization on purpose.
    return response_data.response
def _v1_workflow_run_external_ids_list_serialize(
    self,
    tenant,
    since,
    statuses,
    until,
    additional_metadata,
    workflow_ids,
    _request_auth,
    _content_type,
    _headers,
    _host_index,
) -> RequestSerialized:
    """Build the serialized request for the external-ids listing endpoint.

    Fills the ``tenant`` path parameter and the filter query parameters,
    formatting ``since``/``until`` datetimes with the client's configured
    datetime format, then delegates to ``api_client.param_serialize`` for
    GET /api/v1/stable/tenants/{tenant}/workflow-runs/external-ids.
    Shared by all three ``v1_workflow_run_external_ids_list*`` variants.
    """

    _host = None

    # List-valued query params are expanded as repeated keys ("multi").
    _collection_formats: Dict[str, str] = {
        "statuses": "multi",
        "additional_metadata": "multi",
        "workflow_ids": "multi",
    }

    _path_params: Dict[str, str] = {}
    _query_params: List[Tuple[str, str]] = []
    _header_params: Dict[str, Optional[str]] = _headers or {}
    _form_params: List[Tuple[str, str]] = []
    _files: Dict[
        str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]]
    ] = {}
    # GET request: no body is ever sent.
    _body_params: Optional[bytes] = None

    # process the path parameters
    if tenant is not None:
        _path_params["tenant"] = tenant
    # process the query parameters
    if statuses is not None:

        _query_params.append(("statuses", statuses))

    if since is not None:
        # Format datetimes with the client's configured datetime format;
        # non-datetime values are passed through unchanged.
        if isinstance(since, datetime):
            _query_params.append(
                (
                    "since",
                    since.strftime(self.api_client.configuration.datetime_format),
                )
            )
        else:
            _query_params.append(("since", since))

    if until is not None:
        if isinstance(until, datetime):
            _query_params.append(
                (
                    "until",
                    until.strftime(self.api_client.configuration.datetime_format),
                )
            )
        else:
            _query_params.append(("until", until))

    if additional_metadata is not None:

        _query_params.append(("additional_metadata", additional_metadata))

    if workflow_ids is not None:

        _query_params.append(("workflow_ids", workflow_ids))

    # process the header parameters
    # process the form parameters
    # process the body parameter

    # set the HTTP header `Accept`
    if "Accept" not in _header_params:
        _header_params["Accept"] = self.api_client.select_header_accept(
            ["application/json"]
        )

    # authentication setting: cookie session or bearer token, per the spec
    _auth_settings: List[str] = ["cookieAuth", "bearerAuth"]

    return self.api_client.param_serialize(
        method="GET",
        resource_path="/api/v1/stable/tenants/{tenant}/workflow-runs/external-ids",
        path_params=_path_params,
        query_params=_query_params,
        header_params=_header_params,
        body=_body_params,
        post_params=_form_params,
        files=_files,
        auth_settings=_auth_settings,
        collection_formats=_collection_formats,
        _host=_host,
        _request_auth=_request_auth,
    )
@validate_call
def v1_workflow_run_get(
self,
@@ -121,6 +121,7 @@ AuthSettings = TypedDict(
{
"bearerAuth": BearerAuthSetting,
"cookieAuth": APIKeyAuthSetting,
"customAuth": BearerAuthSetting,
},
total=False,
)
@@ -537,6 +538,13 @@ class Configuration:
"cookieAuth",
),
}
if self.access_token is not None:
auth["customAuth"] = {
"type": "bearer",
"in": "header",
"key": "Authorization",
"value": "Bearer " + self.access_token,
}
return auth
def to_debug_report(self) -> str:
@@ -112,6 +112,7 @@ from hatchet_sdk.clients.rest.models.rate_limit_order_by_field import (
RateLimitOrderByField,
)
from hatchet_sdk.clients.rest.models.recent_step_runs import RecentStepRuns
from hatchet_sdk.clients.rest.models.registered_workflow import RegisteredWorkflow
from hatchet_sdk.clients.rest.models.reject_invite_request import RejectInviteRequest
from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest
from hatchet_sdk.clients.rest.models.replay_workflow_runs_request import (
@@ -158,6 +159,7 @@ from hatchet_sdk.clients.rest.models.tenant_alert_email_group_list import (
from hatchet_sdk.clients.rest.models.tenant_alerting_settings import (
TenantAlertingSettings,
)
from hatchet_sdk.clients.rest.models.tenant_environment import TenantEnvironment
from hatchet_sdk.clients.rest.models.tenant_invite import TenantInvite
from hatchet_sdk.clients.rest.models.tenant_invite_list import TenantInviteList
from hatchet_sdk.clients.rest.models.tenant_list import TenantList
@@ -176,12 +178,18 @@ from hatchet_sdk.clients.rest.models.tenant_version import TenantVersion
from hatchet_sdk.clients.rest.models.trigger_workflow_run_request import (
TriggerWorkflowRunRequest,
)
from hatchet_sdk.clients.rest.models.update_cron_workflow_trigger_request import (
UpdateCronWorkflowTriggerRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_alert_email_group_request import (
UpdateTenantAlertEmailGroupRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_invite_request import (
UpdateTenantInviteRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_member_request import (
UpdateTenantMemberRequest,
)
from hatchet_sdk.clients.rest.models.update_tenant_request import UpdateTenantRequest
from hatchet_sdk.clients.rest.models.update_worker_request import UpdateWorkerRequest
from hatchet_sdk.clients.rest.models.user import User
@@ -252,6 +260,9 @@ from hatchet_sdk.clients.rest.models.v1_trigger_workflow_run_request import (
from hatchet_sdk.clients.rest.models.v1_update_filter_request import (
V1UpdateFilterRequest,
)
from hatchet_sdk.clients.rest.models.v1_update_webhook_request import (
V1UpdateWebhookRequest,
)
from hatchet_sdk.clients.rest.models.v1_webhook import V1Webhook
from hatchet_sdk.clients.rest.models.v1_webhook_api_key_auth import V1WebhookAPIKeyAuth
from hatchet_sdk.clients.rest.models.v1_webhook_auth_type import V1WebhookAuthType
@@ -264,9 +275,6 @@ from hatchet_sdk.clients.rest.models.v1_webhook_hmac_encoding import (
V1WebhookHMACEncoding,
)
from hatchet_sdk.clients.rest.models.v1_webhook_list import V1WebhookList
from hatchet_sdk.clients.rest.models.v1_webhook_receive200_response import (
V1WebhookReceive200Response,
)
from hatchet_sdk.clients.rest.models.v1_webhook_source_name import V1WebhookSourceName
from hatchet_sdk.clients.rest.models.v1_workflow_run import V1WorkflowRun
from hatchet_sdk.clients.rest.models.v1_workflow_run_details import V1WorkflowRunDetails
@@ -22,6 +22,7 @@ from typing import Any, ClassVar, Dict, List, Optional, Set
from pydantic import BaseModel, ConfigDict, Field, StrictStr
from typing_extensions import Self
from hatchet_sdk.clients.rest.models.tenant_environment import TenantEnvironment
from hatchet_sdk.clients.rest.models.tenant_ui_version import TenantUIVersion
from hatchet_sdk.clients.rest.models.tenant_version import TenantVersion
@@ -43,7 +44,22 @@ class CreateTenantRequest(BaseModel):
description="The engine version of the tenant. Defaults to V0.",
alias="engineVersion",
)
__properties: ClassVar[List[str]] = ["name", "slug", "uiVersion", "engineVersion"]
environment: Optional[TenantEnvironment] = Field(
default=None, description="The environment type of the tenant."
)
onboarding_data: Optional[Dict[str, Any]] = Field(
default=None,
description="Additional onboarding data to store with the tenant.",
alias="onboardingData",
)
__properties: ClassVar[List[str]] = [
"name",
"slug",
"uiVersion",
"engineVersion",
"environment",
"onboardingData",
]
model_config = ConfigDict(
populate_by_name=True,
@@ -99,6 +115,8 @@ class CreateTenantRequest(BaseModel):
"slug": obj.get("slug"),
"uiVersion": obj.get("uiVersion"),
"engineVersion": obj.get("engineVersion"),
"environment": obj.get("environment"),
"onboardingData": obj.get("onboardingData"),
}
)
return _obj
@@ -0,0 +1,86 @@
# coding: utf-8
"""
Hatchet API
The Hatchet API
The version of the OpenAPI document: 1.0.0
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import json
import pprint
import re # noqa: F401
from typing import Any, ClassVar, Dict, List, Optional, Set
from pydantic import BaseModel, ConfigDict, Field, StrictStr
from typing_extensions import Self
class RegisteredWorkflow(BaseModel):
    """
    RegisteredWorkflow
    """  # noqa: E501

    id: StrictStr = Field(description="The workflow id registered on this worker.")
    name: StrictStr = Field(
        description="The name of the workflow registered on this worker."
    )
    __properties: ClassVar[List[str]] = ["id", "name"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print this model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of RegisteredWorkflow from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this model to a dict using field aliases.

        Unlike a plain ``model_dump(by_alias=True)``, fields whose value is
        ``None`` are omitted unless they are nullable and were explicitly set
        at construction time.
        """
        skip_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of RegisteredWorkflow from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({"id": obj.get("id"), "name": obj.get("name")})
@@ -23,6 +23,7 @@ from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
from typing_extensions import Self
from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
from hatchet_sdk.clients.rest.models.tenant_environment import TenantEnvironment
from hatchet_sdk.clients.rest.models.tenant_ui_version import TenantUIVersion
from hatchet_sdk.clients.rest.models.tenant_version import TenantVersion
@@ -49,6 +50,9 @@ class Tenant(BaseModel):
ui_version: Optional[TenantUIVersion] = Field(
default=None, description="The UI of the tenant.", alias="uiVersion"
)
environment: Optional[TenantEnvironment] = Field(
default=None, description="The environment type of the tenant."
)
__properties: ClassVar[List[str]] = [
"metadata",
"name",
@@ -57,6 +61,7 @@ class Tenant(BaseModel):
"alertMemberEmails",
"version",
"uiVersion",
"environment",
]
model_config = ConfigDict(
@@ -123,6 +128,7 @@ class Tenant(BaseModel):
"alertMemberEmails": obj.get("alertMemberEmails"),
"version": obj.get("version"),
"uiVersion": obj.get("uiVersion"),
"environment": obj.get("environment"),
}
)
return _obj
@@ -0,0 +1,38 @@
# coding: utf-8
"""
Hatchet API
The Hatchet API
The version of the OpenAPI document: 1.0.0
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import json
from enum import Enum
from typing_extensions import Self
class TenantEnvironment(str, Enum):
    """
    TenantEnvironment
    """

    """
    allowed enum values
    """
    LOCAL = "local"
    DEVELOPMENT = "development"
    PRODUCTION = "production"

    @classmethod
    def from_json(cls, json_str: str) -> Self:
        """Create an instance of TenantEnvironment from a JSON string"""
        # Decode the JSON scalar first, then coerce it into an enum member.
        decoded = json.loads(json_str)
        return cls(decoded)
@@ -0,0 +1,83 @@
# coding: utf-8
"""
Hatchet API
The Hatchet API
The version of the OpenAPI document: 1.0.0
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import json
import pprint
import re # noqa: F401
from typing import Any, ClassVar, Dict, List, Optional, Set
from pydantic import BaseModel, ConfigDict, StrictBool
from typing_extensions import Self
class UpdateCronWorkflowTriggerRequest(BaseModel):
    """
    UpdateCronWorkflowTriggerRequest
    """  # noqa: E501

    enabled: Optional[StrictBool] = None
    __properties: ClassVar[List[str]] = ["enabled"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print this model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of UpdateCronWorkflowTriggerRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this model to a dict using field aliases.

        Unlike a plain ``model_dump(by_alias=True)``, fields whose value is
        ``None`` are omitted unless they are nullable and were explicitly set
        at construction time.
        """
        skip_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of UpdateCronWorkflowTriggerRequest from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({"enabled": obj.get("enabled")})
@@ -0,0 +1,85 @@
# coding: utf-8
"""
Hatchet API
The Hatchet API
The version of the OpenAPI document: 1.0.0
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import json
import pprint
import re # noqa: F401
from typing import Any, ClassVar, Dict, List, Optional, Set
from pydantic import BaseModel, ConfigDict, Field
from typing_extensions import Self
from hatchet_sdk.clients.rest.models.tenant_member_role import TenantMemberRole
class UpdateTenantMemberRequest(BaseModel):
    """
    UpdateTenantMemberRequest
    """  # noqa: E501

    role: TenantMemberRole = Field(description="The role of the user in the tenant.")
    __properties: ClassVar[List[str]] = ["role"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print this model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of UpdateTenantMemberRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this model to a dict using field aliases.

        Unlike a plain ``model_dump(by_alias=True)``, fields whose value is
        ``None`` are omitted unless they are nullable and were explicitly set
        at construction time.
        """
        skip_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of UpdateTenantMemberRequest from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({"role": obj.get("role")})
@@ -19,7 +19,7 @@ import pprint
import re # noqa: F401
from typing import Any, ClassVar, Dict, List, Optional, Set
from pydantic import BaseModel, ConfigDict, Field, StrictStr
from pydantic import BaseModel, ConfigDict, Field, StrictBool, StrictStr
from typing_extensions import Annotated, Self
from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
@@ -50,6 +50,11 @@ class V1Filter(BaseModel):
payload: Dict[str, Any] = Field(
description="Additional payload data associated with the filter"
)
is_declarative: Optional[StrictBool] = Field(
default=None,
description="Whether the filter is declarative (true) or programmatic (false)",
alias="isDeclarative",
)
__properties: ClassVar[List[str]] = [
"metadata",
"tenantId",
@@ -57,6 +62,7 @@ class V1Filter(BaseModel):
"scope",
"expression",
"payload",
"isDeclarative",
]
model_config = ConfigDict(
@@ -122,6 +128,7 @@ class V1Filter(BaseModel):
"scope": obj.get("scope"),
"expression": obj.get("expression"),
"payload": obj.get("payload"),
"isDeclarative": obj.get("isDeclarative"),
}
)
return _obj
@@ -0,0 +1,86 @@
# coding: utf-8
"""
Hatchet API
The Hatchet API
The version of the OpenAPI document: 1.0.0
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import json
import pprint
import re # noqa: F401
from typing import Any, ClassVar, Dict, List, Optional, Set
from pydantic import BaseModel, ConfigDict, Field, StrictStr
from typing_extensions import Self
class V1UpdateWebhookRequest(BaseModel):
    """
    V1UpdateWebhookRequest
    """  # noqa: E501

    event_key_expression: StrictStr = Field(
        description="The CEL expression to use for the event key. This is used to create the event key from the webhook payload.",
        alias="eventKeyExpression",
    )
    __properties: ClassVar[List[str]] = ["eventKeyExpression"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Pretty-print this model using field aliases."""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Serialize this model to a JSON string using field aliases."""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of V1UpdateWebhookRequest from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this model to a dict using field aliases.

        Unlike a plain ``model_dump(by_alias=True)``, fields whose value is
        ``None`` are omitted unless they are nullable and were explicitly set
        at construction time.
        """
        skip_fields: Set[str] = set()
        return self.model_dump(
            by_alias=True,
            exclude=skip_fields,
            exclude_none=True,
        )

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of V1UpdateWebhookRequest from a dict"""
        if obj is None:
            return None
        if not isinstance(obj, dict):
            return cls.model_validate(obj)
        return cls.model_validate({"eventKeyExpression": obj.get("eventKeyExpression")})
@@ -31,6 +31,8 @@ class V1WebhookSourceName(str, Enum):
GENERIC = "GENERIC"
GITHUB = "GITHUB"
STRIPE = "STRIPE"
SLACK = "SLACK"
LINEAR = "LINEAR"
@classmethod
def from_json(cls, json_str: str) -> Self:
@@ -25,6 +25,7 @@ from typing_extensions import Annotated, Self
from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta
from hatchet_sdk.clients.rest.models.recent_step_runs import RecentStepRuns
from hatchet_sdk.clients.rest.models.registered_workflow import RegisteredWorkflow
from hatchet_sdk.clients.rest.models.semaphore_slots import SemaphoreSlots
from hatchet_sdk.clients.rest.models.worker_label import WorkerLabel
from hatchet_sdk.clients.rest.models.worker_runtime_info import WorkerRuntimeInfo
@@ -52,6 +53,11 @@ class Worker(BaseModel):
actions: Optional[List[StrictStr]] = Field(
default=None, description="The actions this worker can perform."
)
registered_workflows: Optional[List[RegisteredWorkflow]] = Field(
default=None,
description="The workflow ids registered on this worker.",
alias="registeredWorkflows",
)
slots: Optional[List[SemaphoreSlots]] = Field(
default=None, description="The semaphore slot state for the worker."
)
@@ -97,6 +103,7 @@ class Worker(BaseModel):
"lastHeartbeatAt",
"lastListenerEstablished",
"actions",
"registeredWorkflows",
"slots",
"recentStepRuns",
"status",
@@ -161,6 +168,13 @@ class Worker(BaseModel):
# override the default output from pydantic by calling `to_dict()` of metadata
if self.metadata:
_dict["metadata"] = self.metadata.to_dict()
# override the default output from pydantic by calling `to_dict()` of each item in registered_workflows (list)
_items = []
if self.registered_workflows:
for _item_registered_workflows in self.registered_workflows:
if _item_registered_workflows:
_items.append(_item_registered_workflows.to_dict())
_dict["registeredWorkflows"] = _items
# override the default output from pydantic by calling `to_dict()` of each item in slots (list)
_items = []
if self.slots:
@@ -208,6 +222,14 @@ class Worker(BaseModel):
"lastHeartbeatAt": obj.get("lastHeartbeatAt"),
"lastListenerEstablished": obj.get("lastListenerEstablished"),
"actions": obj.get("actions"),
"registeredWorkflows": (
[
RegisteredWorkflow.from_dict(_item)
for _item in obj["registeredWorkflows"]
]
if obj.get("registeredWorkflows") is not None
else None
),
"slots": (
[SemaphoreSlots.from_dict(_item) for _item in obj["slots"]]
if obj.get("slots") is not None
+212
View File
@@ -1,4 +1,5 @@
import asyncio
import time
from collections.abc import AsyncIterator
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING, Literal, overload
@@ -32,6 +33,7 @@ from hatchet_sdk.clients.v1.api_client import (
from hatchet_sdk.config import ClientConfig
from hatchet_sdk.utils.aio import gather_max_concurrency
from hatchet_sdk.utils.datetimes import partition_date_range
from hatchet_sdk.utils.iterables import create_chunks
from hatchet_sdk.utils.typing import JSONSerializableMapping
if TYPE_CHECKING:
@@ -179,6 +181,216 @@ class RunsClient(BaseRestClient):
"""
return await asyncio.to_thread(self.get_status, workflow_run_id)
def _perform_action_with_pagination(
    self,
    action: Literal["cancel", "replay"],
    statuses: list[V1TaskStatus],
    sleep_time: int = 3,
    chunk_size: int = 500,
    since: datetime | None = None,
    until: datetime | None = None,
    additional_metadata: dict[str, str] | None = None,
    workflow_ids: list[str] | None = None,
) -> None:
    """
    Perform a bulk action (cancel or replay) on runs matching the specified filters in chunks.

    The motivation for this method is to provide an easy way to perform bulk operations by filters over a larger number of runs than
    the API would normally be able to handle, with automatic pagination and chunking to help limit the pressure on the API.

    This method first pulls the IDs of the runs from the API, and then feeds them back to the API in chunks.

    :param action: The action to perform, either "cancel" or "replay".
    :param statuses: The statuses to filter runs by.
    :param sleep_time: The time to sleep between processing chunks, in seconds.
    :param chunk_size: The maximum number of run IDs to process in each chunk.
    :param since: The start time for filtering runs. Defaults to one day before `until`.
    :param until: The end time for filtering runs. Defaults to now (UTC).
    :param additional_metadata: Additional metadata to filter runs by.
    :param workflow_ids: The workflow IDs to filter runs by.
    """
    until = until or datetime.now(tz=timezone.utc)
    since = since or (until - timedelta(days=1))

    with self.client() as client:
        # Pull all matching run IDs up front, then act on them in chunks.
        external_ids = self._wra(client).v1_workflow_run_external_ids_list(
            tenant=self.client_config.tenant_id,
            since=since,
            until=until,
            additional_metadata=maybe_additional_metadata_to_kv(
                additional_metadata
            ),
            statuses=statuses,
            workflow_ids=workflow_ids,
        )

        chunks = list(create_chunks(external_ids, chunk_size))
        func = self.bulk_cancel if action == "cancel" else self.bulk_replay

        for ix, chunk in enumerate(chunks):
            self.client_config.logger.info(
                f"processing chunk {ix + 1}/{len(chunks)} of {len(chunk)} ids"  # noqa: G004
            )

            opts = BulkCancelReplayOpts(ids=chunk)

            func(opts=opts)

            # Throttle between chunks only — sleeping after the final chunk
            # (or when there is a single chunk) would just add pointless latency.
            if ix + 1 < len(chunks):
                time.sleep(sleep_time)
def bulk_replay_by_filters_with_pagination(
    self,
    sleep_time: int = 3,
    chunk_size: int = 500,
    since: datetime | None = None,
    until: datetime | None = None,
    statuses: list[V1TaskStatus] | None = None,
    additional_metadata: dict[str, str] | None = None,
    workflow_ids: list[str] | None = None,
) -> None:
    """
    Replay runs matching the specified filters, chunk by chunk.

    This helper exists so that bulk operations by filter can span more runs than a
    single API call could handle: run IDs are fetched first, then replayed in
    chunks of `chunk_size` to limit pressure on the API.

    :param sleep_time: The time to sleep between processing chunks, in seconds.
    :param chunk_size: The maximum number of run IDs to process in each chunk.
    :param since: The start time for filtering runs.
    :param until: The end time for filtering runs.
    :param statuses: The statuses to filter runs by; defaults to FAILED and CANCELLED.
    :param additional_metadata: Additional metadata to filter runs by.
    :param workflow_ids: The workflow IDs to filter runs by.
    """
    # Replays default to terminal, non-successful runs.
    effective_statuses = (
        statuses if statuses else [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED]
    )

    self._perform_action_with_pagination(
        action="replay",
        statuses=effective_statuses,
        sleep_time=sleep_time,
        chunk_size=chunk_size,
        since=since,
        until=until,
        additional_metadata=additional_metadata,
        workflow_ids=workflow_ids,
    )
def bulk_cancel_by_filters_with_pagination(
    self,
    sleep_time: int = 3,
    chunk_size: int = 500,
    since: datetime | None = None,
    until: datetime | None = None,
    statuses: list[V1TaskStatus] | None = None,
    additional_metadata: dict[str, str] | None = None,
    workflow_ids: list[str] | None = None,
) -> None:
    """
    Cancel runs matching the specified filters, chunk by chunk.

    This helper exists so that bulk operations by filter can span more runs than a
    single API call could handle: run IDs are fetched first, then cancelled in
    chunks of `chunk_size` to limit pressure on the API.

    :param sleep_time: The time to sleep between processing chunks, in seconds.
    :param chunk_size: The maximum number of run IDs to process in each chunk.
    :param since: The start time for filtering runs.
    :param until: The end time for filtering runs.
    :param statuses: The statuses to filter runs by; defaults to RUNNING and QUEUED.
    :param additional_metadata: Additional metadata to filter runs by.
    :param workflow_ids: The workflow IDs to filter runs by.
    """
    # Cancellation defaults to runs that are still in flight.
    effective_statuses = (
        statuses if statuses else [V1TaskStatus.RUNNING, V1TaskStatus.QUEUED]
    )

    self._perform_action_with_pagination(
        action="cancel",
        statuses=effective_statuses,
        sleep_time=sleep_time,
        chunk_size=chunk_size,
        since=since,
        until=until,
        additional_metadata=additional_metadata,
        workflow_ids=workflow_ids,
    )
async def aio_bulk_replay_by_filters_with_pagination(
    self,
    sleep_time: int = 3,
    chunk_size: int = 500,
    since: datetime | None = None,
    until: datetime | None = None,
    statuses: list[V1TaskStatus] | None = None,
    additional_metadata: dict[str, str] | None = None,
    workflow_ids: list[str] | None = None,
) -> None:
    """
    Replay runs matching the specified filters, chunk by chunk, without blocking the event loop.

    This helper exists so that bulk operations by filter can span more runs than a
    single API call could handle: run IDs are fetched first, then replayed in
    chunks of `chunk_size` to limit pressure on the API. The blocking work is
    offloaded to a thread via `asyncio.to_thread`.

    :param sleep_time: The time to sleep between processing chunks, in seconds.
    :param chunk_size: The maximum number of run IDs to process in each chunk.
    :param since: The start time for filtering runs.
    :param until: The end time for filtering runs.
    :param statuses: The statuses to filter runs by; defaults to FAILED and CANCELLED.
    :param additional_metadata: Additional metadata to filter runs by.
    :param workflow_ids: The workflow IDs to filter runs by.
    """
    # Replays default to terminal, non-successful runs.
    effective_statuses = (
        statuses if statuses else [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED]
    )

    await asyncio.to_thread(
        self._perform_action_with_pagination,
        action="replay",
        statuses=effective_statuses,
        sleep_time=sleep_time,
        chunk_size=chunk_size,
        since=since,
        until=until,
        additional_metadata=additional_metadata,
        workflow_ids=workflow_ids,
    )
async def aio_bulk_cancel_by_filters_with_pagination(
    self,
    sleep_time: int = 3,
    chunk_size: int = 500,
    since: datetime | None = None,
    until: datetime | None = None,
    statuses: list[V1TaskStatus] | None = None,
    additional_metadata: dict[str, str] | None = None,
    workflow_ids: list[str] | None = None,
) -> None:
    """
    Cancel runs matching the specified filters, chunk by chunk, without blocking the event loop.

    This helper exists so that bulk operations by filter can span more runs than a
    single API call could handle: run IDs are fetched first, then cancelled in
    chunks of `chunk_size` to limit pressure on the API. The blocking work is
    offloaded to a thread via `asyncio.to_thread`.

    :param sleep_time: The time to sleep between processing chunks, in seconds.
    :param chunk_size: The maximum number of run IDs to process in each chunk.
    :param since: The start time for filtering runs.
    :param until: The end time for filtering runs.
    :param statuses: The statuses to filter runs by; defaults to RUNNING and QUEUED.
    :param additional_metadata: Additional metadata to filter runs by.
    :param workflow_ids: The workflow IDs to filter runs by.
    """
    # Cancellation defaults to runs that are still in flight.
    effective_statuses = (
        statuses if statuses else [V1TaskStatus.RUNNING, V1TaskStatus.QUEUED]
    )

    await asyncio.to_thread(
        self._perform_action_with_pagination,
        action="cancel",
        statuses=effective_statuses,
        sleep_time=sleep_time,
        chunk_size=chunk_size,
        since=since,
        until=until,
        additional_metadata=additional_metadata,
        workflow_ids=workflow_ids,
    )
@retry
def list_with_pagination(
self,
@@ -0,0 +1,9 @@
from collections.abc import Generator
from typing import TypeVar
T = TypeVar("T")
def create_chunks(xs: list[T], n: int) -> Generator[list[T], None, None]:
    """Yield successive chunks of at most `n` items from `xs`.

    The final chunk may be shorter than `n`. An empty list yields nothing.

    :param xs: The list to partition.
    :param n: The maximum chunk size; must be a positive integer.
    :raises ValueError: If `n` is not positive. (Previously n=0 raised an
        opaque `range` error and n<0 silently yielded nothing, dropping items.)
    """
    if n <= 0:
        raise ValueError(f"chunk size must be a positive integer, got {n}")

    for i in range(0, len(xs), n):
        yield xs[i : i + n]
+1 -1
View File
@@ -1,6 +1,6 @@
[tool.poetry]
name = "hatchet-sdk"
version = "1.20.0"
version = "1.20.1"
description = ""
authors = ["Alexander Belanger <alexander@hatchet.run>"]
readme = "README.md"