diff --git a/api-contracts/events/events.proto b/api-contracts/events/events.proto index 4306518cd..0f7d7a14f 100644 --- a/api-contracts/events/events.proto +++ b/api-contracts/events/events.proto @@ -10,6 +10,8 @@ service EventsService { rpc List(ListEventRequest) returns (ListEventResponse) {} rpc ReplaySingleEvent(ReplayEventRequest) returns (Event) {} + + rpc PutLog(PutLogRequest) returns (PutLogResponse) {} } message Event { @@ -29,6 +31,25 @@ message Event { google.protobuf.Timestamp eventTimestamp = 5; } +message PutLogRequest { + // the step run id for the request + string stepRunId = 1; + + // when the log line was created + google.protobuf.Timestamp createdAt = 2; + + // the log line message + string message = 3; + + // the log line level + optional string level = 4; + + // associated log line metadata + string metadata = 5; +} + +message PutLogResponse {} + message PushEventRequest { // the key for the event string key = 1; diff --git a/api-contracts/openapi/components/schemas/_index.yaml b/api-contracts/openapi/components/schemas/_index.yaml index b8c8c2e15..0fa244f9c 100644 --- a/api-contracts/openapi/components/schemas/_index.yaml +++ b/api-contracts/openapi/components/schemas/_index.yaml @@ -152,3 +152,17 @@ PullRequest: $ref: "./workflow_run.yaml#/PullRequest" PullRequestState: $ref: "./workflow_run.yaml#/PullRequestState" +LogLine: + $ref: "./logs.yaml#/LogLine" +LogLineLevel: + $ref: "./logs.yaml#/LogLineLevel" +LogLineList: + $ref: "./logs.yaml#/LogLineList" +LogLineOrderByField: + $ref: "./logs.yaml#/LogLineOrderByField" +LogLineOrderByDirection: + $ref: "./logs.yaml#/LogLineOrderByDirection" +LogLineSearch: + $ref: "./logs.yaml#/LogLineSearch" +LogLineLevelField: + $ref: "./logs.yaml#/LogLineLevelField" diff --git a/api-contracts/openapi/components/schemas/logs.yaml b/api-contracts/openapi/components/schemas/logs.yaml new file mode 100644 index 000000000..fb701c5ea --- /dev/null +++ b/api-contracts/openapi/components/schemas/logs.yaml @@ -0,0 
+1,52 @@ +LogLine: + properties: + createdAt: + type: string + format: date-time + description: The creation date of the log line. + message: + type: string + description: The log message. + metadata: + type: object + description: The log metadata. + required: + - createdAt + - message + - metadata + +LogLineLevel: + type: string + enum: + - DEBUG + - INFO + - WARN + - ERROR + +LogLineList: + properties: + pagination: + $ref: "./metadata.yaml#/PaginationResponse" + rows: + items: + $ref: "#/LogLine" + type: array + +LogLineOrderByField: + type: string + enum: + - createdAt + +LogLineOrderByDirection: + type: string + enum: + - asc + - desc + +LogLineSearch: + type: string + +LogLineLevelField: + type: array + items: + $ref: "#/LogLineLevel" diff --git a/api-contracts/openapi/openapi.yaml b/api-contracts/openapi/openapi.yaml index 086fe5896..fd595c94a 100644 --- a/api-contracts/openapi/openapi.yaml +++ b/api-contracts/openapi/openapi.yaml @@ -90,6 +90,8 @@ paths: $ref: "./paths/workflow/workflow.yaml#/linkGithub" /api/v1/step-runs/{step-run}/create-pr: $ref: "./paths/workflow/workflow.yaml#/createPullRequest" + /api/v1/step-runs/{step-run}/logs: + $ref: "./paths/log/log.yaml#/withStepRun" /api/v1/step-runs/{step-run}/diff: $ref: "./paths/workflow/workflow.yaml#/getDiff" /api/v1/tenants/{tenant}/workflows/runs: diff --git a/api-contracts/openapi/paths/log/log.yaml b/api-contracts/openapi/paths/log/log.yaml new file mode 100644 index 000000000..e7c3c7e07 --- /dev/null +++ b/api-contracts/openapi/paths/log/log.yaml @@ -0,0 +1,75 @@ +withStepRun: + get: + x-resources: ["tenant", "step-run"] + description: Lists log lines for a step run. 
+ operationId: log-line:list + parameters: + - description: The step run id + in: path + name: step-run + required: true + schema: + type: string + format: uuid + minLength: 36 + maxLength: 36 + - description: The number to skip + in: query + name: offset + required: false + schema: + type: integer + format: int64 + - description: The number to limit by + in: query + name: limit + required: false + schema: + type: integer + format: int64 + - description: A list of levels to filter by + in: query + name: levels + required: false + schema: + $ref: "../../components/schemas/_index.yaml#/LogLineLevelField" + - description: The search query to filter for + in: query + name: search + required: false + schema: + $ref: "../../components/schemas/_index.yaml#/LogLineSearch" + - description: What to order by + in: query + name: orderByField + required: false + schema: + $ref: "../../components/schemas/_index.yaml#/LogLineOrderByField" + - description: The order direction + in: query + name: orderByDirection + required: false + schema: + $ref: "../../components/schemas/_index.yaml#/LogLineOrderByDirection" + responses: + "200": + content: + application/json: + schema: + $ref: "../../components/schemas/_index.yaml#/LogLineList" + description: Successfully listed the log lines + "400": + content: + application/json: + schema: + $ref: "../../components/schemas/_index.yaml#/APIErrors" + description: A malformed or bad request + "403": + content: + application/json: + schema: + $ref: "../../components/schemas/_index.yaml#/APIErrors" + description: Forbidden + summary: List log lines + tags: + - Log diff --git a/api/v1/server/handlers/logs/list.go b/api/v1/server/handlers/logs/list.go new file mode 100644 index 000000000..80cb00413 --- /dev/null +++ b/api/v1/server/handlers/logs/list.go @@ -0,0 +1,91 @@ +package logs + +import ( + "math" + "strings" + + "github.com/labstack/echo/v4" + + "github.com/hatchet-dev/hatchet/api/v1/server/oas/gen" + 
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers" + "github.com/hatchet-dev/hatchet/internal/repository" + "github.com/hatchet-dev/hatchet/internal/repository/prisma/db" +) + +func (t *LogService) LogLineList(ctx echo.Context, request gen.LogLineListRequestObject) (gen.LogLineListResponseObject, error) { + tenant := ctx.Get("tenant").(*db.TenantModel) + stepRun := ctx.Get("step-run").(*db.StepRunModel) + + limit := 1000 + offset := 0 + + listOpts := &repository.ListLogsOpts{ + Limit: &limit, + Offset: &offset, + StepRunId: &stepRun.ID, + } + + if request.Params.Search != nil { + listOpts.Search = request.Params.Search + } + + if request.Params.Levels != nil { + levels := make([]string, len(*request.Params.Levels)) + + for i, level := range *request.Params.Levels { + levels[i] = string(level) + } + + listOpts.Levels = levels + } + + if request.Params.OrderByField != nil { + listOpts.OrderBy = repository.StringPtr(string(*request.Params.OrderByField)) + } + + if request.Params.OrderByDirection != nil { + listOpts.OrderDirection = repository.StringPtr(strings.ToUpper(string(*request.Params.OrderByDirection))) + } + + if request.Params.Limit != nil { + limit = int(*request.Params.Limit) + listOpts.Limit = &limit + } + + if request.Params.Offset != nil { + offset = int(*request.Params.Offset) + listOpts.Offset = &offset + } + + listRes, err := t.config.Repository.Log().ListLogLines(tenant.ID, listOpts) + + if err != nil { + return nil, err + } + + rows := make([]gen.LogLine, len(listRes.Rows)) + + for i, log := range listRes.Rows { + rows[i] = *transformers.ToLogFromSQLC(log) + } + + // use the total rows and limit to calculate the total pages + totalPages := int64(math.Ceil(float64(listRes.Count) / float64(limit))) + currPage := 1 + int64(math.Ceil(float64(offset)/float64(limit))) + nextPage := currPage + 1 + + if currPage == totalPages { + nextPage = currPage + } + + return gen.LogLineList200JSONResponse( + gen.LogLineList{ + Rows: &rows, + Pagination: 
&gen.PaginationResponse{ + NumPages: &totalPages, + NextPage: &nextPage, + CurrentPage: &currPage, + }, + }, + ), nil +} diff --git a/api/v1/server/handlers/logs/service.go b/api/v1/server/handlers/logs/service.go new file mode 100644 index 000000000..1c96565a0 --- /dev/null +++ b/api/v1/server/handlers/logs/service.go @@ -0,0 +1,15 @@ +package logs + +import ( + "github.com/hatchet-dev/hatchet/internal/config/server" +) + +type LogService struct { + config *server.ServerConfig +} + +func NewLogService(config *server.ServerConfig) *LogService { + return &LogService{ + config: config, + } +} diff --git a/api/v1/server/oas/gen/openapi.gen.go b/api/v1/server/oas/gen/openapi.gen.go index 016da4747..57fe71210 100644 --- a/api/v1/server/oas/gen/openapi.gen.go +++ b/api/v1/server/oas/gen/openapi.gen.go @@ -28,13 +28,13 @@ const ( // Defines values for EventOrderByDirection. const ( - Asc EventOrderByDirection = "asc" - Desc EventOrderByDirection = "desc" + EventOrderByDirectionAsc EventOrderByDirection = "asc" + EventOrderByDirectionDesc EventOrderByDirection = "desc" ) // Defines values for EventOrderByField. const ( - CreatedAt EventOrderByField = "createdAt" + EventOrderByFieldCreatedAt EventOrderByField = "createdAt" ) // Defines values for JobRunStatus. @@ -46,6 +46,25 @@ const ( JobRunStatusSUCCEEDED JobRunStatus = "SUCCEEDED" ) +// Defines values for LogLineLevel. +const ( + DEBUG LogLineLevel = "DEBUG" + ERROR LogLineLevel = "ERROR" + INFO LogLineLevel = "INFO" + WARN LogLineLevel = "WARN" +) + +// Defines values for LogLineOrderByDirection. +const ( + LogLineOrderByDirectionAsc LogLineOrderByDirection = "asc" + LogLineOrderByDirectionDesc LogLineOrderByDirection = "desc" +) + +// Defines values for LogLineOrderByField. +const ( + LogLineOrderByFieldCreatedAt LogLineOrderByField = "createdAt" +) + // Defines values for PullRequestState. 
const ( Closed PullRequestState = "closed" @@ -339,6 +358,39 @@ type ListPullRequestsResponse struct { PullRequests []PullRequest `json:"pullRequests"` } +// LogLine defines model for LogLine. +type LogLine struct { + // CreatedAt The creation date of the log line. + CreatedAt time.Time `json:"createdAt"` + + // Message The log message. + Message string `json:"message"` + + // Metadata The log metadata. + Metadata map[string]interface{} `json:"metadata"` +} + +// LogLineLevel defines model for LogLineLevel. +type LogLineLevel string + +// LogLineLevelField defines model for LogLineLevelField. +type LogLineLevelField = []LogLineLevel + +// LogLineList defines model for LogLineList. +type LogLineList struct { + Pagination *PaginationResponse `json:"pagination,omitempty"` + Rows *[]LogLine `json:"rows,omitempty"` +} + +// LogLineOrderByDirection defines model for LogLineOrderByDirection. +type LogLineOrderByDirection string + +// LogLineOrderByField defines model for LogLineOrderByField. +type LogLineOrderByField string + +// LogLineSearch defines model for LogLineSearch. +type LogLineSearch = string + // PaginationResponse defines model for PaginationResponse. type PaginationResponse struct { // CurrentPage the current page @@ -711,6 +763,27 @@ type WorkflowVersionMeta struct { WorkflowId string `json:"workflowId"` } +// LogLineListParams defines parameters for LogLineList. 
+type LogLineListParams struct { + // Offset The number to skip + Offset *int64 `form:"offset,omitempty" json:"offset,omitempty"` + + // Limit The number to limit by + Limit *int64 `form:"limit,omitempty" json:"limit,omitempty"` + + // Levels A list of levels to filter by + Levels *LogLineLevelField `form:"levels,omitempty" json:"levels,omitempty"` + + // Search The search query to filter for + Search *LogLineSearch `form:"search,omitempty" json:"search,omitempty"` + + // OrderByField What to order by + OrderByField *LogLineOrderByField `form:"orderByField,omitempty" json:"orderByField,omitempty"` + + // OrderByDirection The order direction + OrderByDirection *LogLineOrderByDirection `form:"orderByDirection,omitempty" json:"orderByDirection,omitempty"` +} + // EventListParams defines parameters for EventList. type EventListParams struct { // Offset The number to skip @@ -854,6 +927,9 @@ type ServerInterface interface { // Get diff // (GET /api/v1/step-runs/{step-run}/diff) StepRunGetDiff(ctx echo.Context, stepRun openapi_types.UUID) error + // List log lines + // (GET /api/v1/step-runs/{step-run}/logs) + LogLineList(ctx echo.Context, stepRun openapi_types.UUID, params LogLineListParams) error // Create tenant // (POST /api/v1/tenants) TenantCreate(ctx echo.Context) error @@ -1177,6 +1253,70 @@ func (w *ServerInterfaceWrapper) StepRunGetDiff(ctx echo.Context) error { return err } +// LogLineList converts echo context to params. 
+func (w *ServerInterfaceWrapper) LogLineList(ctx echo.Context) error { + var err error + // ------------- Path parameter "step-run" ------------- + var stepRun openapi_types.UUID + + err = runtime.BindStyledParameterWithLocation("simple", false, "step-run", runtime.ParamLocationPath, ctx.Param("step-run"), &stepRun) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter step-run: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + ctx.Set(CookieAuthScopes, []string{}) + + // Parameter object where we will unmarshal all parameters from the context + var params LogLineListParams + // ------------- Optional query parameter "offset" ------------- + + err = runtime.BindQueryParameter("form", true, false, "offset", ctx.QueryParams(), ¶ms.Offset) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter offset: %s", err)) + } + + // ------------- Optional query parameter "limit" ------------- + + err = runtime.BindQueryParameter("form", true, false, "limit", ctx.QueryParams(), ¶ms.Limit) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter limit: %s", err)) + } + + // ------------- Optional query parameter "levels" ------------- + + err = runtime.BindQueryParameter("form", true, false, "levels", ctx.QueryParams(), ¶ms.Levels) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter levels: %s", err)) + } + + // ------------- Optional query parameter "search" ------------- + + err = runtime.BindQueryParameter("form", true, false, "search", ctx.QueryParams(), ¶ms.Search) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter search: %s", err)) + } + + // ------------- Optional query parameter "orderByField" ------------- + + err = runtime.BindQueryParameter("form", true, false, "orderByField", 
ctx.QueryParams(), ¶ms.OrderByField) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter orderByField: %s", err)) + } + + // ------------- Optional query parameter "orderByDirection" ------------- + + err = runtime.BindQueryParameter("form", true, false, "orderByDirection", ctx.QueryParams(), ¶ms.OrderByDirection) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter orderByDirection: %s", err)) + } + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.LogLineList(ctx, stepRun, params) + return err +} + // TenantCreate converts echo context to params. func (w *ServerInterfaceWrapper) TenantCreate(ctx echo.Context) error { var err error @@ -1984,6 +2124,7 @@ func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL router.GET(baseURL+"/api/v1/meta/integrations", wrapper.MetadataListIntegrations) router.POST(baseURL+"/api/v1/step-runs/:step-run/create-pr", wrapper.StepRunUpdateCreatePr) router.GET(baseURL+"/api/v1/step-runs/:step-run/diff", wrapper.StepRunGetDiff) + router.GET(baseURL+"/api/v1/step-runs/:step-run/logs", wrapper.LogLineList) router.POST(baseURL+"/api/v1/tenants", wrapper.TenantCreate) router.GET(baseURL+"/api/v1/tenants/:tenant/api-tokens", wrapper.ApiTokenList) router.POST(baseURL+"/api/v1/tenants/:tenant/api-tokens", wrapper.ApiTokenCreate) @@ -2495,6 +2636,42 @@ func (response StepRunGetDiff404JSONResponse) VisitStepRunGetDiffResponse(w http return json.NewEncoder(w).Encode(response) } +type LogLineListRequestObject struct { + StepRun openapi_types.UUID `json:"step-run"` + Params LogLineListParams +} + +type LogLineListResponseObject interface { + VisitLogLineListResponse(w http.ResponseWriter) error +} + +type LogLineList200JSONResponse LogLineList + +func (response LogLineList200JSONResponse) VisitLogLineListResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", 
"application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type LogLineList400JSONResponse APIErrors + +func (response LogLineList400JSONResponse) VisitLogLineListResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(400) + + return json.NewEncoder(w).Encode(response) +} + +type LogLineList403JSONResponse APIErrors + +func (response LogLineList403JSONResponse) VisitLogLineListResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(403) + + return json.NewEncoder(w).Encode(response) +} + type TenantCreateRequestObject struct { Body *TenantCreateJSONRequestBody } @@ -3802,6 +3979,8 @@ type StrictServerInterface interface { StepRunGetDiff(ctx echo.Context, request StepRunGetDiffRequestObject) (StepRunGetDiffResponseObject, error) + LogLineList(ctx echo.Context, request LogLineListRequestObject) (LogLineListResponseObject, error) + TenantCreate(ctx echo.Context, request TenantCreateRequestObject) (TenantCreateResponseObject, error) ApiTokenList(ctx echo.Context, request ApiTokenListRequestObject) (ApiTokenListResponseObject, error) @@ -4207,6 +4386,32 @@ func (sh *strictHandler) StepRunGetDiff(ctx echo.Context, stepRun openapi_types. 
return nil } +// LogLineList operation middleware +func (sh *strictHandler) LogLineList(ctx echo.Context, stepRun openapi_types.UUID, params LogLineListParams) error { + var request LogLineListRequestObject + + request.StepRun = stepRun + request.Params = params + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.LogLineList(ctx, request.(LogLineListRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "LogLineList") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(LogLineListResponseObject); ok { + return validResponse.VisitLogLineListResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("Unexpected response type: %T", response) + } + return nil +} + // TenantCreate operation middleware func (sh *strictHandler) TenantCreate(ctx echo.Context) error { var request TenantCreateRequestObject @@ -5168,111 +5373,114 @@ func (sh *strictHandler) WorkflowVersionGetDefinition(ctx echo.Context, workflow // Base64 encoded, gzipped, json marshaled Swagger object var swaggerSpec = []string{ - "H4sIAAAAAAAC/+xdWXPbuJb+KyzOPMxUSZbtJH17/ObE7rTvOE7Kdjo1k3KlIBKS0KYINgDa8aT036ew", - "EiQBLtqu1NFTFBPLwcF3NuAA+BFGeJ7hFKaMhmc/QhrN4ByIn+efri4JwYT/zgjOIGEIii8RjiH/N4Y0", - "IihjCKfhWQiCKKcMz4PfAYtmkAWQ1w5E4UEIv4N5lsDw7OT18fEgnGAyByw8C3OUsl9eh4OQvWQwPAtR", - "yuAUknAxKDdf7836fzDBJGAzRGWfdnfheVHwCSqa5pBSMIVFr5QRlE5Fpzii3xKUPrq65H8PGA7YDAYx", - "jvI5TBlwEDAI0CRALIDfEWW0RM4UsVk+PorwfDSTfBrG8En/dlE0QTCJ69RwGsSngM0AszoPEA0ApThC", - "gME4eEZsJugBWZagCIyT0nSEKZg7GLEYhAT+lSMC4/Dsa6nrB1MYj/+EEeM0aqzQOlig+TticC5+/DuB", - "k/As/LdRgb2RAt7IoG5hugGEgJcaSapdDzUfIAN1WkDOZh0I4JXPedHFwt/6uWqr3INoRf6sTxfNswwT", - "Pim8URrgScApgilDkYCRPTFfwzGgKAoH4RTjaQL5SA0HayCpscpH9hWXLwK0UFXmKuXwcIDteQbZDCqI", - "o6IJjjVVKcCpkAuUUgbSyMLUGOMEgpQTIcDm5A3/whkimyhorMtOK1gVovVgPAi5hRTnJIJupEQEcuk5", - 
"Z25qGZpDS+6Iait4BjRQVUuUnx6fng5PTocnr+5Pj8+Ofzl7/evRr7/++r+hpQljwOCQN+xSAsijAVAs", - "mWYRMQhQGnz+fHURqKZtQsbj05PXvx7/Y3j6+hc4fP0KvBmC0zfx8PXJP345iU+iyeS/oE1UniM+kjn4", - "fg3TKUf8q18G4Ryl9n9r1OZZvCz3EkBZoOqvk4UVjIhRFZNsk+zByz1+hC6R+Z4hAqlrqF9mUIrE+aer", - "gPHqgSp91Hne55CBGEiEtmitEqC9snZfkTVD21F5mk/fvGnjoaFtYETOMMPJxCiCGbtKnxCDt/CvHFJW", - "5ycSnyVne4K2D0gH4fchBhkacvdkCtMh/M4IGDIwFVQ8gQTxeQnPzIgHQhQWNSBJel3jfSfgpaHjHbF7", - "ns7lLEm/YqVpEu13oY9mOKWwTiDTyK8jqURWMxmyFT8dn/IkUTz6jeD5HYPZbe4QuDEBaTS7UUxr7tMq", - "+2A6uocpSNtgCOcAJe4xi09afHLKrSIOJAzqXFgCZ7JrMRScwDbBl6P5AOdjSG55+ZqfJJpTjfnZL9vp", - "CdKqMmGikXVwQQyDJvnU3Sn/sv5OB8oLF4BZeNwKQZSLj5dPMHVw7hG+uMfwCF+MeENe92jNBkAyphuA", - "ivJXsZvcq4syw6sxhopAvAN5xuRxkuDn2zy9y+dzQF7aKBMM/VKv1mCHOLOtgTzoabkALidP87U+WP6l", - "PDnBf/zz7uNNMH5hkP5nu7YTTZvu/3s1DOg2rpFLNDMwRalx6JsY+smUNMpeaJnn7uGZGU495tCE7gqV", - "DSR+JDEkb18uEIGRJgmm+ZzPHKA85uJTZUl5ZS5U/d90ZK7rFg6lt+odBCSaOWM4H95rvJwA5IzShDrO", - "uSXgoipLBSRPy/6mf8Elg2nMaWlpWBXr0zLJ07RDy6pYn5ZpHkUQxu3sMAW7t87x8h4y5YpcoMnE7yTF", - "aDLpDlCrydaFDtky1yXvRfx7nmVXPMZOEk8UD6II5yn7Bp4AA+RbThIn3HSx1O1KDUJk9fKNQsZQOqXe", - "5pY2VH5t7iegQv3ANWaXjZYcfCvcQp9r2cAQ+i2GE5AnzPpsVjecvqemz6rqp+sWZrhOFYEZ9tMkvuLn", - "FJJ2d9gqO7CadRH0Tzx2YLxpQVaYTWtJVjkLf+Lx0YYC21qblMGsnwzWha/sBtXX2dAc4py5h68+tg39", - "CRKKcOrswS8Mhiy7ARN5y6F7ZtIZR0UgjWCS6FWabssRppLZGfAXuYWASqA4lrRTRGf9uv5TIrJpRjlo", - "ZUnP7K0AOgJpWe4LDlMGCOs3GMoAy2mH8XA3QJZV+L7N095mZgmUR4+QNItAn+Favn8byZb/U6m5vLyU", - "G9EAMbPgl5o7M03aw/t0eXNxdfM+HIS3n29u5K+7z+/eXV5eXF6Eg/C386tr8ePd+c27y2v+2+UKXqP0", - "sdD5FDFMXryx9xQxXqqwWnXNQ0wrgbQ7TsWjGrrxxvJWM1yvNDXyUZucxlaEsXE2Y9t2X8jpIKfXQnRt", - "qa7UZZkflYENKlx3YYQHOu5dla47XdWqDjlVnYglOup3P7caXpnVcGeExSl2eqq7Qr7bjW5zwy0SVX8+", - "TNhOJiwNugd5CnceRFi6Y8n2ha/pad1aim2aM6tU586tpts5bnfA4x/H3Nv7BD/CKCcEpuxbBqYwPDsd", - "hCn8rv/3ahCm+Vz8h4ZnJ8c83i07RKXKrn0rVSLIZO6A6fi0U6Rq0eLcAIXf6y2/6tZyMS7ndhtmILHj", - "d15ULDsliDK5VFqkZxx3CYwdyLdntgkrbwGFhSmr2QWr5O8QxN1KXl1YJewFjaLIjRh+azFu8WEPEMvy", - 
"5TbuEUv8wZo0aDdN8Zws8rF7UGdXqPVS5ZSDVhenfFMx8Eymg40PZVgY3mpnCmeQK9MowbS0T19w4xZy", - "eP08W4a3MEvAi1gI9G9N8a9XcVnnbnuHvzk1R1P4IIZEuCMtApGGKcxyV3BV4xwvxlsVwbtj5Uu7EPWw", - "dIaSmMCye9YyrA2FkhkgOt2uOyUEghiME+j3leV3kxECAx4uOn3vta1weHrwB2XWKEoRmo7I1ARKY3nl", - "zt3x7gyvtqJxzi4zXDI1loVY07rHciCE3j6XWUcp6jSMtyqPdtyW5exOgNv5/U+z3NS+slGUX7+Q4Zz5", - "hrCk/P2VwxyeT5g0yt2YvfZVI1mlYea6rSwpGSovLXVdMOVlfcqjg2bpM2JTpWHEz5j4Fqs6rQ4ZBJqR", - "Na4M2Vs3vs3+OpBxjCYIuvmCCeJxTdI+ALm9bcpb7T4UlDUtWqlf387v7q7e33y4vLkPB6H8j1i1WmlR", - "697kG5SZsvEcNl8GyMopJO0Zb95sEDvLaHPpRQuTc+e311ampWxmq1mIy+UwtSWjqBQUFLuzUPRnP9dk", - "Cf9KqGqhlIO3BEpKyVfFXNkpKi3Y2YFsihKUq6aRR0VTPJSCGt7ydkXAY8/pWlXCaoDqng3FRa+t9GcK", - "iazxKR8nKGqCgmivIQ3PpnlnJl3N3zKTfqvmSRuhj19uLm+5tbn4cHUTDsIPlx/eXt66DQlB0ykk1lbQ", - "+gLHzyLPu1MO6FrSL73z/Zm6BKPVMIA4JpBS20CU9LjWOHU7wT/8AYnxQ2rJ6uqEh7I6M0CDJ1Wc/xWR", - "MgVHzgMeG7H1MaJZAl5KNl8PvLcqLvPBNzPXeIrS5bODl5ullZKFM0DpMyYeg6m/NrNvCQJMtwtf4rEp", - "4eP1LZwiyvi/e8Tubp6pB6U7OFv6BEfXSbMVH52hjO6rzarZ8C3q5E2oPNmZa9q+iEDZt2rqiSDUR+lX", - "y1A7iEAaZJDw8XF6ui+iJEBsJRA2hoCds8aYpehOHAmjMGUBCGa69tF6z9dtPEaVYzlyLxJFMNVZptS3", - "vMvLiAVXkb9aHDguGl4tJagl1PUDagcEXyHbmY6gvUhXOmOW4Jc5bI8KdBsXpsY7nE7QtPWUuicpUick", - "HXkS3Twg4F9cTXTikUqOc4lk/7SsrYiLl0Pawjl0B5guzyE9xnvgVF4q67IfKnl7f8iK5ljOOsTOicaf", - "I3Ns6suCXyrnx9mab9mp2FuT7QXnWRbYaWVHjp3VbWTK98hk8w/5wcKWzKioHknVIhVcXTinRtd2m4SV", - "tky3bE2Exeh0r8KXcm5r9VSIcOO82R7r3doziyEgjhFnAUg+WeQwkkPHAOT+R3f2FHt3VQ25wgRvLIfa", - "Pk9ldruat6nk6hOM3770aPzeqmXlLSvd39NUOFpYPfv5D+vUgOJdebAPzejeET/P8kJ6CefGcrk9IHBc", - "KYLTT2Lr2QM8XuAumsE496SPwacOvqo5+6iycDaTvdITjqZSE8a43+W4ayrBZD2O9cqep3vJRFLYODAJ", - "i3eEQ3/iRkZDpsI35GF2W4cqj2ziySH75tutXrFb6h5hfzGv8M2VGvNUy+To0bDhz3ptmdS9bvYV6vib", - "iif6s9myKWUu68hxlXhwlZQbEldyY1DKXp06U5aZBZUeM0atQMwtyupjJ4XwbC0NdHUIdZ2eClDTrLlU", - "auihfbIvIHcI3ed9CXguf3YEVuA5+J/zD9dBbAr213flfjoQ7b5aa0sI+wlQwp1gGOUEsZe74t65MQQE", - "En09nbyQLjxTfy4GOGNMZHNFGD8iqIsjziH5J70EcRbWLicEGRJ3QCxE0DHBbibreyDPP13xqjIrPiz/", - 
"1cxSeHJ0fHQsJjmDKchQeBa+Ojo5OhbeA5uJoY1AhkYJehJmfAodC8jvIaNiSnmpFFIaGFeXY9BE9+G1", - "+v5ejIsoj1T0cnp8XG/4dwgSNhMq8o3r+w1mps/SzIRnXx8GIdV3OXAKi4J6Leuraj+awegxfOD1xVgJ", - "BPFL+2B5MdQ02ltdYJ3DFcQFDAdAXOwVMAImE5UE0TR6Q23r8J9O+D9DcXUUHf0wvxdCq2Dq4MktfMKP", - "MACpdevaBJMAqPSdGmvOMyQOs8nEAFldeqxgDpkwUV8br74SFxWEZwKlhcwYWkNb2mUITk367EpZ+IuH", - "2ky+rjPkLo8iSOkkT5KXgIjhxTKdSR/hey0nOMIpU/GFuiqUtzD6UyUzF0R3ub5Tba5VF5DmIOFDhnGA", - "STAGcUCKs2Cvj19th4zfMBmjOIbyzoYCmwo6fGLv1cxpeBZ/exiE34f6xkLxzeCqmPISgqWPOvoh/l2M", - "tOnzSbS8XFbfPwTS4l6gMm7NvUZSpFvxKm8xEihzwFWGltuE6vowV9zw5JjsCvwZQfBJCYDkiJiPgxSU", - "NLTFmUIG5MJCA/4lhkrYl0vNQ5BlI3uZnHoF4BpR5ltcr5s1s6rPq11Vim4Mbx0OOPcDYnmQu4TFk+2Q", - "8TkFOZthgv4PxrLjN9vp+ANkMxwHKWYBSBL8rC8eLLyXHyUH+evDouTOtMFVy44s0k02Rj+ms6H9l8VI", - "7It1lhmzi4Zgi8iIA+RdjIdNjteGVMjeU2viO17fT6RLc3CQ6P2V6IowVQW6Zg2rQrCSyIu/819DsR2+", - "KP7PRW4xGqs7JjqrBlOhUS28LUrtm2YYdEkr8BJZsLqRxL6d6jvg/H2qEt273I4GrN1h0k8JGrQdFOD+", - "KkBLZaxD+Y2e4XiG8aN/Bcfqe5rgMUgCXcWttOTCzXtR9Isp2b7EVQJuRjD/D4xNZwfM7hJmy4uIEiHA", - "hZB2j1sjcPRD/Vh0wqI6EdgFizKdvMBiqxFVjXrt57MF66161AeJ+dtJTA3HTRIzh82LlTTQu1fmlKze", - "37HeASpLygdVw78VsS72qazJPi6L2YzbFTC37KXYKV9qHjV/6zM5sh5WaokZQJIEpdK+WZQrb6WCG3VM", - "Xbcm9prhhA8PT8qj26XZLntilUlonmTKYDYkOQ8o9c/FSF5KP8yI38zJJ0ACEGR5kmhq1LaZ2f6uIkCd", - "JJEmT73hQrpYO32IxWvuNO2btnfyCjMsd1jXMvGNT9k4sCAj6CxnAcPq4bDKLNR4sNiggPUlvyRm6vED", - "IWSlEfzcmyu819fb6fUGc6HN07iiSJR4V2ClVYnJW2nY2jES2a5uYnUxTvP+JppMlH4x2mAM2TNUL6bN", - "MWX6zBv/BlKJqwkiVPzlyKeO3kMmrubZJz20IWn2PF3Rz1zG6omKgwT/KyWYy00sYb262MrPtM0doAEI", - "Uvjsy5uRQa4sGm7SmpZfJvMYURXVGCu6VbOpL43pYSCZuWfmbyxYfdIOlI0yYNMwV7ytgdyF6NEP+WNh", - "ZY01hzkmiYt2SxITZ2A6mBZzP5XbsJhe9nWDtH4jfT+rooMKesi8qcR7JtOM9ks/e1gMWoK73hmRRrfv", - "B9w3ZX+qD7gu1N0pG43Baq+yHuRrZflSgrBkfmezwSkOInmNDRWLarJgSQA9uZ37YmsGDe/uMRzQR5Rp", - "yv7KIXkpSMOTCRWLwA5S/E8PNHeXoDliwfjF06X4vGqP52YR8RG+UN7rBCUMEn+3vFyp11Uf22ygyTqk", - "34m2Qlf0JtC6LaADiSLmFs9fBoIEi7gJJh7qZIUSaa0sU29sOkj4Iq5CxYE4XuNnCbbf9+zVdellUA8P", - 
"ZOexeXy0kYYLq1h/OoraW0jBFgqrzU5xlNr51wcLVfUAFVs6J153MUwjoYA6Wiep1TpYKP0k8U8eEJV4", - "0Rf/gtkHGXDJQKCs5jrlgIgHVpqOj/HvNADGnMqKHgnQh8ZEoz9voOR4tsazTqdPIQnPhGi+bW+prruh", - "ksQdTJX3sJy4n3K9xkpe2E6bM04K0Sxd8049K9TWNeo/uZ2q8aPfmkKF24el65LFqmGxbQG768JdeRtG", - "ddCI9cO6nbVvVL7jvXn3SD2P0WcT6WQj0rnEVpIGxkEsnTtKhdx0l8sOlkr/YSj/v5BCnEDmuGjqQvyd", - "mqCqiyjLOnu7CliWq2bahoYd+25bW6VXImSXpbckSBKEBVx92QbleRR2DTDXvbMyauonCbLOQRJ22e76", - "31bpandzPcvbTt7oKLmSvr2RXDkh/SW3yfLN5ZMPPWM0Xcst4tarR4cYrcyPpWI0ze2DM+iK0QosrscX", - "dKXBNmbA2kmvLxLHvlzWvTV4f/fk2q5Z8WX51Fw5ZNRuK6O2hMVnQMWROV+KrZmeQjXweQ74RC+bZtus", - "J0YEEnVxvGc/Qrz2AmzSGo7hiOIHnbGLOyT1V/DbzgPJe2Rcw13shGI77I807o9wIG9WoTybB738rkaS", - "qBeiWjKcrbecfnIX3OLEIelyLdcJKgBWDq9AsqzDrRmtjKn93zbH2yTHcelsFQj1rMM+++GlAXvvl7A4", - "uMdSa54GWUpsD365W3INb/qdPSthanl5HmWkw1UF9tFWWjm47vSaK+/aWEee6UHUN0Vg6XYByuRKrzPp", - "V33rhm5r8u5Exc2ff7LxsuQRDb1AWoLuQf9UluzK3Nm4BqKdnGlRspv3cHCoC9N8cKnXb5j7yURHIRiR", - "PO0uCcV7xF186cOppl091WSn5/I+p5CZqT3ydKzfmNuWZ9OdMuvhnL0LYFZQlIIvB2Xpj2JWUJg5hYSO", - "opwQNZTmR3lUQfFsf00jfqaQvIfsnWpsg7jiPfUEk6D4cJvj/tzZy0FegZvGuJh+B4zVXagRSJIxiB69", - "cH6H55nMW+TI+Mj7D5zXwfGO1FW8oumPnJfvdPMVgL86Pm25XjRS/cb1fmcQxCoDJcGReXvWf1n2ohcz", - "9YjLnXbkp3jH2MvMO/51OU6Kqv3ZqN5V3joTBbk9OYjxNIGbQaRoeocRuQ4ASvatGYAF43YOgKvire2w", - "UXEqtny2w9x022rgeQt2euFGL0ftfbrHOon6Ux3t6eI+9rosv/Xojxd7I/lyoj/941x875cpLets6Eo2", - "2XgtuXfR+/pwOfLDEZbG4OVcPazZcoTFjy8CxUuxDelF/Hs/fMk64abyZHjja8CXHPkBXy1JKpxJS+Ar", - "wVPUkLV2jac0QGkAhG08anAwrkVDGzqOwE0wb39LN3d1irQTPJ3COEDpIcDe3ecSBGq6RtIJnuKctQgD", - "zlk3aeBN7QhGOSkHkO7Ry00CPV1hq05BzFDWIwSyKnULg+zzLKKa2v/ZKMDdnfaPh2wWHWKiZWIim4Pt", - "kCRwyueANPmrsgRtVKYbvZSad6DJ2CXHQjPvsIa/Fy6GhlC7ulbZszIvDpIuGa4ORSwzbjtmsso2GnPI", - "Wp7I3PX07iW2V3dMnnYmr7tHWvdAQ6cGcJl3YlI/O6dxd0o76YF6lWXQnDu553lYS6YWHNDvzipYMiux", - "XQZGCUofh3JjsSG8ROljAAJZzH51mOGm17s0qSrwROmjev9vnwRl/d5dwYhbw8mu5/USz0xs9fheZyHn", - "1CoJr1N8OJ78L37wR0i1C0kbUjWMoOm0KfK6lwXUZXRLHaPqfi/dLiiY5kTEJ0gowulRcDURLj/NOT5g", - 
"PJC5/YBBynShANFgAlk0g7EvW1GVDHdePyoYWLPa5zRz5UjO9rVi33cMD2fDdkkpah3Uciqt7XR1D7Wo", - "5JJ2DUe0xHdSiX/IwnsUnfwddOKGNYya1GXTt/WgD7pmB15crM3KxtwvrWdGMZygFOlkuD4qp6jZV/tc", - "FH0e9NDfTA9Zc7uaRrLwdVBOu6ic7AlaXk9VN/rGEBBIzEbfwLn1B8mT1hc5ScKzMFw8LP4/AAD//1AM", - "wFVa7gAA", + "H4sIAAAAAAAC/+x9W3PbuJL/V2Hx/3/YrZIs20nmzPrNiT05PsdxUrYzqd2UKwWRkIQxRXAA0I43pe++", + "hStBEuBFt5Emeopi4tJo/LrR3WgAP8IIzzOcwpTR8OxHSKMZnAPx8/zT1SUhmPDfGcEZJAxB8SXCMeT/", + "xpBGBGUM4TQ8C0EQ5ZThefBPwKIZZAHktQNReBDC72CeJTA8O3l9fDwIJ5jMAQvPwhyl7JfX4SBkLxkM", + "z0KUMjiFJFwMys3Xe7P+H0wwCdgMUdmn3V14XhR8goqmOaQUTGHRK2UEpVPRKY7otwSlj64u+d8DhgM2", + "g0GMo3wOUwYcBAwCNAkQC+B3RBktkTNFbJaPjyI8H80kn4YxfNK/XRRNEEziOjWcBvEpYDPArM4DRANA", + "KY4QYDAOnhGbCXpAliUoAuOkNB1hCuYORiwGIYF/5ojAODz7Wur6wRTG4z9gxDiNGiu0DhZo/o4YnIsf", + "/5/ASXgW/r9Rgb2RAt7IoG5hugGEgJcaSapdDzUfIAN1WkDOZh0I4JXPedHFwt/6uWqr3INoRf6sTxfN", + "swwTPim8URrgScApgilDkYCRPTFfwzGgKAoH4RTjaQL5SA0HayCpscpH9hWXLwK0UFXmKuXwcIDteQbZ", + "DCqIo6IJjjVVKcCpkAuUUgbSyMLUGOMEgpQTIcDm5A3/whkimyhorMtOK1gVovVgPAi5hRTnJIJupEQE", + "cuk5Z25qGZpDS+6Iait4BjRQVUuUnx6fng5PTocnr+5Pj8+Ofzl7/evRr7/++j+hpQljwOCQN+xSAsij", + "AVAsmWYRMQhQGnz+fHURqKZtQsbj05PXvx7/Y3j6+hc4fP0KvBmC0zfx8PXJP345iU+iyeS/oE1UniM+", + "kjn4fg3TKUf8q18G4Ryl9n9r1OZZvCz3EkBZoOqvk4UVjIhRFZNsk+zByz1+hC6R+Z4hAqlrqF9mUIrE", + "+aergPHqgSp91Hne55CBGEiEtmitEqC9snZfkTVD21F5mk/fvGnjoaFtYETOMMPJxCiCGbtKnxCDt/DP", + "HFJW5ycSnyVne4K2D0gH4fchBhkacvNkCtMh/M4IGDIwFVQ8gQTxeQnPzIgHQhQWNSBJel3jfSfgpaHj", + "HbF7ns7lLEm7YqVpEu13oY9mOKWwTiDTyK8jqURWMxmyFT8dn/IkUTz6jeD5HYPZbe4QuDEBaTS7UUxr", + "7tMq+2A6uocpSNtgCOcAJe4xi09afHLKV0UcSBjUubAEzmTXYig4gW2CL0fzAc7HkNzy8jU7STSnGvOz", + "X7bTE6RVZcJEI+vgghgGTfKpu1P+Zf2dDpQVLgCz8JgVgigXHy+fYOrg3CN8cY/hEb4Y8Ya87tGaFwDJ", + "mG4AKspfxW5yry7KDK/6GMoD8Q7kGZPHSYKfb/P0Lp/PAXlpo0ww9Eu9WsM6xJltDeRBT8sFcBl5mq/1", + "wfIv5ckJ/uNfdx9vgvELg/Q/27WdaNp0/+/VMKDbuEYu0czAFKXGoG9i6CdT0ih7oWWeu7tnZjh1n0MT", + "uitUNpD4kcSQvH25QARGmiSY5nM+c4Byn4tPlSXllblQ9X/TnrmuWxiU3qp3EJBo5vThfHiv8XICkNNL", + 
"E+o45ysBF1VZKiB5WrY3/QGXDKYxp6WlYVWsT8skT9MOLatifVqmeRRBGLezwxTs3jrHy3vIlClygSYT", + "v5EUo8mkO0CtJlsDHbJlrkveC//3PMuuuI+dJB4vHkQRzlP2DTwBBsi3nCROuOliqduUGoTI6uUbhYyh", + "dEq9zS29UPm1uZ+ACvUD15hda7Tk4FthFvpMywaG0G8xnIA8YdZnE91w2p6aPquqn65bmOE6VQRm2E+T", + "+IqfU0jazWGr7MBq1kXQv/DYgfGmgKxYNq2QrDIW/sDjow05trU2KYNZPxmsC1/ZDKrH2dAc4py5h68+", + "tg39CRKKcOrswS8Mhiy7AeN5y6F7ZtLpR0UgjWCS6ChNt3CEqWR2BvxFbiGgEiiOkHaK6Kxf139IRDbN", + "KAetLOmZvRVARyAty33BYcoAYf0GQxlgOe0wHm4GyLIK37d52nuZWQLl0SMkzSLQZ7iW7d9GsmX/VGou", + "Ly/lRjRAzCz4pebOTJO28D5d3lxc3bwPB+Ht55sb+evu87t3l5cXlxfhIPzt/Opa/Hh3fvPu8pr/dpmC", + "1yh9LHQ+RQyTF6/vPUWMlypWrbrmIaaVQK47TsWjGrrx+vJWM1yvNDXyUS85ja2IxcbZjL22+1xOBzm9", + "AtG1UF2pyzI/KgMbVLjuwgh3dNy7Kl13uqpVHXKqOhEhOuo3P7fqXplouNPD4hQ7LdVdId9tRreZ4RaJ", + "qj8fJmwjE5YG3YM8hTsPIizdsWT7wtb0tG6FYpvmzCrVuXOr6XaO2x1w/+caT69RCnvt1nE9Ij5zm5Qv", + "Uto6S/A0SFAK++zFyKwBZx+8OVWg1d711ZYlrOoaVxXG2PtWRSqD6cFi1TV8gom9fl1cvv3M16yrm98+", + "hoPwy/ntTTgIL29vP966FyqrHRPu6DTTJQpcQFPf//pokYaVW5vJjytEjMot9IwZqcoNUSMHA+zNsx9h", + "lBMCU/YtE9g9HYQp/K7/92oQpvlc/IeGZyfHi0FVskqVXZu5qkSQSRSajk87hW8sWpxZAfB7veVX3Vou", + "xuXcg8YMJHZQixcVsdgEUSb3D4qcpeMu0SLHcmCruyYF+hZQWNh3tTm2Sv4TgrhbyasLq4Qd5SuK3Ijh", + "txbjZjDsodll+XIb94gl/giGtPJumoIcssjH7pEOu0KtlyqnHLS6OOWbioFnMh1sfCjDwvBW6wOcQW5h", + "RAmmpeSVghu3kMPr59lHv4VZAl5EdNy/X8u/XsVlpb/ttJfmfDVN4YMYEuHepfDOG6Ywy10RhxrneDHe", + "qohoOcLBesGqx2pmKIkJLPssLcPaUHwlA0TnoHanhEAQg3EC/Q6k/G7SpGBAGcycBtrawn6eHvyRCmsU", + "pbCFDlOoCZSL5ZU7oc2bLrFamO+cXWa4tNRYK8SagoHLgRB6+1wmuFjUaRhvVR7tYEaWszsBbuf3P0wM", + "tj3cV5Rfv5DhnPmGsKT8/ZnDHJ5PmFyUuzF77aFUWaVh5rqFW5UMleOtXXcReFmf8uigWfqM2FRpGPEz", + "Jr4IbqeQqUGgGVljuNTez/RlwNSBjGM0QdDNF0wQ92uS9gHInA9T3mr3oaCsKZKrfn07v7u7en/z4fLm", + "PhyE8j8ilLtSpPfeJOGUmbLxxE5fWtTKeVXtaaDeFCk79W5zOXcLk4jqX6+t9GPZzFZTc5dL7GvL0FJ5", + "WSh2p2bpz36uyRL+7QHVQikxdQmUlDISi7my87ZasLMDQaMSlKtLI/eKpngoBTW85e0Kh8ee07WqhNUA", + "1T1FkIteW+nPFBJZ41M+TlDUBAXRXkNuqk3zzky6mr9lJv1WzZNehD5+ubm85avNxYerm3AQfrj88PbS", + 
"HYm9J2g6hcTaH12f4/hZHH7olBi9lpxk73x/pi7BaF0YQBwTSKm9QJT0uNY49XWCf/gdEmOH1E5wqGNP", + "atWZARo8qeL8r4iUKThynnrayFofI5ol4KW05uuB91bFZT74ZuYaT1G6fMr8crO0UgZ9Bih9xsSzYOqv", + "zexbggDT7cKXjW9K+Hh9C6eIMv7vHrG7m2XqQekOzpY+1tR10mzFR2coo/u6ZtXW8C3q5E2oPNmZa9q+", + "CEfZFzX1eBDqo7SrpasdRCANMkj4+Dg93YMoCRBbCYSNIWDnrNFnKboT5yQpTFkAgpmufbTeQ6cb91Hl", + "WI7cQaIIpjr1mvrCu7yMCLiKpO7iFH7R8Gp5ci2urh9QOyD4CtnOXW1tRbpyfLMEv8xhu1eg27gwNd7h", + "dIKmrVc3eDKFdZbekSf70wMC/sXVRCceqYxRl0j2z1Xcirh4OaRXOIfuANPlOaTHeA+cykulIvdDJW/v", + "d1nRnFVbh9g50fhzpFNOfUdDlkqEc7bmCzsVe2uyveA8ywI71/LIsbO6jeMjPdI7/UN+sLAlMyqq57S1", + "SAVXF86p0bXdS8JKW6ZbXk3EitHpspEv5YTv6lEpYcZ5sz3Wu7VngiEgjhFnAUg+WeQwkkPHAOT+R3f2", + "FHt3VQ25wgRv7GCBfcjQ7HY1b1PJ6BOM3770aPzeqmUl8yvd33OpcLSw+pGA362jNIp35cE+NKN7R+w8", + "ywrpJZwbO+DgAYEjcxenn8TWswd4vMBdNINx7kkfg08dbFVzIFhl4Wwme6UnHE2lJoxxu8txAVuCyXoM", + "65UtT3fIRFLYODAJi3eEQ3/iRkZDpsI35GF2W4cqj2ziySH75tutXrFb6h5hfzGv8M2VGvNUy+To0bDh", + "z3rXMql73ewr1PE35U/0Z7O1ppS5rD3HVfzBVVJuSFzJjUEpe3XqTFlmFlR6zBi1HDG3KKuPnRTCsxUa", + "6GoQ6jo9FaCmWXOp1NBD+2RfQG4Qug/BE/Bc/uxwrMBz8N/nH66D2BTsr+/K/XQg2n3f3JYQ9hOghBvB", + "MMoJYi93xWWMYwgIJPrORnlLY3im/lwMcMaYyOaKMH5EUBdHnEPyTzoEcRbWbuwEGRIXoyyE0zHBbibr", + "y1HPP13xqjIrPiz/1cxSeHJ0fHQsJjmDKchQeBa+Ojo5OhbWA5uJoY1AhkYJehLL+BQ6AsjvIaPyzBN6", + "gimkNDCmLseg8e7Da/X9vRgXURap6OX0+Lje8D8hSNhMqMg3ru83mJk+SzMTnn19GIRUX3DCKSwK6ljW", + "V9V+NIPRY/jA64uxEgjil/bB8mKoabS3usA6hyuICxgOgLjtLmAETCYqCaJp9Iba1uE/nfB/huI+NTr6", + "YX4vhFbB1MGTW/iEH2EAUusqwgkmAVDpOzXWnGdInPCUiQGyurRYwRwysUR9bbwPTtzeEZ4JlBYyY2gN", + "bWmXLjg16bMrZeEvHmoz+brOkLs8iiClkzxJXgIihhfLdCZ9rvW1nOAIp0z5F+r+XN7C6A+VzFwQ3eVO", + "W7W5Vg0gzUHChwzjAJNgDOKAFAckXx+/2g4Zv2EyRnEM5UUmBTYVdPjE3quZ0/As/vYwCL8P9TWe4pvB", + "VTHlJQRLG3X0Q/y7GOmlzyfR8sZlfSkXSIvLssq4NZd9SZFuxau82kugzAFX6VpuE6rrw1xx7Zljsivw", + "ZwTBJyUAkiNiPg5SUNLQFmcKGZCBhQb8SwyVsC9DzUOQZSM7TE69AnCNKPMF1+vLmonq82pXlaIbw1uH", + "U//9gFge5C5h8WQ7ZHxOQc5mmKD/hbHs+M12Ov4A2QzHQYpZAJIEP+vbOAvr5UfJQP76sCiZM21w1bIj", + 
"i3STjdGP6Wxo/2UxEvtinWXG7KIh2CIy4laFLouHTY53DamQvaerie/OiX4iXZqDg0Tvr0RXhKkq0LXV", + "sCoEK4m8+Dv/NRTb4Yvi/1zkFqOxunils2owFRrVwtui1L5phkGXtAIvkQWrG0ns26m+GNHfpyrRvcvt", + "aMDaxT79lKBB20EB7q8CtFTGOpTf6BmOZxg/+iM4Vt/TBI9BEugqbqUlAzfvRdEvpmR7iKsE3Ixg/h8Y", + "m84OmN0lzJaDiBIhwIWQdotbI3D0Q/1YdMKiOhHYBYsynbzAYusiqhr1rp/PFqy3alEfJOZvJzE1HDdJ", + "zBw2ByupucrNnJLV+zvW41hlSfmgavi3ItbFPpU12cdkMZtxuwLmlr0UO+VLzeOH4nK8ykyOrNfGWnwG", + "kCRBqbRvFmXkrVRwo4ap6yrRXjOc8OHhSXl0uzTbZUusMgnNk0wZzIYk5w6l/rkYyVv3hhnxL3PyXZwA", + "BFmeJJoatW1mtr+rCFAnSeSSpx42Il1WO32Ixbvcado3vd7JK8yw3GFdy8Q3vu/kwIL0oLOcBQyr1/Qq", + "s1DjwWKDAtaX/JKYqdsdhZCVRvBzb67wXl9vp9cbzIU2T+OKIlHiXYGVViUmb6Vha8dIZLu6idXFOM37", + "m2gyUfrFaIMxZM9QPSM4x5TpM2/8G0glriaIUPGXI586eg+ZuJpnn/TQhqTZ855Lv+UyVu+2HCT4r5Rg", + "LjexhPWGxDbB02aTkJprpGlFcuuyaF94vCeCOGh4UonhgD6iTNP2Zw7JS0EcnkyocGUcpPgv0G3uLkFz", + "xILxi6dL8XnVHs+NKZzAJ5hQ3u8EJQySho5FyVLPXS/GlrdBe0ZOxaXPgejNomOCiYcQWaEvIepuaQcR", + "X8QdWTgQeZf+8WP7ZuuenZduxfbwQXYfm6u3G6m4sIotQ0lRf8P7CZY2aFt8OCTt9Bx6SM2pOIRGC1tr", + "wTWe9l8G5Gfa5hXSAAQpfPalT8pYpywabtKpKr/a6vGlVHDLOFNb9Z703WE9/CRmrhv7G2O8D8SVq2LA", + "phGueFsDuQvRox/yx8JKHm6OdplcXtotV7irYWOuKXSbNaaXfc2Tqb/W08+50LGlg5avanmTcEz7ZSE/", + "LAYtMb7eifFGt+8H3De1/lQft1+oK7Q2GoqrvVh/kK+V5UsJwpJp/s0LTnEetcGPBkmijNuSAHpS/Pdl", + "rfmZHehH+NLJfeblSr2u+hB5A03WXS2daCt0RW8CrUtjOpC4DX/ffn98y95+/dX0v8bXd7/+voWTOAc/", + "fy0WoGJL5/M3XRamkVBAHVcnqdU6rFD/hi8Hh6hQ00vhXzD7IAMuGQjUqrlOOSDina2mU8T8Ow2AWU5l", + "RY8E6LPDotGf11FyvF7midPpw6jCMiGab9sL1XVfqCRxh6XKe2ZaXFO83sVKvttBmxMPC9EsvfZBPRFq", + "6zWNn3ydqvGjX0yhwu1D6Lq0YtWw2BbA7hq4K2/DqA4asX6I21n7RuWnPpp3j9QrSX02kU42Ip1LbCVp", + "YBzE0rmjVMhNd7nssFLpPwzl/xdSiBPIHPcNXoi/U+NUdRFlWWdvo4BluWqmbWjYse9ra6v0SoTssvSW", + "BEmCsICrL9ugPI9iXQPMdf249Jr6SYKsc5CEXV53/U9sdV13cz3L207e6Ci5kr69kVw5If0lt2nlm8uX", + "f3r6aLqWW8Stx+8OPlqZH0v5aJrbB2PQ5aMVWFyPLehKq248CGGffXiROPYdadjbBe/vfsai6+Gosnxq", + "rhwOVmzrYEUJi8+AipPTvpMWZnoK1cDnOeATvWyabbOeGBFI1Pshnv0I8egXsElrOI0pih90xi7ukJA8", + 
"VVPVYhSbY6HyOjHXcBc7odgO+yON+yMcyJtVKM/mXUe/qZEk6qHAlgxn60m/n9wEtzhxSLpcy62yCoCV", + "M4yQLGtwa0arxdT+b5vhbZLjuHS2CoR63Wef7fDSgL3XDFkc3GOpNS9ELSW2B7vcLbmGN/2OIJcwtbw8", + "jzLS4cYa+4YDWrm/xGk1V543s26+oAdR3xSBpUtmKJORXmfSr/rWDd3W5N2Jips//2TjZckjGjpAWoLu", + "Qf9UQnZl7mxcA9FOxrQo2c16OBjUxdJ8MKnXvzD3k4mOQjAiedpdEopn6bvY0odTTbt6qslOz+V9TiEz", + "U3vk6Vg/Nboty6Y7Zdb7aXvnwKygKAVfDsrS78WsoDBzCgkdRTkhaijNb7OpggGvVtOInykk7yF7pxrb", + "IK54Tz3BJCg+XOq7P1e3c5BX4KYxLqbfAWN1JXYEkmQMokcvnN/heSbzFjkyPvL+A+etoLwjdSO7aPoj", + "5+U73XwF4K+OT1tumY5Uv3G93xkEscpASXBkniD3v5mw6MVMPeJypx35KZ6z9zLzjn9djpOian82quf1", + "t85EQW5PDmI8TeBmECma3mFErgOAkn1rBmDBuJ0D4Kp4aztsVJyKLZ/tMBeety7wvAU7vXCjd2T3Pt1j", + "nUT9qY72dDEfe72Z0nr0x4u9kXxA15/+cS6+98uUlnU2dCWbbLyW3Lvo/YqEHPnhCEuj83Ku3lduOcLi", + "xxeB4sHwhvQi/r0fvmSdcFN5MrzxNeBLjvyAr5YkFc6kJfCV4ClqyFq7xlMaoDQAYm08ajAwrkVDGzqO", + "wJdg3v6Wbu7q5GkneDqFcYDSg4O9u6/mCNR09aQTPMU5axEGnLNu0sCb2hGMclIOIN2jB/wEerrCVp2C", + "mKGshwtkVermBtnnWUQ1tf+zUYC7O+3vD9ksOvhEy/hENgfbIUnglM8BabJXZQnaqEw3eik170CTsUuG", + "hWbeIYa/FyaGhlC7ulbZszIvDpIuGa4ORSwzbjtmsso2GnPIWl5K3vX07iW2V3dMnnYmr7tHWvdAQ6cG", + "cJl3YlI/O6dxd0o76YF6lWXQnDu553lYS6YWHNDvzipYMiuxXQZGCUofh3JjscG9ROljAAJZzH58nuGm", + "Rxw1qcrxROmjegZ2nwRl/dZdwYhbw8mu5/USz0xs9fheZyHn1CoJr1N8OJ78F7/7JqTahaQNqRpG0HTa", + "5HndywLqMrqljlF1v5duFxRMcyLiEyQU4fQouJoIk5/mHB8wHsjcfsAgZbpQgGgwgSyawdiXrahKhjuv", + "HxUMrFntc5q5ciRn+1qx73O2h7Nhu6QUtQ5qOZXWdrq6h1pUckm7uiNa4jupxN9l4T3yTv4OOnHDGkZN", + "6rLp23rQB12zAw/v1mZlY+aX1jOjGE5QinQyXB+VU9Tsq30uij4PeuhvpoesuV1NI1n4OiinXVRO9gQt", + "r6eqG31jCAgkZqNv4Nz6g+RJ64ucJOFZGC4eFv8XAAD//1ukIaB29wAA", } // GetSwagger returns the content of the embedded swagger specification file diff --git a/api/v1/server/oas/transformers/log.go b/api/v1/server/oas/transformers/log.go new file mode 100644 index 000000000..94819deaa --- /dev/null +++ b/api/v1/server/oas/transformers/log.go @@ -0,0 +1,27 @@ +package transformers + +import ( + "encoding/json" + + 
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen" + "github.com/hatchet-dev/hatchet/internal/repository/prisma/dbsqlc" +) + +func ToLogFromSQLC(log *dbsqlc.LogLine) *gen.LogLine { + res := &gen.LogLine{ + CreatedAt: log.CreatedAt.Time, + Message: log.Message, + } + + if log.Metadata != nil { + meta := map[string]interface{}{} + + err := json.Unmarshal(log.Metadata, &meta) + + if err == nil { + res.Metadata = meta + } + } + + return res +} diff --git a/api/v1/server/run/run.go b/api/v1/server/run/run.go index 81814156e..3f2e52591 100644 --- a/api/v1/server/run/run.go +++ b/api/v1/server/run/run.go @@ -12,6 +12,7 @@ import ( apitokens "github.com/hatchet-dev/hatchet/api/v1/server/handlers/api-tokens" "github.com/hatchet-dev/hatchet/api/v1/server/handlers/events" githubapp "github.com/hatchet-dev/hatchet/api/v1/server/handlers/github-app" + "github.com/hatchet-dev/hatchet/api/v1/server/handlers/logs" "github.com/hatchet-dev/hatchet/api/v1/server/handlers/metadata" stepruns "github.com/hatchet-dev/hatchet/api/v1/server/handlers/step-runs" "github.com/hatchet-dev/hatchet/api/v1/server/handlers/tenants" @@ -28,6 +29,7 @@ type apiService struct { *users.UserService *tenants.TenantService *events.EventService + *logs.LogService *workflows.WorkflowService *workers.WorkerService *metadata.MetadataService @@ -41,6 +43,7 @@ func newAPIService(config *server.ServerConfig) *apiService { UserService: users.NewUserService(config), TenantService: tenants.NewTenantService(config), EventService: events.NewEventService(config), + LogService: logs.NewLogService(config), WorkflowService: workflows.NewWorkflowService(config), WorkerService: workers.NewWorkerService(config), MetadataService: metadata.NewMetadataService(config), diff --git a/cmd/hatchet-engine/engine/run.go b/cmd/hatchet-engine/engine/run.go index 2a82570fa..a48ed84b7 100644 --- a/cmd/hatchet-engine/engine/run.go +++ b/cmd/hatchet-engine/engine/run.go @@ -178,6 +178,9 @@ func Run(ctx context.Context, cf 
*loader.ConfigLoader) error { ingestor.WithEventRepository( sc.Repository.Event(), ), + ingestor.WithLogRepository( + sc.Repository.Log(), + ), ingestor.WithTaskQueue(sc.TaskQueue), ) if err != nil { diff --git a/frontend/app/src/components/ui/code-editor.tsx b/frontend/app/src/components/ui/code-editor.tsx index fc22323be..a17be6464 100644 --- a/frontend/app/src/components/ui/code-editor.tsx +++ b/frontend/app/src/components/ui/code-editor.tsx @@ -12,6 +12,7 @@ interface CodeEditorProps { width?: string; copy?: boolean; wrapLines?: boolean; + lineNumbers?: boolean; } export function CodeEditor({ @@ -23,6 +24,7 @@ export function CodeEditor({ width, copy, wrapLines = true, + lineNumbers = false, }: CodeEditorProps) { const setEditorTheme = (monaco: Monaco) => { monaco.editor.defineTheme('pastels-on-dark', getMonacoTheme()); @@ -48,7 +50,11 @@ export function CodeEditor({ options={{ minimap: { enabled: false }, wordWrap: wrapLines ? 'on' : 'off', - lineNumbers: 'off', + lineNumbers: lineNumbers + ? function (lineNumber) { + return `${lineNumber}`; + } + : 'off', theme: 'pastels-on-dark', autoDetectHighContrast: true, readOnly: !setCode, diff --git a/frontend/app/src/lib/api/generated/Api.ts b/frontend/app/src/lib/api/generated/Api.ts index c3614a421..a8778a7c6 100644 --- a/frontend/app/src/lib/api/generated/Api.ts +++ b/frontend/app/src/lib/api/generated/Api.ts @@ -34,6 +34,11 @@ import { ListGithubBranchesResponse, ListGithubReposResponse, ListPullRequestsResponse, + LogLineLevelField, + LogLineList, + LogLineOrderByDirection, + LogLineOrderByField, + LogLineSearch, PullRequestState, RejectInviteRequest, ReplayEventRequest, @@ -772,6 +777,47 @@ export class Api extends HttpClient + this.request({ + path: `/api/v1/step-runs/${stepRun}/logs`, + method: "GET", + query: query, + secure: true, + format: "json", + ...params, + }); /** * @description Get the diff for a step run between the most recent run and the first run. 
* diff --git a/frontend/app/src/lib/api/generated/data-contracts.ts b/frontend/app/src/lib/api/generated/data-contracts.ts index a2b2635cd..d2c6f788f 100644 --- a/frontend/app/src/lib/api/generated/data-contracts.ts +++ b/frontend/app/src/lib/api/generated/data-contracts.ts @@ -684,3 +684,40 @@ export enum PullRequestState { Open = "open", Closed = "closed", } + +export interface LogLine { + /** + * The creation date of the log line. + * @format date-time + */ + createdAt: string; + /** The log message. */ + message: string; + /** The log metadata. */ + metadata: object; +} + +export enum LogLineLevel { + DEBUG = "DEBUG", + INFO = "INFO", + WARN = "WARN", + ERROR = "ERROR", +} + +export interface LogLineList { + pagination?: PaginationResponse; + rows?: LogLine[]; +} + +export enum LogLineOrderByField { + CreatedAt = "createdAt", +} + +export enum LogLineOrderByDirection { + Asc = "asc", + Desc = "desc", +} + +export type LogLineSearch = string; + +export type LogLineLevelField = LogLineLevel[]; diff --git a/frontend/app/src/lib/api/queries.ts b/frontend/app/src/lib/api/queries.ts index 2679e52bb..2e879d730 100644 --- a/frontend/app/src/lib/api/queries.ts +++ b/frontend/app/src/lib/api/queries.ts @@ -5,6 +5,7 @@ import invariant from 'tiny-invariant'; import { PullRequestState } from '.'; type ListEventQuery = Parameters[1]; +type ListLogLineQuery = Parameters[1]; type ListWorkflowRunsQuery = Parameters[1]; export const queries = createQueryKeyStore({ @@ -104,6 +105,10 @@ export const queries = createQueryKeyStore({ queryKey: ['step-run:get:diff', stepRun], queryFn: async () => (await api.stepRunGetDiff(stepRun)).data, }), + getLogs: (stepRun: string, query: ListLogLineQuery) => ({ + queryKey: ['log-lines:list', stepRun], + queryFn: async () => (await api.logLineList(stepRun, query)).data, + }), }, events: { list: (tenant: string, query: ListEventQuery) => ({ diff --git a/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-logs.tsx 
b/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-logs.tsx new file mode 100644 index 000000000..7d7247798 --- /dev/null +++ b/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-logs.tsx @@ -0,0 +1,40 @@ +import { + LogLineOrderByDirection, + StepRun, + StepRunStatus, + queries, +} from '@/lib/api'; +import { useQuery } from '@tanstack/react-query'; +import { CodeEditor } from '@/components/ui/code-editor'; + +export function StepRunLogs({ stepRun }: { stepRun: StepRun | undefined }) { + const getLogsQuery = useQuery({ + ...queries.stepRuns.getLogs(stepRun?.metadata.id || '', { + orderByDirection: LogLineOrderByDirection.Asc, + }), + enabled: !!stepRun, + refetchInterval: () => { + if (stepRun?.status === StepRunStatus.RUNNING) { + return 1000; + } + + return 5000; + }, + }); + + const logLines = + getLogsQuery.data?.rows?.reduce( + (acc, row) => acc + row.createdAt + ': ' + row.message + '\n', + '', + ) || 'No logs found'; + + return ( + + ); +} diff --git a/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-output.tsx b/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-output.tsx index bdb45ebbf..35022412f 100644 --- a/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-output.tsx +++ b/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-output.tsx @@ -1,21 +1,13 @@ import { CodeEditor } from '@/components/ui/code-editor'; import { Loading } from '@/components/ui/loading'; -import { - StepConfigurationSection, - StepDurationSection, - StepStatusSection, -} from '..'; -import { StepRun } from '@/lib/api'; export interface StepRunOutputProps { - stepRun: StepRun; output: string; isLoading: boolean; errors: string[]; } export const StepRunOutput: React.FC = ({ - stepRun, output, isLoading, errors, @@ -36,9 +28,6 @@ export const StepRunOutput: React.FC = ({ 2, )} /> - - - ); }; diff --git a/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-playground.tsx 
b/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-playground.tsx index 3b7413287..e46a5e21b 100644 --- a/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-playground.tsx +++ b/frontend/app/src/pages/main/workflow-runs/$run/components/step-run-playground.tsx @@ -6,7 +6,6 @@ import api, { queries, } from '@/lib/api'; import { useEffect, useMemo, useState } from 'react'; -import { RunStatus } from '../../components/run-statuses'; import { Button } from '@/components/ui/button'; import invariant from 'tiny-invariant'; import { useApiError } from '@/lib/hooks'; @@ -18,7 +17,12 @@ import { GitHubLogoIcon, PlayIcon } from '@radix-ui/react-icons'; import { StepRunOutput } from './step-run-output'; import { StepRunInputs } from './step-run-inputs'; import { Loading } from '@/components/ui/loading'; -import { StepStatusDetails } from '..'; +import { + StepConfigurationSection, + StepDurationSection, + StepStatusDetails, + StepStatusSection, +} from '..'; import { TooltipProvider, Tooltip, @@ -27,6 +31,9 @@ import { } from '@/components/ui/tooltip'; import { VscNote, VscJson } from 'react-icons/vsc'; import { CreatePRDialog } from './create-pr-dialog'; +import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs'; +import { StepRunLogs } from './step-run-logs'; +import { RunStatus } from '../../components/run-statuses'; export function StepRunPlayground({ stepRun, @@ -248,7 +255,7 @@ export function StepRunPlayground({ <>
- Playground/{stepRun?.step?.readableId} + {stepRun?.step?.readableId}
@@ -333,7 +340,9 @@ export function StepRunPlayground({
- Inputs +
+ Input +
{stepInput && (
-
-
-
Outputs
+ +
+
+ + + Output + + + Logs + + +
0 @@ -357,20 +375,29 @@ export function StepRunPlayground({ } />
- !!e) as string[] - } - /> -
+ + + !!e) as string[] + } + /> + + + + + + + + +
diff --git a/frontend/docs/pages/home/python-sdk/creating-a-workflow.mdx b/frontend/docs/pages/home/python-sdk/creating-a-workflow.mdx index 80432059c..f06bf684a 100644 --- a/frontend/docs/pages/home/python-sdk/creating-a-workflow.mdx +++ b/frontend/docs/pages/home/python-sdk/creating-a-workflow.mdx @@ -160,4 +160,24 @@ def step1(self, context): pass ``` -If you need control over cancellation, you can also use `context.cancel()` to cancel the current step, though this is not recommended. \ No newline at end of file +If you need control over cancellation, you can also use `context.cancel()` to cancel the current step, though this is not recommended. + +## Logging + +Hatchet comes with a built-in logging view where you can push debug logs from your workflows. To use this, you can use the `context.log` method. For example: + +```py +@hatchet.workflow(on_events=["user:create"],schedule_timeout="10m") +class LoggingWorkflow: + @hatchet.step() + def logger(self, context : Context): + + for i in range(1000): + context.log(f"Logging message {i}") + + return { + "step1": "completed", + } +``` + +Each step is currently limited to 1000 log lines. \ No newline at end of file diff --git a/frontend/docs/pages/home/typescript-sdk/creating-a-workflow.mdx b/frontend/docs/pages/home/typescript-sdk/creating-a-workflow.mdx index 89bf77d20..270d5d2d2 100644 --- a/frontend/docs/pages/home/typescript-sdk/creating-a-workflow.mdx +++ b/frontend/docs/pages/home/typescript-sdk/creating-a-workflow.mdx @@ -296,4 +296,32 @@ await worker.registerWorkflow({ }); ``` -This will then appear in the Hatchet UI under the `prompt` value. \ No newline at end of file +This will then appear in the Hatchet UI under the `prompt` value. + +## Logging + +Hatchet comes with a built-in logging view where you can push debug logs from your workflows. To use this, you can use the `ctx.log` method. 
For example: + +```ts +const workflow: Workflow = { + id: 'logger-example', + description: 'test', + on: { + event: 'user:create', + }, + steps: [ + { + name: 'logger-step1', + run: async (ctx) => { + for (let i = 0; i < 1000; i++) { + ctx.log(`log message ${i}`); + } + + return { step1: 'completed step run' }; + }, + }, + ], +}; +``` + +Each step is currently limited to 1000 log lines. \ No newline at end of file diff --git a/internal/config/loader/loader.go b/internal/config/loader/loader.go index a69daa5b2..2012e7eb2 100644 --- a/internal/config/loader/loader.go +++ b/internal/config/loader/loader.go @@ -181,6 +181,7 @@ func GetServerConfigFromConfigfile(dc *database.Config, cf *server.ServerConfigF ingestor, err := ingestor.NewIngestor( ingestor.WithEventRepository(dc.Repository.Event()), + ingestor.WithLogRepository(dc.Repository.Log()), ingestor.WithTaskQueue(tq), ) diff --git a/internal/repository/logs.go b/internal/repository/logs.go new file mode 100644 index 000000000..edc920013 --- /dev/null +++ b/internal/repository/logs.go @@ -0,0 +1,60 @@ +package repository + +import ( + "time" + + "github.com/hatchet-dev/hatchet/internal/repository/prisma/dbsqlc" +) + +type CreateLogLineOpts struct { + // The step run id + StepRunId string `validate:"required,uuid"` + + // (optional) The time when the log line was created. + CreatedAt *time.Time + + // (required) The message of the log line. + Message string `validate:"required,min=1,max=10000"` + + // (optional) The level of the log line. + Level *string `validate:"omitnil,oneof=INFO ERROR WARN DEBUG"` + + // (optional) The metadata of the log line. 
+ Metadata []byte +} + +type ListLogsOpts struct { + // (optional) number of logs to skip + Offset *int + + // (optional) number of logs to return + Limit *int `validate:"omitnil,min=1,max=1000"` + + // (optional) a list of log levels to filter by + Levels []string `validate:"omitnil,dive,oneof=INFO ERROR WARN DEBUG"` + + // (optional) a step run id to filter by + StepRunId *string `validate:"omitempty,uuid"` + + // (optional) a search query + Search *string + + // (optional) the order by field + OrderBy *string `validate:"omitempty,oneof=createdAt"` + + // (optional) the order direction + OrderDirection *string `validate:"omitempty,oneof=ASC DESC"` +} + +type ListLogsResult struct { + Rows []*dbsqlc.LogLine + Count int +} + +type LogsRepository interface { + // PutLog creates a new log line. + PutLog(tenantId string, opts *CreateLogLineOpts) (*dbsqlc.LogLine, error) + + // ListLogLines returns a list of log lines for a given step run. + ListLogLines(tenantId string, opts *ListLogsOpts) (*ListLogsResult, error) +} diff --git a/internal/repository/prisma/dbsqlc/logs.sql b/internal/repository/prisma/dbsqlc/logs.sql new file mode 100644 index 000000000..98400f339 --- /dev/null +++ b/internal/repository/prisma/dbsqlc/logs.sql @@ -0,0 +1,41 @@ +-- name: CreateLogLine :one +INSERT INTO "LogLine" ( + "createdAt", + "tenantId", + "stepRunId", + "message", + "level", + "metadata" +) VALUES ( + coalesce(sqlc.narg('createdAt')::timestamp, now()), + @tenantId::uuid, + @stepRunId::uuid, + @message::text, + coalesce(sqlc.narg('level')::"LogLineLevel", 'INFO'::"LogLineLevel"), + coalesce(sqlc.narg('metadata')::jsonb, '{}'::jsonb) +) RETURNING *; + +-- name: ListLogLines :many +SELECT * FROM "LogLine" +WHERE + "tenantId" = @tenantId::uuid AND + (sqlc.narg('stepRunId')::uuid IS NULL OR "stepRunId" = sqlc.narg('stepRunId')::uuid) AND + (sqlc.narg('search')::text IS NULL OR "message" LIKE concat('%', sqlc.narg('search')::text, '%')) AND + (sqlc.narg('levels')::"LogLineLevel"[] IS 
NULL OR "level" = ANY(sqlc.narg('levels')::"LogLineLevel"[])) +ORDER BY + CASE WHEN sqlc.narg('orderBy')::text = 'createdAt ASC' THEN "createdAt" END ASC, + CASE WHEN sqlc.narg('orderBy')::text = 'createdAt DESC' THEN "createdAt" END DESC, + -- add order by id to make sure the order is deterministic + CASE WHEN sqlc.narg('orderBy')::text = 'createdAt ASC' THEN "id" END ASC, + CASE WHEN sqlc.narg('orderBy')::text = 'createdAt DESC' THEN "id" END DESC +LIMIT COALESCE(sqlc.narg('limit'), 50) +OFFSET COALESCE(sqlc.narg('offset'), 0); + +-- name: CountLogLines :one +SELECT COUNT(*) AS total +FROM "LogLine" +WHERE + "tenantId" = @tenantId::uuid AND + (sqlc.narg('stepRunId')::uuid IS NULL OR "stepRunId" = sqlc.narg('stepRunId')::uuid) AND + (sqlc.narg('search')::text IS NULL OR "message" LIKE concat('%', sqlc.narg('search')::text, '%')) AND + (sqlc.narg('levels')::"LogLineLevel"[] IS NULL OR "level" = ANY(sqlc.narg('levels')::"LogLineLevel"[])); diff --git a/internal/repository/prisma/dbsqlc/logs.sql.go b/internal/repository/prisma/dbsqlc/logs.sql.go new file mode 100644 index 000000000..1b9dbe809 --- /dev/null +++ b/internal/repository/prisma/dbsqlc/logs.sql.go @@ -0,0 +1,153 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.24.0 +// source: logs.sql + +package dbsqlc + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const countLogLines = `-- name: CountLogLines :one +SELECT COUNT(*) AS total +FROM "LogLine" +WHERE + "tenantId" = $1::uuid AND + ($2::uuid IS NULL OR "stepRunId" = $2::uuid) AND + ($3::text IS NULL OR "message" LIKE concat('%', $3::text, '%')) AND + ($4::"LogLineLevel"[] IS NULL OR "level" = ANY($4::"LogLineLevel"[])) +` + +type CountLogLinesParams struct { + Tenantid pgtype.UUID `json:"tenantid"` + StepRunId pgtype.UUID `json:"stepRunId"` + Search pgtype.Text `json:"search"` + Levels []LogLineLevel `json:"levels"` +} + +func (q *Queries) CountLogLines(ctx context.Context, db DBTX, arg CountLogLinesParams) (int64, error) { + row := db.QueryRow(ctx, countLogLines, + arg.Tenantid, + arg.StepRunId, + arg.Search, + arg.Levels, + ) + var total int64 + err := row.Scan(&total) + return total, err +} + +const createLogLine = `-- name: CreateLogLine :one +INSERT INTO "LogLine" ( + "createdAt", + "tenantId", + "stepRunId", + "message", + "level", + "metadata" +) VALUES ( + coalesce($1::timestamp, now()), + $2::uuid, + $3::uuid, + $4::text, + coalesce($5::"LogLineLevel", 'INFO'::"LogLineLevel"), + coalesce($6::jsonb, '{}'::jsonb) +) RETURNING id, "createdAt", "tenantId", "stepRunId", message, level, metadata +` + +type CreateLogLineParams struct { + CreatedAt pgtype.Timestamp `json:"createdAt"` + Tenantid pgtype.UUID `json:"tenantid"` + Steprunid pgtype.UUID `json:"steprunid"` + Message string `json:"message"` + Level NullLogLineLevel `json:"level"` + Metadata []byte `json:"metadata"` +} + +func (q *Queries) CreateLogLine(ctx context.Context, db DBTX, arg CreateLogLineParams) (*LogLine, error) { + row := db.QueryRow(ctx, createLogLine, + arg.CreatedAt, + arg.Tenantid, + arg.Steprunid, + arg.Message, + arg.Level, + arg.Metadata, + ) + var i LogLine + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.TenantId, + &i.StepRunId, + &i.Message, + 
&i.Level, + &i.Metadata, + ) + return &i, err +} + +const listLogLines = `-- name: ListLogLines :many +SELECT id, "createdAt", "tenantId", "stepRunId", message, level, metadata FROM "LogLine" +WHERE + "tenantId" = $1::uuid AND + ($2::uuid IS NULL OR "stepRunId" = $2::uuid) AND + ($3::text IS NULL OR "message" LIKE concat('%', $3::text, '%')) AND + ($4::"LogLineLevel"[] IS NULL OR "level" = ANY($4::"LogLineLevel"[])) +ORDER BY + CASE WHEN $5::text = 'createdAt ASC' THEN "createdAt" END ASC, + CASE WHEN $5::text = 'createdAt DESC' THEN "createdAt" END DESC, + -- add order by id to make sure the order is deterministic + CASE WHEN $5::text = 'createdAt ASC' THEN "id" END ASC, + CASE WHEN $5::text = 'createdAt DESC' THEN "id" END DESC +LIMIT COALESCE($7, 50) +OFFSET COALESCE($6, 0) +` + +type ListLogLinesParams struct { + Tenantid pgtype.UUID `json:"tenantid"` + StepRunId pgtype.UUID `json:"stepRunId"` + Search pgtype.Text `json:"search"` + Levels []LogLineLevel `json:"levels"` + OrderBy pgtype.Text `json:"orderBy"` + Offset interface{} `json:"offset"` + Limit interface{} `json:"limit"` +} + +func (q *Queries) ListLogLines(ctx context.Context, db DBTX, arg ListLogLinesParams) ([]*LogLine, error) { + rows, err := db.Query(ctx, listLogLines, + arg.Tenantid, + arg.StepRunId, + arg.Search, + arg.Levels, + arg.OrderBy, + arg.Offset, + arg.Limit, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []*LogLine + for rows.Next() { + var i LogLine + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.TenantId, + &i.StepRunId, + &i.Message, + &i.Level, + &i.Metadata, + ); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/internal/repository/prisma/dbsqlc/models.go b/internal/repository/prisma/dbsqlc/models.go index 23165e018..f016b3dac 100644 --- a/internal/repository/prisma/dbsqlc/models.go +++ b/internal/repository/prisma/dbsqlc/models.go @@ 
-143,6 +143,50 @@ func (ns NullJobRunStatus) Value() (driver.Value, error) { return string(ns.JobRunStatus), nil } +type LogLineLevel string + +const ( + LogLineLevelDEBUG LogLineLevel = "DEBUG" + LogLineLevelINFO LogLineLevel = "INFO" + LogLineLevelWARN LogLineLevel = "WARN" + LogLineLevelERROR LogLineLevel = "ERROR" +) + +func (e *LogLineLevel) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = LogLineLevel(s) + case string: + *e = LogLineLevel(s) + default: + return fmt.Errorf("unsupported scan type for LogLineLevel: %T", src) + } + return nil +} + +type NullLogLineLevel struct { + LogLineLevel LogLineLevel `json:"LogLineLevel"` + Valid bool `json:"valid"` // Valid is true if LogLineLevel is not NULL +} + +// Scan implements the Scanner interface. +func (ns *NullLogLineLevel) Scan(value interface{}) error { + if value == nil { + ns.LogLineLevel, ns.Valid = "", false + return nil + } + ns.Valid = true + return ns.LogLineLevel.Scan(value) +} + +// Value implements the driver Valuer interface. 
+func (ns NullLogLineLevel) Value() (driver.Value, error) { + if !ns.Valid { + return nil, nil + } + return string(ns.LogLineLevel), nil +} + type StepRunStatus string const ( @@ -547,6 +591,16 @@ type JobRunLookupData struct { Data []byte `json:"data"` } +type LogLine struct { + ID int64 `json:"id"` + CreatedAt pgtype.Timestamp `json:"createdAt"` + TenantId pgtype.UUID `json:"tenantId"` + StepRunId pgtype.UUID `json:"stepRunId"` + Message string `json:"message"` + Level LogLineLevel `json:"level"` + Metadata []byte `json:"metadata"` +} + type Service struct { ID pgtype.UUID `json:"id"` CreatedAt pgtype.Timestamp `json:"createdAt"` diff --git a/internal/repository/prisma/dbsqlc/schema.sql b/internal/repository/prisma/dbsqlc/schema.sql index 9fb492013..d11932483 100644 --- a/internal/repository/prisma/dbsqlc/schema.sql +++ b/internal/repository/prisma/dbsqlc/schema.sql @@ -7,6 +7,9 @@ CREATE TYPE "InviteLinkStatus" AS ENUM ('PENDING', 'ACCEPTED', 'REJECTED'); -- CreateEnum CREATE TYPE "JobRunStatus" AS ENUM ('PENDING', 'RUNNING', 'SUCCEEDED', 'FAILED', 'CANCELLED'); +-- CreateEnum +CREATE TYPE "LogLineLevel" AS ENUM ('DEBUG', 'INFO', 'WARN', 'ERROR'); + -- CreateEnum CREATE TYPE "StepRunStatus" AS ENUM ('PENDING', 'PENDING_ASSIGNMENT', 'ASSIGNED', 'RUNNING', 'SUCCEEDED', 'FAILED', 'CANCELLED'); @@ -227,6 +230,19 @@ CREATE TABLE "JobRunLookupData" ( CONSTRAINT "JobRunLookupData_pkey" PRIMARY KEY ("id") ); +-- CreateTable +CREATE TABLE "LogLine" ( + "id" BIGSERIAL NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "tenantId" UUID NOT NULL, + "stepRunId" UUID, + "message" TEXT NOT NULL, + "level" "LogLineLevel" NOT NULL DEFAULT 'INFO', + "metadata" JSONB, + + CONSTRAINT "LogLine_pkey" PRIMARY KEY ("id") +); + -- CreateTable CREATE TABLE "Service" ( "id" UUID NOT NULL, @@ -928,6 +944,12 @@ ALTER TABLE "JobRunLookupData" ADD CONSTRAINT "JobRunLookupData_jobRunId_fkey" F -- AddForeignKey ALTER TABLE "JobRunLookupData" ADD CONSTRAINT 
"JobRunLookupData_tenantId_fkey" FOREIGN KEY ("tenantId") REFERENCES "Tenant"("id") ON DELETE CASCADE ON UPDATE CASCADE; +-- AddForeignKey +ALTER TABLE "LogLine" ADD CONSTRAINT "LogLine_stepRunId_fkey" FOREIGN KEY ("stepRunId") REFERENCES "StepRun"("id") ON DELETE SET NULL ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "LogLine" ADD CONSTRAINT "LogLine_tenantId_fkey" FOREIGN KEY ("tenantId") REFERENCES "Tenant"("id") ON DELETE CASCADE ON UPDATE CASCADE; + -- AddForeignKey ALTER TABLE "Service" ADD CONSTRAINT "Service_tenantId_fkey" FOREIGN KEY ("tenantId") REFERENCES "Tenant"("id") ON DELETE CASCADE ON UPDATE CASCADE; diff --git a/internal/repository/prisma/dbsqlc/sqlc.yaml b/internal/repository/prisma/dbsqlc/sqlc.yaml index 11626c680..e2a1a18ab 100644 --- a/internal/repository/prisma/dbsqlc/sqlc.yaml +++ b/internal/repository/prisma/dbsqlc/sqlc.yaml @@ -14,6 +14,7 @@ sql: - tickers.sql - dispatchers.sql - workers.sql + - logs.sql schema: - schema.sql strict_order_by: false diff --git a/internal/repository/prisma/log.go b/internal/repository/prisma/log.go new file mode 100644 index 000000000..a5f02609a --- /dev/null +++ b/internal/repository/prisma/log.go @@ -0,0 +1,190 @@ +package prisma + +import ( + "context" + "errors" + "fmt" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" + "github.com/rs/zerolog" + + "github.com/hatchet-dev/hatchet/internal/repository" + "github.com/hatchet-dev/hatchet/internal/repository/prisma/db" + "github.com/hatchet-dev/hatchet/internal/repository/prisma/dbsqlc" + "github.com/hatchet-dev/hatchet/internal/repository/prisma/sqlchelpers" + "github.com/hatchet-dev/hatchet/internal/validator" +) + +type logRepository struct { + client *db.PrismaClient + pool *pgxpool.Pool + v validator.Validator + queries *dbsqlc.Queries + l *zerolog.Logger +} + +func NewLogRepository(client *db.PrismaClient, pool *pgxpool.Pool, v validator.Validator, l *zerolog.Logger) repository.LogsRepository { + queries := dbsqlc.New() + + return 
&logRepository{ + client: client, + pool: pool, + v: v, + queries: queries, + l: l, + } +} + +func (r *logRepository) PutLog(tenantId string, opts *repository.CreateLogLineOpts) (*dbsqlc.LogLine, error) { + if err := r.v.Validate(opts); err != nil { + return nil, err + } + + createParams := dbsqlc.CreateLogLineParams{ + Tenantid: sqlchelpers.UUIDFromStr(tenantId), + Message: opts.Message, + Steprunid: sqlchelpers.UUIDFromStr(opts.StepRunId), + } + + if opts.CreatedAt != nil { + utcTime := opts.CreatedAt.UTC() + createParams.CreatedAt = sqlchelpers.TimestampFromTime(utcTime) + } + + if opts.Level != nil { + createParams.Level = dbsqlc.NullLogLineLevel{ + LogLineLevel: dbsqlc.LogLineLevel(*opts.Level), + Valid: true, + } + } + + if opts.Metadata != nil { + createParams.Metadata = opts.Metadata + } + + tx, err := r.pool.Begin(context.Background()) + + if err != nil { + return nil, err + } + + defer deferRollback(context.Background(), r.l, tx.Rollback) + + logLine, err := r.queries.CreateLogLine( + context.Background(), + tx, + createParams, + ) + + if err != nil { + return nil, fmt.Errorf("could not create log line: %w", err) + } + + err = tx.Commit(context.Background()) + + if err != nil { + return nil, fmt.Errorf("could not commit transaction: %w", err) + } + + return logLine, nil +} + +func (r *logRepository) ListLogLines(tenantId string, opts *repository.ListLogsOpts) (*repository.ListLogsResult, error) { + if err := r.v.Validate(opts); err != nil { + return nil, err + } + + res := &repository.ListLogsResult{} + + pgTenantId := sqlchelpers.UUIDFromStr(tenantId) + + queryParams := dbsqlc.ListLogLinesParams{ + Tenantid: pgTenantId, + } + + countParams := dbsqlc.CountLogLinesParams{ + Tenantid: pgTenantId, + } + + if opts.Search != nil { + queryParams.Search = sqlchelpers.TextFromStr(*opts.Search) + countParams.Search = sqlchelpers.TextFromStr(*opts.Search) + } + + if opts.Offset != nil { + queryParams.Offset = *opts.Offset + } + + if opts.Limit != nil { + 
queryParams.Limit = *opts.Limit + } + + if opts.StepRunId != nil { + queryParams.StepRunId = sqlchelpers.UUIDFromStr(*opts.StepRunId) + countParams.StepRunId = sqlchelpers.UUIDFromStr(*opts.StepRunId) + } + + if opts.Levels != nil { + var levels []dbsqlc.LogLineLevel + + for _, level := range opts.Levels { + levels = append(levels, dbsqlc.LogLineLevel(level)) + } + + queryParams.Levels = levels + countParams.Levels = levels + } + + orderByField := "createdAt" + orderByDirection := "DESC" + + if opts.OrderBy != nil { + orderByField = *opts.OrderBy + } + + if opts.OrderDirection != nil { + orderByDirection = *opts.OrderDirection + } + + queryParams.OrderBy = sqlchelpers.TextFromStr(orderByField + " " + orderByDirection) + + tx, err := r.pool.Begin(context.Background()) + + if err != nil { + return nil, err + } + + defer deferRollback(context.Background(), r.l, tx.Rollback) + + logLines, err := r.queries.ListLogLines(context.Background(), tx, queryParams) + + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + logLines = make([]*dbsqlc.LogLine, 0) + } else { + return nil, fmt.Errorf("could not list log lines: %w", err) + } + } + + count, err := r.queries.CountLogLines(context.Background(), tx, countParams) + + if err != nil { + if errors.Is(err, pgx.ErrNoRows) { + count = 0 + } else { + return nil, fmt.Errorf("could not count events: %w", err) + } + } + + err = tx.Commit(context.Background()) + + if err != nil { + return nil, fmt.Errorf("could not commit transaction: %w", err) + } + + res.Rows = logLines + res.Count = int(count) + + return res, nil +} diff --git a/internal/repository/prisma/repository.go b/internal/repository/prisma/repository.go index b9dd04071..1f12a4b07 100644 --- a/internal/repository/prisma/repository.go +++ b/internal/repository/prisma/repository.go @@ -12,6 +12,7 @@ import ( type prismaRepository struct { apiToken repository.APITokenRepository event repository.EventRepository + log repository.LogsRepository tenant repository.TenantRepository 
tenantInvite repository.TenantInviteRepository workflow repository.WorkflowRepository @@ -67,6 +68,7 @@ func NewPrismaRepository(client *db.PrismaClient, pool *pgxpool.Pool, fs ...Pris return &prismaRepository{ apiToken: NewAPITokenRepository(client, opts.v), event: NewEventRepository(client, pool, opts.v, opts.l), + log: NewLogRepository(client, pool, opts.v, opts.l), tenant: NewTenantRepository(client, opts.v), tenantInvite: NewTenantInviteRepository(client, opts.v), workflow: NewWorkflowRepository(client, pool, opts.v, opts.l), @@ -97,6 +99,10 @@ func (r *prismaRepository) Event() repository.EventRepository { return r.event } +func (r *prismaRepository) Log() repository.LogsRepository { + return r.log +} + func (r *prismaRepository) Tenant() repository.TenantRepository { return r.tenant } diff --git a/internal/repository/repository.go b/internal/repository/repository.go index f05f7b7ca..b108a1609 100644 --- a/internal/repository/repository.go +++ b/internal/repository/repository.go @@ -4,6 +4,7 @@ type Repository interface { Health() HealthRepository APIToken() APITokenRepository Event() EventRepository + Log() LogsRepository Tenant() TenantRepository TenantInvite() TenantInviteRepository Workflow() WorkflowRepository diff --git a/internal/services/ingestor/contracts/events.pb.go b/internal/services/ingestor/contracts/events.pb.go index 93b3c91a7..ccad481c0 100644 --- a/internal/services/ingestor/contracts/events.pb.go +++ b/internal/services/ingestor/contracts/events.pb.go @@ -105,6 +105,128 @@ func (x *Event) GetEventTimestamp() *timestamppb.Timestamp { return nil } +type PutLogRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // the step run id for the request + StepRunId string `protobuf:"bytes,1,opt,name=stepRunId,proto3" json:"stepRunId,omitempty"` + // when the log line was created + CreatedAt *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=createdAt,proto3" 
json:"createdAt,omitempty"` + // the log line message + Message string `protobuf:"bytes,3,opt,name=message,proto3" json:"message,omitempty"` + // the log line level + Level *string `protobuf:"bytes,4,opt,name=level,proto3,oneof" json:"level,omitempty"` + // associated log line metadata + Metadata string `protobuf:"bytes,5,opt,name=metadata,proto3" json:"metadata,omitempty"` +} + +func (x *PutLogRequest) Reset() { + *x = PutLogRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_events_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PutLogRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PutLogRequest) ProtoMessage() {} + +func (x *PutLogRequest) ProtoReflect() protoreflect.Message { + mi := &file_events_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PutLogRequest.ProtoReflect.Descriptor instead. 
+func (*PutLogRequest) Descriptor() ([]byte, []int) { + return file_events_proto_rawDescGZIP(), []int{1} +} + +func (x *PutLogRequest) GetStepRunId() string { + if x != nil { + return x.StepRunId + } + return "" +} + +func (x *PutLogRequest) GetCreatedAt() *timestamppb.Timestamp { + if x != nil { + return x.CreatedAt + } + return nil +} + +func (x *PutLogRequest) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *PutLogRequest) GetLevel() string { + if x != nil && x.Level != nil { + return *x.Level + } + return "" +} + +func (x *PutLogRequest) GetMetadata() string { + if x != nil { + return x.Metadata + } + return "" +} + +type PutLogResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *PutLogResponse) Reset() { + *x = PutLogResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_events_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *PutLogResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*PutLogResponse) ProtoMessage() {} + +func (x *PutLogResponse) ProtoReflect() protoreflect.Message { + mi := &file_events_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use PutLogResponse.ProtoReflect.Descriptor instead. 
+func (*PutLogResponse) Descriptor() ([]byte, []int) { + return file_events_proto_rawDescGZIP(), []int{2} +} + type PushEventRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -121,7 +243,7 @@ type PushEventRequest struct { func (x *PushEventRequest) Reset() { *x = PushEventRequest{} if protoimpl.UnsafeEnabled { - mi := &file_events_proto_msgTypes[1] + mi := &file_events_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -134,7 +256,7 @@ func (x *PushEventRequest) String() string { func (*PushEventRequest) ProtoMessage() {} func (x *PushEventRequest) ProtoReflect() protoreflect.Message { - mi := &file_events_proto_msgTypes[1] + mi := &file_events_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -147,7 +269,7 @@ func (x *PushEventRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use PushEventRequest.ProtoReflect.Descriptor instead. 
func (*PushEventRequest) Descriptor() ([]byte, []int) { - return file_events_proto_rawDescGZIP(), []int{1} + return file_events_proto_rawDescGZIP(), []int{3} } func (x *PushEventRequest) GetKey() string { @@ -185,7 +307,7 @@ type ListEventRequest struct { func (x *ListEventRequest) Reset() { *x = ListEventRequest{} if protoimpl.UnsafeEnabled { - mi := &file_events_proto_msgTypes[2] + mi := &file_events_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -198,7 +320,7 @@ func (x *ListEventRequest) String() string { func (*ListEventRequest) ProtoMessage() {} func (x *ListEventRequest) ProtoReflect() protoreflect.Message { - mi := &file_events_proto_msgTypes[2] + mi := &file_events_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -211,7 +333,7 @@ func (x *ListEventRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ListEventRequest.ProtoReflect.Descriptor instead. 
func (*ListEventRequest) Descriptor() ([]byte, []int) { - return file_events_proto_rawDescGZIP(), []int{2} + return file_events_proto_rawDescGZIP(), []int{4} } func (x *ListEventRequest) GetOffset() int32 { @@ -240,7 +362,7 @@ type ListEventResponse struct { func (x *ListEventResponse) Reset() { *x = ListEventResponse{} if protoimpl.UnsafeEnabled { - mi := &file_events_proto_msgTypes[3] + mi := &file_events_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -253,7 +375,7 @@ func (x *ListEventResponse) String() string { func (*ListEventResponse) ProtoMessage() {} func (x *ListEventResponse) ProtoReflect() protoreflect.Message { - mi := &file_events_proto_msgTypes[3] + mi := &file_events_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -266,7 +388,7 @@ func (x *ListEventResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use ListEventResponse.ProtoReflect.Descriptor instead. 
func (*ListEventResponse) Descriptor() ([]byte, []int) { - return file_events_proto_rawDescGZIP(), []int{3} + return file_events_proto_rawDescGZIP(), []int{5} } func (x *ListEventResponse) GetEvents() []*Event { @@ -288,7 +410,7 @@ type ReplayEventRequest struct { func (x *ReplayEventRequest) Reset() { *x = ReplayEventRequest{} if protoimpl.UnsafeEnabled { - mi := &file_events_proto_msgTypes[4] + mi := &file_events_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -301,7 +423,7 @@ func (x *ReplayEventRequest) String() string { func (*ReplayEventRequest) ProtoMessage() {} func (x *ReplayEventRequest) ProtoReflect() protoreflect.Message { - mi := &file_events_proto_msgTypes[4] + mi := &file_events_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -314,7 +436,7 @@ func (x *ReplayEventRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use ReplayEventRequest.ProtoReflect.Descriptor instead. 
func (*ReplayEventRequest) Descriptor() ([]byte, []int) { - return file_events_proto_rawDescGZIP(), []int{4} + return file_events_proto_rawDescGZIP(), []int{6} } func (x *ReplayEventRequest) GetEventId() string { @@ -341,40 +463,56 @@ var file_events_proto_rawDesc = []byte{ 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, - 0x82, 0x01, 0x0a, 0x10, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, - 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, - 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, - 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x22, 0x3c, 0x0a, 0x10, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, - 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, - 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, - 0x65, 0x79, 0x22, 0x33, 0x0a, 0x11, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1e, 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, - 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x06, 0x2e, 
0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, - 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x2e, 0x0a, 0x12, 0x52, 0x65, 0x70, 0x6c, 0x61, - 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, - 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, - 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x32, 0x99, 0x01, 0x0a, 0x0d, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x23, 0x0a, 0x04, 0x50, 0x75, 0x73, - 0x68, 0x12, 0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2f, - 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x11, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x12, 0x2e, 0x4c, 0x69, 0x73, 0x74, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, - 0x32, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x22, 0x00, 0x42, 0x47, 0x5a, 0x45, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, - 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, - 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x64, 0x69, 0x73, 0x70, 0x61, 0x74, 0x63, 0x68, - 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x62, 0x06, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x33, + 0xc2, 0x01, 0x0a, 0x0d, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 
0x49, 0x64, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, + 0x38, 0x0a, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, + 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, + 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, + 0x61, 0x67, 0x65, 0x12, 0x19, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x88, 0x01, 0x01, 0x12, 0x1a, + 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x6c, + 0x65, 0x76, 0x65, 0x6c, 0x22, 0x10, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x82, 0x01, 0x0a, 0x10, 0x50, 0x75, 0x73, 0x68, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, + 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a, + 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x3c, 0x0a, 0x10, 0x4c, + 0x69, 0x73, 
0x74, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, + 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x22, 0x33, 0x0a, 0x11, 0x4c, 0x69, 0x73, + 0x74, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1e, + 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x06, + 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x2e, + 0x0a, 0x12, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x32, 0xc6, + 0x01, 0x0a, 0x0d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, + 0x12, 0x23, 0x0a, 0x04, 0x50, 0x75, 0x73, 0x68, 0x12, 0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2f, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x11, 0x2e, + 0x4c, 0x69, 0x73, 0x74, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x12, 0x2e, 0x4c, 0x69, 0x73, 0x74, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x32, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, + 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, + 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2b, 0x0a, 0x06, 0x50, 0x75, + 0x74, 0x4c, 0x6f, 0x67, 0x12, 0x0e, 0x2e, 0x50, 
0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x47, 0x5a, 0x45, 0x67, 0x69, 0x74, 0x68, 0x75, + 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, + 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, + 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x64, 0x69, 0x73, 0x70, + 0x61, 0x74, 0x63, 0x68, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, + 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -389,30 +527,35 @@ func file_events_proto_rawDescGZIP() []byte { return file_events_proto_rawDescData } -var file_events_proto_msgTypes = make([]protoimpl.MessageInfo, 5) +var file_events_proto_msgTypes = make([]protoimpl.MessageInfo, 7) var file_events_proto_goTypes = []interface{}{ (*Event)(nil), // 0: Event - (*PushEventRequest)(nil), // 1: PushEventRequest - (*ListEventRequest)(nil), // 2: ListEventRequest - (*ListEventResponse)(nil), // 3: ListEventResponse - (*ReplayEventRequest)(nil), // 4: ReplayEventRequest - (*timestamppb.Timestamp)(nil), // 5: google.protobuf.Timestamp + (*PutLogRequest)(nil), // 1: PutLogRequest + (*PutLogResponse)(nil), // 2: PutLogResponse + (*PushEventRequest)(nil), // 3: PushEventRequest + (*ListEventRequest)(nil), // 4: ListEventRequest + (*ListEventResponse)(nil), // 5: ListEventResponse + (*ReplayEventRequest)(nil), // 6: ReplayEventRequest + (*timestamppb.Timestamp)(nil), // 7: google.protobuf.Timestamp } var file_events_proto_depIdxs = []int32{ - 5, // 0: Event.eventTimestamp:type_name -> google.protobuf.Timestamp - 5, // 1: PushEventRequest.eventTimestamp:type_name -> google.protobuf.Timestamp - 0, // 2: ListEventResponse.events:type_name -> Event - 1, // 3: EventsService.Push:input_type -> PushEventRequest - 2, // 4: 
EventsService.List:input_type -> ListEventRequest - 4, // 5: EventsService.ReplaySingleEvent:input_type -> ReplayEventRequest - 0, // 6: EventsService.Push:output_type -> Event - 3, // 7: EventsService.List:output_type -> ListEventResponse - 0, // 8: EventsService.ReplaySingleEvent:output_type -> Event - 6, // [6:9] is the sub-list for method output_type - 3, // [3:6] is the sub-list for method input_type - 3, // [3:3] is the sub-list for extension type_name - 3, // [3:3] is the sub-list for extension extendee - 0, // [0:3] is the sub-list for field type_name + 7, // 0: Event.eventTimestamp:type_name -> google.protobuf.Timestamp + 7, // 1: PutLogRequest.createdAt:type_name -> google.protobuf.Timestamp + 7, // 2: PushEventRequest.eventTimestamp:type_name -> google.protobuf.Timestamp + 0, // 3: ListEventResponse.events:type_name -> Event + 3, // 4: EventsService.Push:input_type -> PushEventRequest + 4, // 5: EventsService.List:input_type -> ListEventRequest + 6, // 6: EventsService.ReplaySingleEvent:input_type -> ReplayEventRequest + 1, // 7: EventsService.PutLog:input_type -> PutLogRequest + 0, // 8: EventsService.Push:output_type -> Event + 5, // 9: EventsService.List:output_type -> ListEventResponse + 0, // 10: EventsService.ReplaySingleEvent:output_type -> Event + 2, // 11: EventsService.PutLog:output_type -> PutLogResponse + 8, // [8:12] is the sub-list for method output_type + 4, // [4:8] is the sub-list for method input_type + 4, // [4:4] is the sub-list for extension type_name + 4, // [4:4] is the sub-list for extension extendee + 0, // [0:4] is the sub-list for field type_name } func init() { file_events_proto_init() } @@ -434,7 +577,7 @@ func file_events_proto_init() { } } file_events_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*PushEventRequest); i { + switch v := v.(*PutLogRequest); i { case 0: return &v.state case 1: @@ -446,7 +589,7 @@ func file_events_proto_init() { } } file_events_proto_msgTypes[2].Exporter = 
func(v interface{}, i int) interface{} { - switch v := v.(*ListEventRequest); i { + switch v := v.(*PutLogResponse); i { case 0: return &v.state case 1: @@ -458,7 +601,7 @@ func file_events_proto_init() { } } file_events_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ListEventResponse); i { + switch v := v.(*PushEventRequest); i { case 0: return &v.state case 1: @@ -470,6 +613,30 @@ func file_events_proto_init() { } } file_events_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListEventRequest); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_events_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*ListEventResponse); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_events_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ReplayEventRequest); i { case 0: return &v.state @@ -482,13 +649,14 @@ func file_events_proto_init() { } } } + file_events_proto_msgTypes[1].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_events_proto_rawDesc, NumEnums: 0, - NumMessages: 5, + NumMessages: 7, NumExtensions: 0, NumServices: 1, }, diff --git a/internal/services/ingestor/contracts/events_grpc.pb.go b/internal/services/ingestor/contracts/events_grpc.pb.go index 6a009cc8a..0acd8ad13 100644 --- a/internal/services/ingestor/contracts/events_grpc.pb.go +++ b/internal/services/ingestor/contracts/events_grpc.pb.go @@ -25,6 +25,7 @@ type EventsServiceClient interface { Push(ctx context.Context, in *PushEventRequest, opts ...grpc.CallOption) (*Event, error) List(ctx context.Context, in *ListEventRequest, opts ...grpc.CallOption) 
(*ListEventResponse, error) ReplaySingleEvent(ctx context.Context, in *ReplayEventRequest, opts ...grpc.CallOption) (*Event, error) + PutLog(ctx context.Context, in *PutLogRequest, opts ...grpc.CallOption) (*PutLogResponse, error) } type eventsServiceClient struct { @@ -62,6 +63,15 @@ func (c *eventsServiceClient) ReplaySingleEvent(ctx context.Context, in *ReplayE return out, nil } +func (c *eventsServiceClient) PutLog(ctx context.Context, in *PutLogRequest, opts ...grpc.CallOption) (*PutLogResponse, error) { + out := new(PutLogResponse) + err := c.cc.Invoke(ctx, "/EventsService/PutLog", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // EventsServiceServer is the server API for EventsService service. // All implementations must embed UnimplementedEventsServiceServer // for forward compatibility @@ -69,6 +79,7 @@ type EventsServiceServer interface { Push(context.Context, *PushEventRequest) (*Event, error) List(context.Context, *ListEventRequest) (*ListEventResponse, error) ReplaySingleEvent(context.Context, *ReplayEventRequest) (*Event, error) + PutLog(context.Context, *PutLogRequest) (*PutLogResponse, error) mustEmbedUnimplementedEventsServiceServer() } @@ -85,6 +96,9 @@ func (UnimplementedEventsServiceServer) List(context.Context, *ListEventRequest) func (UnimplementedEventsServiceServer) ReplaySingleEvent(context.Context, *ReplayEventRequest) (*Event, error) { return nil, status.Errorf(codes.Unimplemented, "method ReplaySingleEvent not implemented") } +func (UnimplementedEventsServiceServer) PutLog(context.Context, *PutLogRequest) (*PutLogResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method PutLog not implemented") +} func (UnimplementedEventsServiceServer) mustEmbedUnimplementedEventsServiceServer() {} // UnsafeEventsServiceServer may be embedded to opt out of forward compatibility for this service. 
@@ -152,6 +166,24 @@ func _EventsService_ReplaySingleEvent_Handler(srv interface{}, ctx context.Conte return interceptor(ctx, in, info, handler) } +func _EventsService_PutLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PutLogRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventsServiceServer).PutLog(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/EventsService/PutLog", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventsServiceServer).PutLog(ctx, req.(*PutLogRequest)) + } + return interceptor(ctx, in, info, handler) +} + // EventsService_ServiceDesc is the grpc.ServiceDesc for EventsService service. // It's only intended for direct use with grpc.RegisterService, // and not to be introspected or modified (even as a copy) @@ -171,6 +203,10 @@ var EventsService_ServiceDesc = grpc.ServiceDesc{ MethodName: "ReplaySingleEvent", Handler: _EventsService_ReplaySingleEvent_Handler, }, + { + MethodName: "PutLog", + Handler: _EventsService_PutLog_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "events.proto", diff --git a/internal/services/ingestor/ingestor.go b/internal/services/ingestor/ingestor.go index 5c17c73af..91d3471df 100644 --- a/internal/services/ingestor/ingestor.go +++ b/internal/services/ingestor/ingestor.go @@ -25,6 +25,7 @@ type IngestorOptFunc func(*IngestorOpts) type IngestorOpts struct { eventRepository repository.EventRepository + logRepository repository.LogsRepository taskQueue taskqueue.TaskQueue } @@ -34,6 +35,12 @@ func WithEventRepository(r repository.EventRepository) IngestorOptFunc { } } +func WithLogRepository(r repository.LogsRepository) IngestorOptFunc { + return func(opts *IngestorOpts) { + opts.logRepository = r + } +} + func WithTaskQueue(tq taskqueue.TaskQueue) IngestorOptFunc { return 
func(opts *IngestorOpts) { opts.taskQueue = tq @@ -48,6 +55,7 @@ type IngestorImpl struct { contracts.UnimplementedEventsServiceServer eventRepository repository.EventRepository + logRepository repository.LogsRepository tq taskqueue.TaskQueue } @@ -62,12 +70,17 @@ func NewIngestor(fs ...IngestorOptFunc) (Ingestor, error) { return nil, fmt.Errorf("event repository is required. use WithEventRepository") } + if opts.logRepository == nil { + return nil, fmt.Errorf("log repository is required. use WithLogRepository") + } + if opts.taskQueue == nil { return nil, fmt.Errorf("task queue is required. use WithTaskQueue") } return &IngestorImpl{ eventRepository: opts.eventRepository, + logRepository: opts.logRepository, tq: opts.taskQueue, }, nil } diff --git a/internal/services/ingestor/server.go b/internal/services/ingestor/server.go index 3673dfc17..8e34ccfc7 100644 --- a/internal/services/ingestor/server.go +++ b/internal/services/ingestor/server.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "time" "google.golang.org/protobuf/types/known/timestamppb" @@ -101,6 +102,36 @@ func (i *IngestorImpl) ReplaySingleEvent(ctx context.Context, req *contracts.Rep return e, nil } +func (i *IngestorImpl) PutLog(ctx context.Context, req *contracts.PutLogRequest) (*contracts.PutLogResponse, error) { + tenant := ctx.Value("tenant").(*db.TenantModel) + + var createdAt *time.Time + + if t := req.CreatedAt.AsTime(); !t.IsZero() { + createdAt = &t + } + + var metadata []byte + + if req.Metadata != "" { + metadata = []byte(req.Metadata) + } + + _, err := i.logRepository.PutLog(tenant.ID, &repository.CreateLogLineOpts{ + StepRunId: req.StepRunId, + CreatedAt: createdAt, + Message: req.Message, + Level: req.Level, + Metadata: metadata, + }) + + if err != nil { + return nil, err + } + + return &contracts.PutLogResponse{}, nil +} + func toEventFromSQLC(eventRow *dbsqlc.ListEventsRow) (*contracts.Event, error) { event := eventRow.Event diff --git 
a/prisma/migrations/20240229232811_v0_14_0/migration.sql b/prisma/migrations/20240229232811_v0_14_0/migration.sql new file mode 100644 index 000000000..d1e9d658b --- /dev/null +++ b/prisma/migrations/20240229232811_v0_14_0/migration.sql @@ -0,0 +1,21 @@ +-- CreateEnum +CREATE TYPE "LogLineLevel" AS ENUM ('DEBUG', 'INFO', 'WARN', 'ERROR'); + +-- CreateTable +CREATE TABLE + "LogLine" ( + "id" BIGSERIAL NOT NULL, + "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, + "tenantId" UUID NOT NULL, + "stepRunId" UUID, + "message" TEXT NOT NULL, + "level" "LogLineLevel" NOT NULL DEFAULT 'INFO', + "metadata" JSONB, + CONSTRAINT "LogLine_pkey" PRIMARY KEY ("id") + ); + +-- AddForeignKey +ALTER TABLE "LogLine" ADD CONSTRAINT "LogLine_tenantId_fkey" FOREIGN KEY ("tenantId") REFERENCES "Tenant" ("id") ON DELETE CASCADE ON UPDATE CASCADE; + +-- AddForeignKey +ALTER TABLE "LogLine" ADD CONSTRAINT "LogLine_stepRunId_fkey" FOREIGN KEY ("stepRunId") REFERENCES "StepRun" ("id") ON DELETE SET NULL ON UPDATE CASCADE; \ No newline at end of file diff --git a/prisma/schema.prisma b/prisma/schema.prisma index 80790b430..5b0554855 100644 --- a/prisma/schema.prisma +++ b/prisma/schema.prisma @@ -125,6 +125,7 @@ model Tenant { githubPullRequests GithubPullRequest[] githubPullRequestComments GithubPullRequestComment[] githubWebhooks GithubWebhook[] + logs LogLine[] } enum TenantMemberRole { @@ -868,6 +869,8 @@ model StepRun { gitRepoBranch String? 
archivedResults StepRunResultArchive[] + + logs LogLine[] } model StepRunResultArchive { @@ -1184,3 +1187,33 @@ model GithubWebhook { @@unique([tenantId, repositoryOwner, repositoryName]) } + +enum LogLineLevel { + DEBUG + INFO + WARN + ERROR +} + +model LogLine { + // base fields + id BigInt @id @default(autoincrement()) @db.BigInt + createdAt DateTime @default(now()) + + // the parent tenant + tenant Tenant @relation(fields: [tenantId], references: [id], onDelete: Cascade, onUpdate: Cascade) + tenantId String @db.Uuid + + // the step run id this log is associated with + stepRun StepRun? @relation(fields: [stepRunId], references: [id], onDelete: SetNull, onUpdate: Cascade) + stepRunId String? @db.Uuid + + // the log line message + message String + + // the log line level + level LogLineLevel @default(INFO) + + // (optional) the log line metadata + metadata Json? +} diff --git a/python-sdk/examples/logger/worker.py b/python-sdk/examples/logger/worker.py new file mode 100644 index 000000000..77a688abe --- /dev/null +++ b/python-sdk/examples/logger/worker.py @@ -0,0 +1,25 @@ +import time +from hatchet_sdk import Hatchet, Context +from dotenv import load_dotenv + +load_dotenv() + +hatchet = Hatchet() + +@hatchet.workflow(on_events=["user:create"],schedule_timeout="10m") +class LoggingWorkflow: + @hatchet.step() + def logger(self, context : Context): + + for i in range(1000): + context.log(f"Logging message {i}") + + return { + "step1": "completed", + } + +workflow = LoggingWorkflow() +worker = hatchet.worker('logging-worker-py') +worker.register_workflow(workflow) + +worker.start() \ No newline at end of file diff --git a/python-sdk/hatchet_sdk/clients/events.py b/python-sdk/hatchet_sdk/clients/events.py index 8297129c6..e5a6e1381 100644 --- a/python-sdk/hatchet_sdk/clients/events.py +++ b/python-sdk/hatchet_sdk/clients/events.py @@ -1,5 +1,5 @@ from ..events_pb2_grpc import EventsServiceStub -from ..events_pb2 import PushEventRequest +from ..events_pb2 import 
PushEventRequest, PutLogRequest import datetime from ..loader import ClientConfig @@ -14,6 +14,13 @@ def new_event(conn, config: ClientConfig): token=config.token, ) +def proto_timestamp_now(): + t = datetime.datetime.now().timestamp() + seconds = int(t) + nanos = int(t % 1 * 1e9) + + return timestamp_pb2.Timestamp(seconds=seconds, nanos=nanos) + class EventClientImpl: def __init__(self, client, token): self.client = client @@ -28,10 +35,22 @@ class EventClientImpl: request = PushEventRequest( key=event_key, payload=payload_bytes, - eventTimestamp=timestamp_pb2.Timestamp().FromDatetime(datetime.datetime.now()), + eventTimestamp=proto_timestamp_now(), ) try: self.client.Push(request, metadata=get_metadata(self.token)) except grpc.RpcError as e: raise ValueError(f"gRPC error: {e}") + + def log(self, message: str, step_run_id: str): + try: + request = PutLogRequest( + stepRunId=step_run_id, + createdAt=proto_timestamp_now(), + message=message, + ) + + self.client.PutLog(request, metadata=get_metadata(self.token)) + except Exception as e: + raise ValueError(f"Error logging: {e}") \ No newline at end of file diff --git a/python-sdk/hatchet_sdk/context.py b/python-sdk/hatchet_sdk/context.py index 3a3897a34..c3ac88bdb 100644 --- a/python-sdk/hatchet_sdk/context.py +++ b/python-sdk/hatchet_sdk/context.py @@ -1,8 +1,13 @@ +from concurrent.futures import ThreadPoolExecutor +import datetime import inspect from multiprocessing import Event import os from .clients.dispatcher import Action, DispatcherClient +from google.protobuf import timestamp_pb2 +from .clients.events import EventClientImpl from .dispatcher_pb2 import OverridesData +from .events_pb2 import PutLogRequest from .logger import logger import json @@ -12,7 +17,7 @@ def get_caller_file_path(): return caller_frame.filename class Context: - def __init__(self, action: Action, client: DispatcherClient): + def __init__(self, action: Action, client: DispatcherClient, eventClient: EventClientImpl): try: self.data = 
json.loads(action.action_payload) except Exception as e: @@ -21,6 +26,11 @@ class Context: self.stepRunId = action.step_run_id self.exit_flag = Event() self.client = client + self.eventClient = eventClient + + # FIXME: this limits the number of concurrent log requests to 1, which means we can do about + # 100 log lines per second but this depends on network. + self.logger_thread_pool = ThreadPoolExecutor(max_workers=1) # store each key in the overrides field in a lookup table # overrides_data is a dictionary of key-value pairs @@ -73,4 +83,16 @@ class Context: ) ) - return default \ No newline at end of file + return default + + def _log(self, line: str): + try: + self.eventClient.log(message=line, step_run_id=self.stepRunId) + except Exception as e: + logger.error(f"Error logging: {e}") + + def log(self, line: str): + if self.stepRunId == "": + return + + self.logger_thread_pool.submit(self._log, line) diff --git a/python-sdk/hatchet_sdk/dispatcher_pb2.py b/python-sdk/hatchet_sdk/dispatcher_pb2.py index bb37c5efc..5984c6f25 100644 --- a/python-sdk/hatchet_sdk/dispatcher_pb2.py +++ b/python-sdk/hatchet_sdk/dispatcher_pb2.py @@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default() from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"p\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x42\n\n\x08_maxRuns\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\xf2\x01\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 
\x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\xec\x01\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"9\n SubscribeToWorkflowEventsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xe0\x01\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 
\x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\x8a\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xde\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x32\xe4\x03\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') +DESCRIPTOR = 
_descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"p\n\x15WorkerRegisterRequest\x12\x12\n\nworkerName\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x14\n\x07maxRuns\x18\x04 \x01(\x05H\x00\x88\x01\x01\x42\n\n\x08_maxRuns\"P\n\x16WorkerRegisterResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\x12\x12\n\nworkerName\x18\x03 \x01(\t\"\x84\x02\n\x0e\x41ssignedAction\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\r\n\x05jobId\x18\x04 \x01(\t\x12\x0f\n\x07jobName\x18\x05 \x01(\t\x12\x10\n\x08jobRunId\x18\x06 \x01(\t\x12\x0e\n\x06stepId\x18\x07 \x01(\t\x12\x11\n\tstepRunId\x18\x08 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\t \x01(\t\x12\x1f\n\nactionType\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x15\n\ractionPayload\x18\x0b \x01(\t\x12\x10\n\x08stepName\x18\x0c \x01(\t\"\'\n\x13WorkerListenRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\",\n\x18WorkerUnsubscribeRequest\x12\x10\n\x08workerId\x18\x01 \x01(\t\"?\n\x19WorkerUnsubscribeResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"\xe1\x01\n\x13GroupKeyActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\x15\n\rworkflowRunId\x18\x02 \x01(\t\x12\x18\n\x10getGroupKeyRunId\x18\x03 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\teventType\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x14\n\x0c\x65ventPayload\x18\x07 \x01(\t\"\xec\x01\n\x0fStepActionEvent\x12\x10\n\x08workerId\x18\x01 \x01(\t\x12\r\n\x05jobId\x18\x02 \x01(\t\x12\x10\n\x08jobRunId\x18\x03 \x01(\t\x12\x0e\n\x06stepId\x18\x04 \x01(\t\x12\x11\n\tstepRunId\x18\x05 \x01(\t\x12\x10\n\x08\x61\x63tionId\x18\x06 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x07 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\teventType\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x14\n\x0c\x65ventPayload\x18\t \x01(\t\"9\n\x13\x41\x63tionEventResponse\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x10\n\x08workerId\x18\x02 \x01(\t\"9\n SubscribeToWorkflowEventsRequest\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\"\xe0\x01\n\rWorkflowEvent\x12\x15\n\rworkflowRunId\x18\x01 \x01(\t\x12#\n\x0cresourceType\x18\x02 \x01(\x0e\x32\r.ResourceType\x12%\n\teventType\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x12\n\nresourceId\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x65ventPayload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\"W\n\rOverridesData\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x16\n\x0e\x63\x61llerFilename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 
\n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\x8a\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xde\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x32\xe4\x03\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -23,42 +23,42 @@ _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'dispatcher_pb2', _globals) if _descriptor._USE_C_DESCRIPTORS == False: _globals['DESCRIPTOR']._options = None _globals['DESCRIPTOR']._serialized_options = b'ZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts' - 
_globals['_ACTIONTYPE']._serialized_start=1572 - _globals['_ACTIONTYPE']._serialized_end=1650 - _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=1653 - _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=1815 - _globals['_STEPACTIONEVENTTYPE']._serialized_start=1818 - _globals['_STEPACTIONEVENTTYPE']._serialized_end=1956 - _globals['_RESOURCETYPE']._serialized_start=1958 - _globals['_RESOURCETYPE']._serialized_end=2059 - _globals['_RESOURCEEVENTTYPE']._serialized_start=2062 - _globals['_RESOURCEEVENTTYPE']._serialized_end=2284 + _globals['_ACTIONTYPE']._serialized_start=1590 + _globals['_ACTIONTYPE']._serialized_end=1668 + _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=1671 + _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=1833 + _globals['_STEPACTIONEVENTTYPE']._serialized_start=1836 + _globals['_STEPACTIONEVENTTYPE']._serialized_end=1974 + _globals['_RESOURCETYPE']._serialized_start=1976 + _globals['_RESOURCETYPE']._serialized_end=2077 + _globals['_RESOURCEEVENTTYPE']._serialized_start=2080 + _globals['_RESOURCEEVENTTYPE']._serialized_end=2302 _globals['_WORKERREGISTERREQUEST']._serialized_start=53 _globals['_WORKERREGISTERREQUEST']._serialized_end=165 _globals['_WORKERREGISTERRESPONSE']._serialized_start=167 _globals['_WORKERREGISTERRESPONSE']._serialized_end=247 _globals['_ASSIGNEDACTION']._serialized_start=250 - _globals['_ASSIGNEDACTION']._serialized_end=492 - _globals['_WORKERLISTENREQUEST']._serialized_start=494 - _globals['_WORKERLISTENREQUEST']._serialized_end=533 - _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_start=535 - _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_end=579 - _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_start=581 - _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_end=644 - _globals['_GROUPKEYACTIONEVENT']._serialized_start=647 - _globals['_GROUPKEYACTIONEVENT']._serialized_end=872 - _globals['_STEPACTIONEVENT']._serialized_start=875 - _globals['_STEPACTIONEVENT']._serialized_end=1111 - 
_globals['_ACTIONEVENTRESPONSE']._serialized_start=1113 - _globals['_ACTIONEVENTRESPONSE']._serialized_end=1170 - _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_start=1172 - _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_end=1229 - _globals['_WORKFLOWEVENT']._serialized_start=1232 - _globals['_WORKFLOWEVENT']._serialized_end=1456 - _globals['_OVERRIDESDATA']._serialized_start=1458 - _globals['_OVERRIDESDATA']._serialized_end=1545 - _globals['_OVERRIDESDATARESPONSE']._serialized_start=1547 - _globals['_OVERRIDESDATARESPONSE']._serialized_end=1570 - _globals['_DISPATCHER']._serialized_start=2287 - _globals['_DISPATCHER']._serialized_end=2771 + _globals['_ASSIGNEDACTION']._serialized_end=510 + _globals['_WORKERLISTENREQUEST']._serialized_start=512 + _globals['_WORKERLISTENREQUEST']._serialized_end=551 + _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_start=553 + _globals['_WORKERUNSUBSCRIBEREQUEST']._serialized_end=597 + _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_start=599 + _globals['_WORKERUNSUBSCRIBERESPONSE']._serialized_end=662 + _globals['_GROUPKEYACTIONEVENT']._serialized_start=665 + _globals['_GROUPKEYACTIONEVENT']._serialized_end=890 + _globals['_STEPACTIONEVENT']._serialized_start=893 + _globals['_STEPACTIONEVENT']._serialized_end=1129 + _globals['_ACTIONEVENTRESPONSE']._serialized_start=1131 + _globals['_ACTIONEVENTRESPONSE']._serialized_end=1188 + _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_start=1190 + _globals['_SUBSCRIBETOWORKFLOWEVENTSREQUEST']._serialized_end=1247 + _globals['_WORKFLOWEVENT']._serialized_start=1250 + _globals['_WORKFLOWEVENT']._serialized_end=1474 + _globals['_OVERRIDESDATA']._serialized_start=1476 + _globals['_OVERRIDESDATA']._serialized_end=1563 + _globals['_OVERRIDESDATARESPONSE']._serialized_start=1565 + _globals['_OVERRIDESDATARESPONSE']._serialized_end=1588 + _globals['_DISPATCHER']._serialized_start=2305 + _globals['_DISPATCHER']._serialized_end=2789 # 
@@protoc_insertion_point(module_scope) diff --git a/python-sdk/hatchet_sdk/dispatcher_pb2.pyi b/python-sdk/hatchet_sdk/dispatcher_pb2.pyi index 3df6a60cd..d6806835a 100644 --- a/python-sdk/hatchet_sdk/dispatcher_pb2.pyi +++ b/python-sdk/hatchet_sdk/dispatcher_pb2.pyi @@ -85,7 +85,7 @@ class WorkerRegisterResponse(_message.Message): def __init__(self, tenantId: _Optional[str] = ..., workerId: _Optional[str] = ..., workerName: _Optional[str] = ...) -> None: ... class AssignedAction(_message.Message): - __slots__ = ("tenantId", "workflowRunId", "getGroupKeyRunId", "jobId", "jobName", "jobRunId", "stepId", "stepRunId", "actionId", "actionType", "actionPayload") + __slots__ = ("tenantId", "workflowRunId", "getGroupKeyRunId", "jobId", "jobName", "jobRunId", "stepId", "stepRunId", "actionId", "actionType", "actionPayload", "stepName") TENANTID_FIELD_NUMBER: _ClassVar[int] WORKFLOWRUNID_FIELD_NUMBER: _ClassVar[int] GETGROUPKEYRUNID_FIELD_NUMBER: _ClassVar[int] @@ -97,6 +97,7 @@ class AssignedAction(_message.Message): ACTIONID_FIELD_NUMBER: _ClassVar[int] ACTIONTYPE_FIELD_NUMBER: _ClassVar[int] ACTIONPAYLOAD_FIELD_NUMBER: _ClassVar[int] + STEPNAME_FIELD_NUMBER: _ClassVar[int] tenantId: str workflowRunId: str getGroupKeyRunId: str @@ -108,7 +109,8 @@ class AssignedAction(_message.Message): actionId: str actionType: ActionType actionPayload: str - def __init__(self, tenantId: _Optional[str] = ..., workflowRunId: _Optional[str] = ..., getGroupKeyRunId: _Optional[str] = ..., jobId: _Optional[str] = ..., jobName: _Optional[str] = ..., jobRunId: _Optional[str] = ..., stepId: _Optional[str] = ..., stepRunId: _Optional[str] = ..., actionId: _Optional[str] = ..., actionType: _Optional[_Union[ActionType, str]] = ..., actionPayload: _Optional[str] = ...) -> None: ... 
+ stepName: str + def __init__(self, tenantId: _Optional[str] = ..., workflowRunId: _Optional[str] = ..., getGroupKeyRunId: _Optional[str] = ..., jobId: _Optional[str] = ..., jobName: _Optional[str] = ..., jobRunId: _Optional[str] = ..., stepId: _Optional[str] = ..., stepRunId: _Optional[str] = ..., actionId: _Optional[str] = ..., actionType: _Optional[_Union[ActionType, str]] = ..., actionPayload: _Optional[str] = ..., stepName: _Optional[str] = ...) -> None: ... class WorkerListenRequest(_message.Message): __slots__ = ("workerId",) diff --git a/python-sdk/hatchet_sdk/events_pb2.py b/python-sdk/hatchet_sdk/events_pb2.py index da31c3c55..f3557d668 100644 --- a/python-sdk/hatchet_sdk/events_pb2.py +++ b/python-sdk/hatchet_sdk/events_pb2.py @@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default() from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"|\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"d\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"/\n\x10ListEventRequest\x12\x0e\n\x06offset\x18\x01 \x01(\x05\x12\x0b\n\x03key\x18\x02 \x01(\t\"+\n\x11ListEventResponse\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 
\x01(\t2\x99\x01\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12/\n\x04List\x12\x11.ListEventRequest\x1a\x12.ListEventResponse\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"|\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x92\x01\n\rPutLogRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\tB\x08\n\x06_level\"\x10\n\x0ePutLogResponse\"d\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"/\n\x10ListEventRequest\x12\x0e\n\x06offset\x18\x01 \x01(\x05\x12\x0b\n\x03key\x18\x02 \x01(\t\"+\n\x11ListEventResponse\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\t2\xc6\x01\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12/\n\x04List\x12\x11.ListEventRequest\x1a\x12.ListEventResponse\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -25,14 +25,18 @@ if _descriptor._USE_C_DESCRIPTORS == False: 
_globals['DESCRIPTOR']._serialized_options = b'ZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts' _globals['_EVENT']._serialized_start=49 _globals['_EVENT']._serialized_end=173 - _globals['_PUSHEVENTREQUEST']._serialized_start=175 - _globals['_PUSHEVENTREQUEST']._serialized_end=275 - _globals['_LISTEVENTREQUEST']._serialized_start=277 - _globals['_LISTEVENTREQUEST']._serialized_end=324 - _globals['_LISTEVENTRESPONSE']._serialized_start=326 - _globals['_LISTEVENTRESPONSE']._serialized_end=369 - _globals['_REPLAYEVENTREQUEST']._serialized_start=371 - _globals['_REPLAYEVENTREQUEST']._serialized_end=408 - _globals['_EVENTSSERVICE']._serialized_start=411 - _globals['_EVENTSSERVICE']._serialized_end=564 + _globals['_PUTLOGREQUEST']._serialized_start=176 + _globals['_PUTLOGREQUEST']._serialized_end=322 + _globals['_PUTLOGRESPONSE']._serialized_start=324 + _globals['_PUTLOGRESPONSE']._serialized_end=340 + _globals['_PUSHEVENTREQUEST']._serialized_start=342 + _globals['_PUSHEVENTREQUEST']._serialized_end=442 + _globals['_LISTEVENTREQUEST']._serialized_start=444 + _globals['_LISTEVENTREQUEST']._serialized_end=491 + _globals['_LISTEVENTRESPONSE']._serialized_start=493 + _globals['_LISTEVENTRESPONSE']._serialized_end=536 + _globals['_REPLAYEVENTREQUEST']._serialized_start=538 + _globals['_REPLAYEVENTREQUEST']._serialized_end=575 + _globals['_EVENTSSERVICE']._serialized_start=578 + _globals['_EVENTSSERVICE']._serialized_end=776 # @@protoc_insertion_point(module_scope) diff --git a/python-sdk/hatchet_sdk/events_pb2.pyi b/python-sdk/hatchet_sdk/events_pb2.pyi index f118711d1..50cf3a516 100644 --- a/python-sdk/hatchet_sdk/events_pb2.pyi +++ b/python-sdk/hatchet_sdk/events_pb2.pyi @@ -20,6 +20,24 @@ class Event(_message.Message): eventTimestamp: _timestamp_pb2.Timestamp def __init__(self, tenantId: _Optional[str] = ..., eventId: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: 
_Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... +class PutLogRequest(_message.Message): + __slots__ = ("stepRunId", "createdAt", "message", "level", "metadata") + STEPRUNID_FIELD_NUMBER: _ClassVar[int] + CREATEDAT_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + LEVEL_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + stepRunId: str + createdAt: _timestamp_pb2.Timestamp + message: str + level: str + metadata: str + def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ...) -> None: ... + +class PutLogResponse(_message.Message): + __slots__ = () + def __init__(self) -> None: ... + class PushEventRequest(_message.Message): __slots__ = ("key", "payload", "eventTimestamp") KEY_FIELD_NUMBER: _ClassVar[int] diff --git a/python-sdk/hatchet_sdk/events_pb2_grpc.py b/python-sdk/hatchet_sdk/events_pb2_grpc.py index 95c13d699..e67bc9d00 100644 --- a/python-sdk/hatchet_sdk/events_pb2_grpc.py +++ b/python-sdk/hatchet_sdk/events_pb2_grpc.py @@ -29,6 +29,11 @@ class EventsServiceStub(object): request_serializer=events__pb2.ReplayEventRequest.SerializeToString, response_deserializer=events__pb2.Event.FromString, ) + self.PutLog = channel.unary_unary( + '/EventsService/PutLog', + request_serializer=events__pb2.PutLogRequest.SerializeToString, + response_deserializer=events__pb2.PutLogResponse.FromString, + ) class EventsServiceServicer(object): @@ -52,6 +57,12 @@ class EventsServiceServicer(object): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def PutLog(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def 
add_EventsServiceServicer_to_server(servicer, server): rpc_method_handlers = { @@ -70,6 +81,11 @@ def add_EventsServiceServicer_to_server(servicer, server): request_deserializer=events__pb2.ReplayEventRequest.FromString, response_serializer=events__pb2.Event.SerializeToString, ), + 'PutLog': grpc.unary_unary_rpc_method_handler( + servicer.PutLog, + request_deserializer=events__pb2.PutLogRequest.FromString, + response_serializer=events__pb2.PutLogResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( 'EventsService', rpc_method_handlers) @@ -130,3 +146,20 @@ class EventsService(object): events__pb2.Event.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def PutLog(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/EventsService/PutLog', + events__pb2.PutLogRequest.SerializeToString, + events__pb2.PutLogResponse.FromString, + options, channel_credentials, + insecure, call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/python-sdk/hatchet_sdk/worker.py b/python-sdk/hatchet_sdk/worker.py index 9b4ffca5d..0dba28dfb 100644 --- a/python-sdk/hatchet_sdk/worker.py +++ b/python-sdk/hatchet_sdk/worker.py @@ -36,7 +36,7 @@ class Worker: def handle_start_step_run(self, action : Action): action_name = action.action_id - context = Context(action, self.client.dispatcher) + context = Context(action, self.client.dispatcher, self.client.event) self.contexts[action.step_run_id] = context @@ -110,7 +110,7 @@ class Worker: def handle_start_group_key_run(self, action : Action): action_name = action.action_id - context = Context(action, self.client.dispatcher) + context = Context(action, self.client.dispatcher, self.client.event) 
self.contexts[action.get_group_key_run_id] = context diff --git a/python-sdk/pyproject.toml b/python-sdk/pyproject.toml index 90ff03654..9e659ca6d 100644 --- a/python-sdk/pyproject.toml +++ b/python-sdk/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "hatchet-sdk" -version = "0.13.0" +version = "0.14.0" description = "" authors = ["Alexander Belanger "] readme = "README.md" diff --git a/typescript-sdk/examples/logger.ts b/typescript-sdk/examples/logger.ts new file mode 100644 index 000000000..e3fdcd2d3 --- /dev/null +++ b/typescript-sdk/examples/logger.ts @@ -0,0 +1,42 @@ +import Hatchet from '../src/sdk'; +import { Workflow } from '../src/workflow'; + +const hatchet = Hatchet.init({ + log_level: 'OFF', +}); + +const sleep = (ms: number) => + new Promise((resolve) => { + setTimeout(resolve, ms); + }); + +const workflow: Workflow = { + id: 'logger-example', + description: 'test', + on: { + event: 'user:create', + }, + steps: [ + { + name: 'logger-step1', + run: async (ctx) => { + // log in a for loop + // eslint-disable-next-line no-plusplus + for (let i = 0; i < 10; i++) { + ctx.log(`log message ${i}`); + await sleep(200); + } + + return { step1: 'completed step run' }; + }, + }, + ], +}; + +async function main() { + const worker = await hatchet.worker('logger-worker', 1); + await worker.registerWorkflow(workflow); + worker.start(); +} + +main(); diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index 95b3ef0cc..62063eb6f 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -1,6 +1,6 @@ { "name": "@hatchet-dev/typescript-sdk", - "version": "0.1.23", + "version": "0.1.24", "description": "Background task orchestration & visibility for developers", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -38,6 +38,7 @@ "worker:playground": "npm run exec -- ./examples/playground.ts", "worker:retries": "npm run exec -- ./examples/retries-worker.ts", "worker:multi-workflow": "npm run exec -- ./examples/multi-workflow.ts", 
+ "worker:logger": "npm run exec -- ./examples/logger.ts", "api": "npm run exec -- ./examples/api.ts", "prepublish": "cp package.json dist/package.json;", "publish:ci": "rm -rf ./dist && npm run tsc:build && npm run prepublish && cd dist && npm publish --access public --no-git-checks", diff --git a/typescript-sdk/src/clients/event/event-client.ts b/typescript-sdk/src/clients/event/event-client.ts index eff91ab35..e7df983a1 100644 --- a/typescript-sdk/src/clients/event/event-client.ts +++ b/typescript-sdk/src/clients/event/event-client.ts @@ -7,6 +7,15 @@ import { import HatchetError from '@util/errors/hatchet-error'; import { ClientConfig } from '@clients/hatchet-client/client-config'; import { Logger } from '@hatchet/util/logger'; +import { retrier } from '@hatchet/util/retrier'; + +// eslint-disable-next-line no-shadow +export enum LogLevel { + INFO = 'INFO', + WARN = 'WARN', + ERROR = 'ERROR', + DEBUG = 'DEBUG', +} export class EventClient { config: ClientConfig; @@ -35,4 +44,24 @@ export class EventClient { throw new HatchetError(e.message); } } + + putLog(stepRunId: string, log: string, level?: LogLevel) { + const createdAt = new Date(); + + try { + retrier( + async () => + this.client.putLog({ + stepRunId, + createdAt, + message: log, + level: level || LogLevel.INFO, + }), + this.logger + ); + } catch (e: any) { + // log a warning, but this is not a fatal error + this.logger.warn(`Could not put log: ${e.message}`); + } + } } diff --git a/typescript-sdk/src/clients/rest/generated/Api.ts b/typescript-sdk/src/clients/rest/generated/Api.ts index 594b5e126..be431d34c 100644 --- a/typescript-sdk/src/clients/rest/generated/Api.ts +++ b/typescript-sdk/src/clients/rest/generated/Api.ts @@ -34,6 +34,11 @@ import { ListGithubBranchesResponse, ListGithubReposResponse, ListPullRequestsResponse, + LogLineLevelField, + LogLineList, + LogLineOrderByDirection, + LogLineOrderByField, + LogLineSearch, PullRequestState, RejectInviteRequest, ReplayEventRequest, @@ -784,6 +789,47 
@@ export class Api extends HttpClient + this.request({ + path: `/api/v1/step-runs/${stepRun}/logs`, + method: 'GET', + query: query, + secure: true, + format: 'json', + ...params, + }); /** * @description Get the diff for a step run between the most recent run and the first run. * diff --git a/typescript-sdk/src/clients/rest/generated/data-contracts.ts b/typescript-sdk/src/clients/rest/generated/data-contracts.ts index bd698bb09..4b4edf194 100644 --- a/typescript-sdk/src/clients/rest/generated/data-contracts.ts +++ b/typescript-sdk/src/clients/rest/generated/data-contracts.ts @@ -684,3 +684,40 @@ export enum PullRequestState { Open = 'open', Closed = 'closed', } + +export interface LogLine { + /** + * The creation date of the log line. + * @format date-time + */ + createdAt: string; + /** The log message. */ + message: string; + /** The log metadata. */ + metadata: object; +} + +export enum LogLineLevel { + DEBUG = 'DEBUG', + INFO = 'INFO', + WARN = 'WARN', + ERROR = 'ERROR', +} + +export interface LogLineList { + pagination?: PaginationResponse; + rows?: LogLine[]; +} + +export enum LogLineOrderByField { + CreatedAt = 'createdAt', +} + +export enum LogLineOrderByDirection { + Asc = 'asc', + Desc = 'desc', +} + +export type LogLineSearch = string; + +export type LogLineLevelField = LogLineLevel[]; diff --git a/typescript-sdk/src/clients/worker/worker.ts b/typescript-sdk/src/clients/worker/worker.ts index b5af71d78..dd55a1a73 100644 --- a/typescript-sdk/src/clients/worker/worker.ts +++ b/typescript-sdk/src/clients/worker/worker.ts @@ -122,7 +122,7 @@ export class Worker { const { actionId } = action; try { - const context = new Context(action, this.client.dispatcher); + const context = new Context(action, this.client.dispatcher, this.client.event); this.contexts[action.stepRunId] = context; const step = this.action_registry[actionId]; @@ -188,7 +188,7 @@ export class Worker { const { actionId } = action; try { - const context = new Context(action, 
this.client.dispatcher); + const context = new Context(action, this.client.dispatcher, this.client.event); const key = action.getGroupKeyRunId; diff --git a/typescript-sdk/src/protoc/events/events.ts b/typescript-sdk/src/protoc/events/events.ts index fed7e6757..0eac3fa01 100644 --- a/typescript-sdk/src/protoc/events/events.ts +++ b/typescript-sdk/src/protoc/events/events.ts @@ -18,6 +18,26 @@ export interface Event { eventTimestamp: Date | undefined; } +export interface PutLogRequest { + /** the step run id for the request */ + stepRunId: string; + /** when the log line was created */ + createdAt: + | Date + | undefined; + /** the log line message */ + message: string; + /** the log line level */ + level?: + | string + | undefined; + /** associated log line metadata */ + metadata: string; +} + +export interface PutLogResponse { +} + export interface PushEventRequest { /** the key for the event */ key: string; @@ -163,6 +183,168 @@ export const Event = { }, }; +function createBasePutLogRequest(): PutLogRequest { + return { stepRunId: "", createdAt: undefined, message: "", level: undefined, metadata: "" }; +} + +export const PutLogRequest = { + encode(message: PutLogRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.stepRunId !== "") { + writer.uint32(10).string(message.stepRunId); + } + if (message.createdAt !== undefined) { + Timestamp.encode(toTimestamp(message.createdAt), writer.uint32(18).fork()).ldelim(); + } + if (message.message !== "") { + writer.uint32(26).string(message.message); + } + if (message.level !== undefined) { + writer.uint32(34).string(message.level); + } + if (message.metadata !== "") { + writer.uint32(42).string(message.metadata); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PutLogRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBasePutLogRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.stepRunId = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.createdAt = fromTimestamp(Timestamp.decode(reader, reader.uint32())); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.message = reader.string(); + continue; + case 4: + if (tag !== 34) { + break; + } + + message.level = reader.string(); + continue; + case 5: + if (tag !== 42) { + break; + } + + message.metadata = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): PutLogRequest { + return { + stepRunId: isSet(object.stepRunId) ? globalThis.String(object.stepRunId) : "", + createdAt: isSet(object.createdAt) ? fromJsonTimestamp(object.createdAt) : undefined, + message: isSet(object.message) ? globalThis.String(object.message) : "", + level: isSet(object.level) ? globalThis.String(object.level) : undefined, + metadata: isSet(object.metadata) ? globalThis.String(object.metadata) : "", + }; + }, + + toJSON(message: PutLogRequest): unknown { + const obj: any = {}; + if (message.stepRunId !== "") { + obj.stepRunId = message.stepRunId; + } + if (message.createdAt !== undefined) { + obj.createdAt = message.createdAt.toISOString(); + } + if (message.message !== "") { + obj.message = message.message; + } + if (message.level !== undefined) { + obj.level = message.level; + } + if (message.metadata !== "") { + obj.metadata = message.metadata; + } + return obj; + }, + + create(base?: DeepPartial): PutLogRequest { + return PutLogRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): PutLogRequest { + const message = createBasePutLogRequest(); + message.stepRunId = object.stepRunId ?? 
""; + message.createdAt = object.createdAt ?? undefined; + message.message = object.message ?? ""; + message.level = object.level ?? undefined; + message.metadata = object.metadata ?? ""; + return message; + }, +}; + +function createBasePutLogResponse(): PutLogResponse { + return {}; +} + +export const PutLogResponse = { + encode(_: PutLogResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PutLogResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePutLogResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(_: any): PutLogResponse { + return {}; + }, + + toJSON(_: PutLogResponse): unknown { + const obj: any = {}; + return obj; + }, + + create(base?: DeepPartial): PutLogResponse { + return PutLogResponse.fromPartial(base ?? 
{}); + }, + fromPartial(_: DeepPartial): PutLogResponse { + const message = createBasePutLogResponse(); + return message; + }, +}; + function createBasePushEventRequest(): PushEventRequest { return { key: "", payload: "", eventTimestamp: undefined }; } @@ -469,6 +651,14 @@ export const EventsServiceDefinition = { responseStream: false, options: {}, }, + putLog: { + name: "PutLog", + requestType: PutLogRequest, + requestStream: false, + responseType: PutLogResponse, + responseStream: false, + options: {}, + }, }, } as const; @@ -476,12 +666,14 @@ export interface EventsServiceImplementation { push(request: PushEventRequest, context: CallContext & CallContextExt): Promise>; list(request: ListEventRequest, context: CallContext & CallContextExt): Promise>; replaySingleEvent(request: ReplayEventRequest, context: CallContext & CallContextExt): Promise>; + putLog(request: PutLogRequest, context: CallContext & CallContextExt): Promise>; } export interface EventsServiceClient { push(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; list(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; replaySingleEvent(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; + putLog(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; } type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; diff --git a/typescript-sdk/src/step.ts b/typescript-sdk/src/step.ts index 387085046..9969e3b6b 100644 --- a/typescript-sdk/src/step.ts +++ b/typescript-sdk/src/step.ts @@ -3,6 +3,8 @@ import * as z from 'zod'; import { HatchetTimeoutSchema } from './workflow'; import { Action } from './clients/dispatcher/action-listener'; import { DispatcherClient } from './clients/dispatcher/dispatcher-client'; +import { EventClient, LogLevel } from './clients/event/event-client'; +import { Logger } from './util/logger'; export const CreateStepSchema = z.object({ name: z.string(), @@ -28,14 +30,18 @@ export 
class Context { controller = new AbortController(); action: Action; client: DispatcherClient; + eventClient: EventClient; overridesData: Record = {}; + logger: Logger; - constructor(action: Action, client: DispatcherClient) { + constructor(action: Action, client: DispatcherClient, eventClient: EventClient) { try { const data = JSON.parse(JSON.parse(action.actionPayload)); this.data = data; this.action = action; this.client = client; + this.eventClient = eventClient; + this.logger = new Logger(`Context Logger`, client.config.log_level); // if this is a getGroupKeyRunId, the data is the workflow input if (action.getGroupKeyRunId !== '') { @@ -93,6 +99,18 @@ export class Context { return defaultValue; } + + log(message: string, level?: LogLevel): void { + const { stepRunId } = this.action; + + if (!stepRunId) { + // log a warning + this.logger.warn('cannot log from context without stepRunId'); + return; + } + + this.eventClient.putLog(stepRunId, message, level); + } } export type StepRunFunction = (ctx: Context) => Promise | NextStep | void;