Mirror of https://github.com/opencloud-eu/opencloud.git, synced 2026-01-05 03:40:01 -06:00

build(deps): bump github.com/open-policy-agent/opa from 0.62.1 to 0.64.1

Bumps [github.com/open-policy-agent/opa](https://github.com/open-policy-agent/opa) from 0.62.1 to 0.64.1.
- [Release notes](https://github.com/open-policy-agent/opa/releases)
- [Changelog](https://github.com/open-policy-agent/opa/blob/main/CHANGELOG.md)
- [Commits](https://github.com/open-policy-agent/opa/compare/v0.62.1...v0.64.1)

---
updated-dependencies:
- dependency-name: github.com/open-policy-agent/opa
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

Committed by: Ralf Haferkamp
Parent: 237623178a
Commit: 2623d6cc75

4 go.mod
@@ -69,7 +69,7 @@ require (
 	github.com/onsi/ginkgo v1.16.5
 	github.com/onsi/ginkgo/v2 v2.17.1
 	github.com/onsi/gomega v1.33.0
-	github.com/open-policy-agent/opa v0.62.1
+	github.com/open-policy-agent/opa v0.64.1
 	github.com/orcaman/concurrent-map v1.0.0
 	github.com/owncloud/libre-graph-api-go v1.0.5-0.20240130152355-ac663a9002a1
 	github.com/pkg/errors v0.9.1
@@ -294,7 +294,7 @@ require (
 	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
 	github.com/pquerna/cachecontrol v0.1.0 // indirect
 	github.com/prometheus/alertmanager v0.26.0 // indirect
-	github.com/prometheus/client_model v0.5.0 // indirect
+	github.com/prometheus/client_model v0.6.1 // indirect
 	github.com/prometheus/common v0.48.0 // indirect
 	github.com/prometheus/procfs v0.12.0 // indirect
 	github.com/prometheus/statsd_exporter v0.22.8 // indirect
8 go.sum
@@ -1788,8 +1788,8 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J
 github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
 github.com/onsi/gomega v1.33.0 h1:snPCflnZrpMsy94p4lXVEkHo12lmPnc3vY5XBbreexE=
 github.com/onsi/gomega v1.33.0/go.mod h1:+925n5YtiFsLzzafLUHzVMBpvvRAzrydIBiSIxjX3wY=
-github.com/open-policy-agent/opa v0.62.1 h1:UcxBQ0fe6NEjkYc775j4PWoUFFhx4f6yXKIKSTAuTVk=
-github.com/open-policy-agent/opa v0.62.1/go.mod h1:YqiSIIuvKwyomtnnXkJvy0E3KtVKbavjPJ/hNMuOmeM=
+github.com/open-policy-agent/opa v0.64.1 h1:n8IJTYlFWzqiOYx+JiawbErVxiqAyXohovcZxYbskxQ=
+github.com/open-policy-agent/opa v0.64.1/go.mod h1:j4VeLorVpKipnkQ2TDjWshEuV3cvP/rHzQhYaraUXZY=
 github.com/opencontainers/runtime-spec v1.1.0 h1:HHUyrt9mwHUjtasSbXSMvs4cyFxh+Bll4AjJ9odEGpg=
 github.com/opencontainers/runtime-spec v1.1.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
 github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
@@ -1864,8 +1864,8 @@ github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
-github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw=
-github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI=
+github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E=
+github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY=
 github.com/prometheus/common v0.0.0-20170706130215-fb369f752a7f/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
 github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
 github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4=
48 vendor/github.com/open-policy-agent/opa/ast/builtins.go (generated, vendored)
@@ -142,6 +142,7 @@ var DefaultBuiltins = [...]*Builtin{

 	// Encoding
 	JSONMarshal,
+	JSONMarshalWithOptions,
 	JSONUnmarshal,
 	JSONIsValid,
 	Base64Encode,
@@ -207,6 +208,7 @@ var DefaultBuiltins = [...]*Builtin{
 	// Crypto
 	CryptoX509ParseCertificates,
 	CryptoX509ParseAndVerifyCertificates,
+	CryptoX509ParseAndVerifyCertificatesWithOptions,
 	CryptoMd5,
 	CryptoSha1,
 	CryptoSha256,
@@ -1706,6 +1708,27 @@ var JSONMarshal = &Builtin{
|
||||
Categories: encoding,
|
||||
}
|
||||
|
||||
var JSONMarshalWithOptions = &Builtin{
|
||||
Name: "json.marshal_with_options",
|
||||
Description: "Serializes the input term JSON, with additional formatting options via the `opts` parameter. " +
|
||||
"`opts` accepts keys `pretty` (enable multi-line/formatted JSON), `prefix` (string to prefix lines with, default empty string) and `indent` (string to indent with, default `\\t`).",
|
||||
Decl: types.NewFunction(
|
||||
types.Args(
|
||||
types.Named("x", types.A).Description("the term to serialize"),
|
||||
types.Named("opts", types.NewObject(
|
||||
[]*types.StaticProperty{
|
||||
types.NewStaticProperty("pretty", types.B),
|
||||
types.NewStaticProperty("indent", types.S),
|
||||
types.NewStaticProperty("prefix", types.S),
|
||||
},
|
||||
types.NewDynamicProperty(types.S, types.A),
|
||||
)).Description("encoding options"),
|
||||
),
|
||||
types.Named("y", types.S).Description("the JSON string representation of `x`, with configured prefix/indent string(s) as appropriate"),
|
||||
),
|
||||
Categories: encoding,
|
||||
}
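The new `json.marshal_with_options` builtin declared above can be exercised from Go through the standard `rego` package. The sketch below is illustrative only: the query string and option values are made up, and it assumes OPA v0.64.x is on the module path.

```go
package main

import (
	"context"
	"fmt"

	"github.com/open-policy-agent/opa/rego"
)

func main() {
	// Pretty-print with a two-space indent; the defaults per the declaration
	// above are indent "\t" and an empty prefix.
	q := `json.marshal_with_options({"a": [1, 2]}, {"pretty": true, "indent": "  "})`

	rs, err := rego.New(rego.Query(q)).Eval(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Println(rs[0].Expressions[0].Value) // multi-line JSON string
}
```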
|
||||
|
||||
var JSONUnmarshal = &Builtin{
|
||||
Name: "json.unmarshal",
|
||||
Description: "Deserializes the input string.",
|
||||
@@ -2327,6 +2350,31 @@ with all others being treated as intermediates.`,
|
||||
),
|
||||
}
|
||||
|
||||
var CryptoX509ParseAndVerifyCertificatesWithOptions = &Builtin{
|
||||
Name: "crypto.x509.parse_and_verify_certificates_with_options",
|
||||
Description: `Returns one or more certificates from the given string containing PEM
|
||||
or base64 encoded DER certificates after verifying the supplied certificates form a complete
|
||||
certificate chain back to a trusted root. A config option passed as the second argument can
|
||||
be used to configure the validation options used.
|
||||
|
||||
The first certificate is treated as the root and the last is treated as the leaf,
|
||||
with all others being treated as intermediates.`,
|
||||
|
||||
Decl: types.NewFunction(
|
||||
types.Args(
|
||||
types.Named("certs", types.S).Description("base64 encoded DER or PEM data containing two or more certificates where the first is a root CA, the last is a leaf certificate, and all others are intermediate CAs"),
|
||||
types.Named("options", types.NewObject(
|
||||
nil,
|
||||
types.NewDynamicProperty(types.S, types.A),
|
||||
)).Description("object containing extra configs to verify the validity of certificates. `options` object supports four fields which maps to same fields in [x509.VerifyOptions struct](https://pkg.go.dev/crypto/x509#VerifyOptions). `DNSName`, `CurrentTime`: Nanoseconds since the Unix Epoch as a number, `MaxConstraintComparisons` and `KeyUsages`. `KeyUsages` is list and can have possible values as in: `\"KeyUsageAny\"`, `\"KeyUsageServerAuth\"`, `\"KeyUsageClientAuth\"`, `\"KeyUsageCodeSigning\"`, `\"KeyUsageEmailProtection\"`, `\"KeyUsageIPSECEndSystem\"`, `\"KeyUsageIPSECTunnel\"`, `\"KeyUsageIPSECUser\"`, `\"KeyUsageTimeStamping\"`, `\"KeyUsageOCSPSigning\"`, `\"KeyUsageMicrosoftServerGatedCrypto\"`, `\"KeyUsageNetscapeServerGatedCrypto\"`, `\"KeyUsageMicrosoftCommercialCodeSigning\"`, `\"KeyUsageMicrosoftKernelCodeSigning\"` "),
|
||||
),
|
||||
types.Named("output", types.NewArray([]types.Type{
|
||||
types.B,
|
||||
types.NewArray(nil, types.NewObject(nil, types.NewDynamicProperty(types.S, types.A))),
|
||||
}, nil)).Description("array of `[valid, certs]`: if the input certificate chain could be verified then `valid` is `true` and `certs` is an array of X.509 certificates represented as objects; if the input certificate chain could not be verified then `valid` is `false` and `certs` is `[]`"),
|
||||
),
|
||||
}
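A hedged sketch of calling the new verification builtin from Go via the `rego` package. The certificate chain is assumed to be supplied by the caller (here read from an environment variable), and the `DNSName` and `KeyUsages` values are purely illustrative.

```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/open-policy-agent/opa/rego"
)

func main() {
	// The chain (root first, leaf last) is assumed to be provided by the caller.
	chain := os.Getenv("CERT_CHAIN_PEM")

	q := `crypto.x509.parse_and_verify_certificates_with_options(input.certs, {
		"DNSName": "example.com",
		"KeyUsages": ["KeyUsageServerAuth"]
	})`

	rs, err := rego.New(
		rego.Query(q),
		rego.Input(map[string]interface{}{"certs": chain}),
	).Eval(context.Background())
	if err != nil {
		panic(err)
	}
	// The result is [valid, certs]; valid is false if the chain cannot be verified.
	fmt.Println(rs[0].Expressions[0].Value)
}
```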
|
||||
|
||||
var CryptoX509ParseCertificateRequest = &Builtin{
|
||||
Name: "crypto.x509.parse_certificate_request",
|
||||
Description: "Returns a PKCS #10 certificate signing request from the given PEM-encoded PKCS#10 certificate signing request.",
|
||||
|
||||
21 vendor/github.com/open-policy-agent/opa/ast/parser.go (generated, vendored)
@@ -43,6 +43,20 @@ const (
|
||||
RegoV1
|
||||
)
|
||||
|
||||
func (v RegoVersion) Int() int {
|
||||
if v == RegoV1 {
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func RegoVersionFromInt(i int) RegoVersion {
|
||||
if i == 1 {
|
||||
return RegoV1
|
||||
}
|
||||
return RegoV0
|
||||
}
|
||||
|
||||
// Note: This state is kept isolated from the parser so that we
|
||||
// can do efficient shallow copies of these values when doing a
|
||||
// save() and restore().
|
||||
@@ -2556,6 +2570,11 @@ var futureKeywords = map[string]tokens.Token{
|
||||
"if": tokens.If,
|
||||
}
|
||||
|
||||
func IsFutureKeyword(s string) bool {
|
||||
_, ok := futureKeywords[s]
|
||||
return ok
|
||||
}
|
||||
|
||||
func (p *Parser) futureImport(imp *Import, allowedFutureKeywords map[string]tokens.Token) {
|
||||
path := imp.Path.Value.(Ref)
|
||||
|
||||
@@ -2616,7 +2635,7 @@ func (p *Parser) regoV1Import(imp *Import) {
|
||||
path := imp.Path.Value.(Ref)
|
||||
|
||||
if len(path) == 1 || !path[1].Equal(RegoV1CompatibleRef[1]) || len(path) > 2 {
|
||||
p.errorf(imp.Path.Location, "invalid import, must be `%s`", RegoV1CompatibleRef)
|
||||
p.errorf(imp.Path.Location, "invalid import `%s`, must be `%s`", path, RegoV1CompatibleRef)
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
8 vendor/github.com/open-policy-agent/opa/ast/policy.go (generated, vendored)
@@ -407,6 +407,12 @@ func (mod *Module) RegoVersion() RegoVersion {
 	return mod.regoVersion
 }

+// SetRegoVersion sets the RegoVersion for the module.
+// Note: Setting a rego-version that does not match the module's rego-version might have unintended consequences.
+func (mod *Module) SetRegoVersion(v RegoVersion) {
+	mod.regoVersion = v
+}
+
 // NewComment returns a new Comment object.
 func NewComment(text []byte) *Comment {
 	return &Comment{
@@ -1234,7 +1240,7 @@ func (expr *Expr) Equal(other *Expr) bool {
 //
 // 1. Declarations are always less than other expressions.
 // 2. Preceding expression (by Index) is always less than the other expression.
-// 3. Non-negated expressions are always less than than negated expressions.
+// 3. Non-negated expressions are always less than negated expressions.
 // 4. Single term expressions are always less than built-in expressions.
 //
 // Otherwise, the expression terms are compared normally. If both expressions
14 vendor/github.com/open-policy-agent/opa/ast/version_index.json (generated, vendored)
@@ -280,6 +280,13 @@
     "PreRelease": "",
     "Metadata": ""
   },
+  "crypto.x509.parse_and_verify_certificates_with_options": {
+    "Major": 0,
+    "Minor": 63,
+    "Patch": 0,
+    "PreRelease": "",
+    "Metadata": ""
+  },
   "crypto.x509.parse_certificate_request": {
     "Major": 0,
     "Minor": 21,
@@ -679,6 +686,13 @@
     "PreRelease": "",
     "Metadata": ""
   },
+  "json.marshal_with_options": {
+    "Major": 0,
+    "Minor": 64,
+    "Patch": 0,
+    "PreRelease": "",
+    "Metadata": ""
+  },
   "json.match_schema": {
     "Major": 0,
     "Minor": 50,
264 vendor/github.com/open-policy-agent/opa/bundle/bundle.go (generated, vendored)
@@ -15,10 +15,12 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
"net/url"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
"github.com/gobwas/glob"
|
||||
"github.com/open-policy-agent/opa/ast"
|
||||
astJSON "github.com/open-policy-agent/opa/ast/json"
|
||||
"github.com/open-policy-agent/opa/format"
|
||||
@@ -120,10 +122,28 @@ func NewFile(name, hash, alg string) FileInfo {
|
||||
// Manifest represents the manifest from a bundle. The manifest may contain
|
||||
// metadata such as the bundle revision.
|
||||
type Manifest struct {
|
||||
Revision string `json:"revision"`
|
||||
Roots *[]string `json:"roots,omitempty"`
|
||||
WasmResolvers []WasmResolver `json:"wasm,omitempty"`
|
||||
Metadata map[string]interface{} `json:"metadata,omitempty"`
|
||||
Revision string `json:"revision"`
|
||||
Roots *[]string `json:"roots,omitempty"`
|
||||
WasmResolvers []WasmResolver `json:"wasm,omitempty"`
|
||||
// RegoVersion is the global Rego version for the bundle described by this Manifest.
|
||||
// The Rego version of individual files can be overridden in FileRegoVersions.
|
||||
// We don't use ast.RegoVersion here, as this iota type's order isn't guaranteed to be stable over time.
|
||||
// We use a pointer so that we can support hand-made bundles that don't have an explicit version appropriately.
|
||||
// E.g. in OPA 0.x if --v1-compatible is used when consuming the bundle, and there is no specified version,
|
||||
// we should default to v1; if --v1-compatible isn't used, we should default to v0. In OPA 1.0, no --x-compatible
|
||||
// flag and no explicit bundle version should default to v1.
|
||||
RegoVersion *int `json:"rego_version,omitempty"`
|
||||
// FileRegoVersions is a map from file paths to Rego versions.
|
||||
// This allows individual files to override the global Rego version specified by RegoVersion.
|
||||
FileRegoVersions map[string]int `json:"file_rego_versions,omitempty"`
|
||||
Metadata map[string]interface{} `json:"metadata,omitempty"`
|
||||
|
||||
compiledFileRegoVersions []fileRegoVersion
|
||||
}
|
||||
|
||||
type fileRegoVersion struct {
|
||||
path glob.Glob
|
||||
version int
|
||||
}
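A minimal sketch of a `.manifest` that uses the new fields, based on the JSON tags in the Manifest struct above; the revision, root, and glob pattern are invented for illustration.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/open-policy-agent/opa/bundle"
)

func main() {
	// A bundle-wide Rego version of 1 with one per-file override; per the
	// code above, per-file entries are glob patterns matched against paths
	// relative to the bundle root.
	const manifestJSON = `{
		"revision": "abc123",
		"roots": [""],
		"rego_version": 1,
		"file_rego_versions": {"/legacy/*.rego": 0}
	}`

	var m bundle.Manifest
	if err := json.Unmarshal([]byte(manifestJSON), &m); err != nil {
		panic(err)
	}
	fmt.Printf("%v\n", m)
}
```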
|
||||
|
||||
// WasmResolver maps a wasm module to an entrypoint ref.
|
||||
@@ -150,6 +170,15 @@ func (m *Manifest) AddRoot(r string) {
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Manifest) SetRegoVersion(v ast.RegoVersion) {
|
||||
m.Init()
|
||||
regoVersion := 0
|
||||
if v == ast.RegoV1 {
|
||||
regoVersion = 1
|
||||
}
|
||||
m.RegoVersion = ®oVersion
|
||||
}
|
||||
|
||||
// Equal returns true if m is semantically equivalent to other.
|
||||
func (m Manifest) Equal(other Manifest) bool {
|
||||
|
||||
@@ -161,6 +190,19 @@ func (m Manifest) Equal(other Manifest) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
if m.RegoVersion == nil && other.RegoVersion != nil {
|
||||
return false
|
||||
}
|
||||
if m.RegoVersion != nil && other.RegoVersion == nil {
|
||||
return false
|
||||
}
|
||||
if m.RegoVersion != nil && other.RegoVersion != nil && *m.RegoVersion != *other.RegoVersion {
|
||||
return false
|
||||
}
|
||||
if !reflect.DeepEqual(m.FileRegoVersions, other.FileRegoVersions) {
|
||||
return false
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(m.Metadata, other.Metadata) {
|
||||
return false
|
||||
}
|
||||
@@ -197,7 +239,12 @@ func (m Manifest) Copy() Manifest {
|
||||
|
||||
func (m Manifest) String() string {
|
||||
m.Init()
|
||||
return fmt.Sprintf("<revision: %q, roots: %v, wasm: %+v, metadata: %+v>", m.Revision, *m.Roots, m.WasmResolvers, m.Metadata)
|
||||
if m.RegoVersion != nil {
|
||||
return fmt.Sprintf("<revision: %q, rego_version: %d, roots: %v, wasm: %+v, metadata: %+v>",
|
||||
m.Revision, *m.RegoVersion, *m.Roots, m.WasmResolvers, m.Metadata)
|
||||
}
|
||||
return fmt.Sprintf("<revision: %q, roots: %v, wasm: %+v, metadata: %+v>",
|
||||
m.Revision, *m.Roots, m.WasmResolvers, m.Metadata)
|
||||
}
|
||||
|
||||
func (m Manifest) rootSet() stringSet {
|
||||
@@ -358,10 +405,11 @@ func (m *Manifest) validateAndInjectDefaults(b Bundle) error {
|
||||
|
||||
// ModuleFile represents a single module contained in a bundle.
|
||||
type ModuleFile struct {
|
||||
URL string
|
||||
Path string
|
||||
Raw []byte
|
||||
Parsed *ast.Module
|
||||
URL string
|
||||
Path string
|
||||
RelativePath string
|
||||
Raw []byte
|
||||
Parsed *ast.Module
|
||||
}
|
||||
|
||||
// WasmModuleFile represents a single wasm module contained in a bundle.
|
||||
@@ -543,6 +591,7 @@ func (r *Reader) Read() (Bundle, error) {
|
||||
bundle.Data = map[string]interface{}{}
|
||||
}
|
||||
|
||||
var modules []ModuleFile
|
||||
for _, f := range descriptors {
|
||||
buf, err := readFile(f, r.sizeLimitBytes)
|
||||
if err != nil {
|
||||
@@ -583,20 +632,14 @@ func (r *Reader) Read() (Bundle, error) {
|
||||
raw = append(raw, Raw{Path: p, Value: bs})
|
||||
}
|
||||
|
||||
r.metrics.Timer(metrics.RegoModuleParse).Start()
|
||||
module, err := ast.ParseModuleWithOpts(fullPath, buf.String(), r.ParserOptions())
|
||||
r.metrics.Timer(metrics.RegoModuleParse).Stop()
|
||||
if err != nil {
|
||||
return bundle, err
|
||||
}
|
||||
|
||||
// Modules are parsed after we've had a chance to read the manifest
|
||||
mf := ModuleFile{
|
||||
URL: f.URL(),
|
||||
Path: fullPath,
|
||||
Raw: bs,
|
||||
Parsed: module,
|
||||
URL: f.URL(),
|
||||
Path: fullPath,
|
||||
RelativePath: path,
|
||||
Raw: bs,
|
||||
}
|
||||
bundle.Modules = append(bundle.Modules, mf)
|
||||
modules = append(modules, mf)
|
||||
} else if filepath.Base(path) == WasmFile {
|
||||
bundle.WasmModules = append(bundle.WasmModules, WasmModuleFile{
|
||||
URL: f.URL(),
|
||||
@@ -656,6 +699,23 @@ func (r *Reader) Read() (Bundle, error) {
|
||||
}
|
||||
}
|
||||
|
||||
// Parse modules
|
||||
popts := r.ParserOptions()
|
||||
popts.RegoVersion = bundle.RegoVersion(popts.RegoVersion)
|
||||
for _, mf := range modules {
|
||||
modulePopts := popts
|
||||
if modulePopts.RegoVersion, err = bundle.RegoVersionForFile(mf.RelativePath, popts.RegoVersion); err != nil {
|
||||
return bundle, err
|
||||
}
|
||||
r.metrics.Timer(metrics.RegoModuleParse).Start()
|
||||
mf.Parsed, err = ast.ParseModuleWithOpts(mf.Path, string(mf.Raw), modulePopts)
|
||||
r.metrics.Timer(metrics.RegoModuleParse).Stop()
|
||||
if err != nil {
|
||||
return bundle, err
|
||||
}
|
||||
bundle.Modules = append(bundle.Modules, mf)
|
||||
}
|
||||
|
||||
if bundle.Type() == DeltaBundleType {
|
||||
if len(bundle.Data) != 0 {
|
||||
return bundle, fmt.Errorf("delta bundle expected to contain only patch file but data files found")
|
||||
@@ -1012,7 +1072,7 @@ func hashBundleFiles(hash SignatureHasher, b *Bundle) ([]FileInfo, error) {
|
||||
}
|
||||
|
||||
// FormatModules formats Rego modules
|
||||
// Modules will be formatted to comply with rego-v1, but Rego compatibility of individual parsed modules will be respected (e.g. if 'rego.v1' is imported).
|
||||
// Modules will be formatted to comply with rego-v0, but Rego compatibility of individual parsed modules will be respected (e.g. if 'rego.v1' is imported).
|
||||
func (b *Bundle) FormatModules(useModulePath bool) error {
|
||||
return b.FormatModulesForRegoVersion(ast.RegoV0, true, useModulePath)
|
||||
}
|
||||
@@ -1022,8 +1082,15 @@ func (b *Bundle) FormatModulesForRegoVersion(version ast.RegoVersion, preserveMo
|
||||
var err error
|
||||
|
||||
for i, module := range b.Modules {
|
||||
opts := format.Opts{}
|
||||
if preserveModuleRegoVersion {
|
||||
opts.RegoVersion = module.Parsed.RegoVersion()
|
||||
} else {
|
||||
opts.RegoVersion = version
|
||||
}
|
||||
|
||||
if module.Raw == nil {
|
||||
module.Raw, err = format.AstWithOpts(module.Parsed, format.Opts{RegoVersion: version})
|
||||
module.Raw, err = format.AstWithOpts(module.Parsed, opts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -1033,13 +1100,6 @@ func (b *Bundle) FormatModulesForRegoVersion(version ast.RegoVersion, preserveMo
|
||||
path = module.Path
|
||||
}
|
||||
|
||||
opts := format.Opts{}
|
||||
if preserveModuleRegoVersion {
|
||||
opts.RegoVersion = module.Parsed.RegoVersion()
|
||||
} else {
|
||||
opts.RegoVersion = version
|
||||
}
|
||||
|
||||
module.Raw, err = format.SourceWithOpts(path, module.Raw, opts)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -1111,6 +1171,65 @@ func (b *Bundle) ParsedModules(bundleName string) map[string]*ast.Module {
|
||||
return mods
|
||||
}
|
||||
|
||||
func (b *Bundle) RegoVersion(def ast.RegoVersion) ast.RegoVersion {
|
||||
if v := b.Manifest.RegoVersion; v != nil {
|
||||
if *v == 0 {
|
||||
return ast.RegoV0
|
||||
} else if *v == 1 {
|
||||
return ast.RegoV1
|
||||
}
|
||||
}
|
||||
return def
|
||||
}
|
||||
|
||||
func (b *Bundle) SetRegoVersion(v ast.RegoVersion) {
|
||||
b.Manifest.SetRegoVersion(v)
|
||||
}
|
||||
|
||||
// RegoVersionForFile returns the rego-version for the specified file path.
|
||||
// If there is no defined version for the given path, the default version def is returned.
|
||||
// If the version does not correspond to ast.RegoV0 or ast.RegoV1, an error is returned.
|
||||
func (b *Bundle) RegoVersionForFile(path string, def ast.RegoVersion) (ast.RegoVersion, error) {
|
||||
if version, err := b.Manifest.numericRegoVersionForFile(path); err != nil {
|
||||
return def, err
|
||||
} else if version == nil {
|
||||
return def, nil
|
||||
} else if *version == 0 {
|
||||
return ast.RegoV0, nil
|
||||
} else if *version == 1 {
|
||||
return ast.RegoV1, nil
|
||||
} else {
|
||||
return def, fmt.Errorf("unknown bundle rego-version %d for file '%s'", *version, path)
|
||||
}
|
||||
}
|
||||
|
||||
func (m *Manifest) numericRegoVersionForFile(path string) (*int, error) {
|
||||
var version *int
|
||||
|
||||
if len(m.FileRegoVersions) != len(m.compiledFileRegoVersions) {
|
||||
m.compiledFileRegoVersions = make([]fileRegoVersion, 0, len(m.FileRegoVersions))
|
||||
for pattern, v := range m.FileRegoVersions {
|
||||
compiled, err := glob.Compile(pattern)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to compile glob pattern %s: %s", pattern, err)
|
||||
}
|
||||
m.compiledFileRegoVersions = append(m.compiledFileRegoVersions, fileRegoVersion{compiled, v})
|
||||
}
|
||||
}
|
||||
|
||||
for _, fv := range m.compiledFileRegoVersions {
|
||||
if fv.path.Match(path) {
|
||||
version = &fv.version
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if version == nil {
|
||||
version = m.RegoVersion
|
||||
}
|
||||
return version, nil
|
||||
}
|
||||
|
||||
// Equal returns true if this bundle's contents equal the other bundle's
|
||||
// contents.
|
||||
func (b Bundle) Equal(other Bundle) bool {
|
||||
@@ -1261,13 +1380,33 @@ func mktree(path []string, value interface{}) (map[string]interface{}, error) {
|
||||
// will have an empty revision except in the special case where a single bundle is provided
|
||||
// (and in that case the bundle is just returned unmodified.)
|
||||
func Merge(bundles []*Bundle) (*Bundle, error) {
|
||||
return MergeWithRegoVersion(bundles, ast.RegoV0, false)
|
||||
}
|
||||
|
||||
// MergeWithRegoVersion creates a merged bundle from the provided bundles, similar to Merge.
|
||||
// If more than one bundle is provided, the rego version of the result bundle is set to the provided regoVersion.
|
||||
// Any Rego files in a bundle of conflicting rego version will be marked in the result's manifest with the rego version
|
||||
// of its original bundle. If the Rego file already had an overriding rego version, it will be preserved.
|
||||
// If a single bundle is provided, it will retain any rego version information it already had. If it has none, the
|
||||
// provided regoVersion will be applied to it.
|
||||
// If usePath is true, per-file rego-versions will be calculated using the file's ModuleFile.Path; otherwise, the file's
|
||||
// ModuleFile.URL will be used.
|
||||
func MergeWithRegoVersion(bundles []*Bundle, regoVersion ast.RegoVersion, usePath bool) (*Bundle, error) {
|
||||
|
||||
if len(bundles) == 0 {
|
||||
return nil, errors.New("expected at least one bundle")
|
||||
}
|
||||
|
||||
if len(bundles) == 1 {
|
||||
return bundles[0], nil
|
||||
result := bundles[0]
|
||||
// We respect the bundle rego-version, defaulting to the provided rego version if not set.
|
||||
result.SetRegoVersion(result.RegoVersion(regoVersion))
|
||||
fileRegoVersions, err := bundleRegoVersions(result, result.RegoVersion(regoVersion), usePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result.Manifest.FileRegoVersions = fileRegoVersions
|
||||
return result, nil
|
||||
}
|
||||
|
||||
var roots []string
|
||||
@@ -1296,8 +1435,24 @@ func Merge(bundles []*Bundle) (*Bundle, error) {
|
||||
result.WasmModules = append(result.WasmModules, b.WasmModules...)
|
||||
result.PlanModules = append(result.PlanModules, b.PlanModules...)
|
||||
|
||||
if b.Manifest.RegoVersion != nil || len(b.Manifest.FileRegoVersions) > 0 {
|
||||
if result.Manifest.FileRegoVersions == nil {
|
||||
result.Manifest.FileRegoVersions = map[string]int{}
|
||||
}
|
||||
|
||||
fileRegoVersions, err := bundleRegoVersions(b, regoVersion, usePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for k, v := range fileRegoVersions {
|
||||
result.Manifest.FileRegoVersions[k] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// We respect the bundle rego-version, defaulting to the provided rego version if not set.
|
||||
result.SetRegoVersion(result.RegoVersion(regoVersion))
|
||||
|
||||
if result.Data == nil {
|
||||
result.Data = map[string]interface{}{}
|
||||
}
|
||||
@@ -1311,6 +1466,53 @@ func Merge(bundles []*Bundle) (*Bundle, error) {
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
func bundleRegoVersions(bundle *Bundle, regoVersion ast.RegoVersion, usePath bool) (map[string]int, error) {
|
||||
fileRegoVersions := map[string]int{}
|
||||
|
||||
// we drop the bundle-global rego versions and record individual rego versions for each module.
|
||||
for _, m := range bundle.Modules {
|
||||
// We fetch rego-version by the path relative to the bundle root, as the complete path of the module might
|
||||
// contain the path between OPA working directory and the bundle root.
|
||||
v, err := bundle.RegoVersionForFile(bundleRelativePath(m, usePath), bundle.RegoVersion(regoVersion))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// only record the rego version if it's different from one applied globally to the result bundle
|
||||
if v != regoVersion {
|
||||
// We store the rego version by the absolute path to the bundle root, as this will be the - possibly new - path
|
||||
// to the module inside the merged bundle.
|
||||
fileRegoVersions[bundleAbsolutePath(m, usePath)] = v.Int()
|
||||
}
|
||||
}
|
||||
|
||||
return fileRegoVersions, nil
|
||||
}
|
||||
|
||||
func bundleRelativePath(m ModuleFile, usePath bool) string {
|
||||
p := m.RelativePath
|
||||
if p == "" {
|
||||
if usePath {
|
||||
p = m.Path
|
||||
} else {
|
||||
p = m.URL
|
||||
}
|
||||
}
|
||||
return p
|
||||
}
|
||||
|
||||
func bundleAbsolutePath(m ModuleFile, usePath bool) string {
|
||||
var p string
|
||||
if usePath {
|
||||
p = m.Path
|
||||
} else {
|
||||
p = m.URL
|
||||
}
|
||||
if !path.IsAbs(p) {
|
||||
p = "/" + p
|
||||
}
|
||||
return path.Clean(p)
|
||||
}
|
||||
|
||||
// RootPathsOverlap takes in two bundle root paths and returns true if they overlap.
|
||||
func RootPathsOverlap(pathA string, pathB string) bool {
|
||||
a := rootPathSegments(pathA)
|
||||
|
||||
4781 vendor/github.com/open-policy-agent/opa/capabilities/v0.63.0.json (generated, vendored, new file; diff suppressed because it is too large)
4826 vendor/github.com/open-policy-agent/opa/capabilities/v0.64.0.json (generated, vendored, new file; diff suppressed because it is too large)
4826 vendor/github.com/open-policy-agent/opa/capabilities/v0.64.1.json (generated, vendored, new file; diff suppressed because it is too large)
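These vendored capability documents are what helpers such as `ast.LoadCapabilitiesVersion` resolve. A small sketch (the version string and builtin name are taken from this diff) of checking whether a pinned capabilities version includes one of the new builtins:

```go
package main

import (
	"fmt"

	"github.com/open-policy-agent/opa/ast"
)

func main() {
	caps, err := ast.LoadCapabilitiesVersion("v0.63.0")
	if err != nil {
		panic(err)
	}
	found := false
	for _, b := range caps.Builtins {
		if b.Name == "json.marshal_with_options" {
			found = true
		}
	}
	// Expected to print false: per version_index.json above, this builtin
	// only appears from 0.64.0 on.
	fmt.Println(found)
}
```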
31 vendor/github.com/open-policy-agent/opa/format/format.go (generated, vendored)
@@ -1106,15 +1106,44 @@ func (w *writer) writeIterableLine(elements []interface{}, comments []*ast.Comme
|
||||
func (w *writer) objectWriter() entryWriter {
|
||||
return func(x interface{}, comments []*ast.Comment) []*ast.Comment {
|
||||
entry := x.([2]*ast.Term)
|
||||
|
||||
call, isCall := entry[0].Value.(ast.Call)
|
||||
|
||||
paren := false
|
||||
if isCall && ast.Or.Ref().Equal(call[0].Value) && entry[0].Location.Text[0] == 40 { // Starts with "("
|
||||
paren = true
|
||||
w.write("(")
|
||||
}
|
||||
|
||||
comments = w.writeTerm(entry[0], comments)
|
||||
if paren {
|
||||
w.write(")")
|
||||
}
|
||||
|
||||
w.write(": ")
|
||||
|
||||
call, isCall = entry[1].Value.(ast.Call)
|
||||
if isCall && ast.Or.Ref().Equal(call[0].Value) && entry[1].Location.Text[0] == 40 { // Starts with "("
|
||||
w.write("(")
|
||||
defer w.write(")")
|
||||
}
|
||||
|
||||
return w.writeTerm(entry[1], comments)
|
||||
}
|
||||
}
|
||||
|
||||
func (w *writer) listWriter() entryWriter {
|
||||
return func(x interface{}, comments []*ast.Comment) []*ast.Comment {
|
||||
return w.writeTerm(x.(*ast.Term), comments)
|
||||
t, ok := x.(*ast.Term)
|
||||
if ok {
|
||||
call, isCall := t.Value.(ast.Call)
|
||||
if isCall && ast.Or.Ref().Equal(call[0].Value) && t.Location.Text[0] == 40 { // Starts with "("
|
||||
w.write("(")
|
||||
defer w.write(")")
|
||||
}
|
||||
}
|
||||
|
||||
return w.writeTerm(t, comments)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@ func init() {
  *
  * D) When comparing "between" a set of fields and a referenced fragment, first
  * a comparison is made between each field in the original set of fields and
- * each field in the the referenced set of fields.
+ * each field in the referenced set of fields.
  *
  * E) Also, if any fragment is referenced in the referenced selection set,
  * then a comparison is made "between" the original set of fields and the
22 vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4.go (generated, vendored)
@@ -17,6 +17,8 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
v4 "github.com/open-policy-agent/opa/internal/providers/aws/v4"
|
||||
|
||||
"github.com/open-policy-agent/opa/ast"
|
||||
)
|
||||
|
||||
@@ -104,7 +106,7 @@ func SignRequest(req *http.Request, service string, creds Credentials, theTime t
|
||||
signedHeaders := SignV4a(req.Header, req.Method, req.URL, body, service, creds, now)
|
||||
req.Header = signedHeaders
|
||||
} else {
|
||||
authHeader, awsHeaders := SignV4(req.Header, req.Method, req.URL, body, service, creds, now)
|
||||
authHeader, awsHeaders := SignV4(req.Header, req.Method, req.URL, body, service, creds, now, false)
|
||||
req.Header.Set("Authorization", authHeader)
|
||||
for k, v := range awsHeaders {
|
||||
req.Header.Add(k, v)
|
||||
@@ -115,14 +117,16 @@ func SignRequest(req *http.Request, service string, creds Credentials, theTime t
|
||||
}
|
||||
|
||||
// SignV4 modifies a map[string][]string of headers to generate an AWS V4 signature + headers based on the config/credentials provided.
|
||||
func SignV4(headers map[string][]string, method string, theURL *url.URL, body []byte, service string, awsCreds Credentials, theTime time.Time) (string, map[string]string) {
|
||||
func SignV4(headers map[string][]string, method string, theURL *url.URL, body []byte, service string,
|
||||
awsCreds Credentials, theTime time.Time, disablePayloadSigning bool) (string, map[string]string) {
|
||||
// General ref. https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html
|
||||
// S3 ref. https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-auth-using-authorization-header.html
|
||||
// APIGateway ref. https://docs.aws.amazon.com/apigateway/api-reference/signing-requests/
|
||||
bodyHexHash := fmt.Sprintf("%x", sha256.Sum256(body))
|
||||
|
||||
now := theTime.UTC()
|
||||
|
||||
contentSha256 := getContentHash(disablePayloadSigning, body)
|
||||
|
||||
// V4 signing has specific ideas of how it wants to see dates/times encoded
|
||||
dateNow := now.Format("20060102")
|
||||
iso8601Now := now.Format("20060102T150405Z")
|
||||
@@ -134,7 +138,7 @@ func SignV4(headers map[string][]string, method string, theURL *url.URL, body []
|
||||
|
||||
// s3 and glacier require the extra x-amz-content-sha256 header. other services do not.
|
||||
if service == "s3" || service == "glacier" {
|
||||
awsHeaders["x-amz-content-sha256"] = bodyHexHash
|
||||
awsHeaders[amzContentSha256Key] = contentSha256
|
||||
}
|
||||
|
||||
// the security token header is necessary for ephemeral credentials, e.g. from
|
||||
@@ -173,7 +177,7 @@ func SignV4(headers map[string][]string, method string, theURL *url.URL, body []
|
||||
// include the list of the signed headers
|
||||
headerList := strings.Join(orderedKeys, ";")
|
||||
canonicalReq += headerList + "\n"
|
||||
canonicalReq += bodyHexHash
|
||||
canonicalReq += contentSha256
|
||||
|
||||
// the "string to sign" is a time-bounded, scoped request token which
|
||||
// is linked to the "canonical request" by inclusion of its SHA-256 hash
|
||||
@@ -202,3 +206,11 @@ func SignV4(headers map[string][]string, method string, theURL *url.URL, body []
|
||||
|
||||
return authHeader, awsHeaders
|
||||
}
|
||||
|
||||
// getContentHash returns UNSIGNED-PAYLOAD if payload signing is disabled else will compute sha256 from body
|
||||
func getContentHash(disablePayloadSigning bool, body []byte) string {
|
||||
if disablePayloadSigning {
|
||||
return v4.UnsignedPayload
|
||||
}
|
||||
return fmt.Sprintf("%x", sha256.Sum256(body))
|
||||
}
|
||||
|
||||
8 vendor/github.com/open-policy-agent/opa/internal/providers/aws/signing_v4a.go (generated, vendored)
@@ -32,6 +32,7 @@ const (
|
||||
amzSecurityTokenKey = v4Internal.AmzSecurityTokenKey
|
||||
amzDateKey = v4Internal.AmzDateKey
|
||||
authorizationHeader = "Authorization"
|
||||
amzContentSha256Key = "x-amz-content-sha256"
|
||||
|
||||
signingAlgorithm = "AWS4-ECDSA-P256-SHA256"
|
||||
|
||||
@@ -192,7 +193,7 @@ func (s *httpSigner) Build() (signedRequest, error) {
|
||||
|
||||
// seemingly required by S3/MRAP -- 403 Forbidden otherwise
|
||||
headers.Set("host", req.URL.Host)
|
||||
headers.Set("x-amz-content-sha256", s.PayloadHash)
|
||||
headers.Set(amzContentSha256Key, s.PayloadHash)
|
||||
|
||||
s.setRequiredSigningFields(headers, query)
|
||||
|
||||
@@ -381,8 +382,7 @@ type signedRequest struct {
|
||||
|
||||
// SignV4a returns a map[string][]string of headers, including an added AWS V4a signature based on the config/credentials provided.
|
||||
func SignV4a(headers map[string][]string, method string, theURL *url.URL, body []byte, service string, awsCreds Credentials, theTime time.Time) map[string][]string {
|
||||
bodyHexHash := fmt.Sprintf("%x", sha256.Sum256(body))
|
||||
|
||||
contentSha256 := getContentHash(false, body)
|
||||
key, err := retrievePrivateKey(awsCreds)
|
||||
if err != nil {
|
||||
return map[string][]string{}
|
||||
@@ -394,7 +394,7 @@ func SignV4a(headers map[string][]string, method string, theURL *url.URL, body [
|
||||
|
||||
signer := &httpSigner{
|
||||
Request: req,
|
||||
PayloadHash: bodyHexHash,
|
||||
PayloadHash: contentSha256,
|
||||
ServiceName: service,
|
||||
RegionSet: []string{"*"},
|
||||
Credentials: key,
|
||||
|
||||
14 vendor/github.com/open-policy-agent/opa/plugins/rest/auth.go (generated, vendored)
@@ -541,6 +541,7 @@ func (ap *oauth2ClientCredentialsAuthPlugin) requestToken(ctx context.Context) (
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer response.Body.Close()
|
||||
|
||||
bodyRaw, err := io.ReadAll(response.Body)
|
||||
if err != nil {
|
||||
@@ -700,6 +701,7 @@ func (ap *clientTLSAuthPlugin) Prepare(req *http.Request) error {
|
||||
type awsSigningAuthPlugin struct {
|
||||
AWSEnvironmentCredentials *awsEnvironmentCredentialService `json:"environment_credentials,omitempty"`
|
||||
AWSMetadataCredentials *awsMetadataCredentialService `json:"metadata_credentials,omitempty"`
|
||||
AWSAssumeRoleCredentials *awsAssumeRoleCredentialService `json:"assume_role_credentials,omitempty"`
|
||||
AWSWebIdentityCredentials *awsWebIdentityCredentialService `json:"web_identity_credentials,omitempty"`
|
||||
AWSProfileCredentials *awsProfileCredentialService `json:"profile_credentials,omitempty"`
|
||||
|
||||
@@ -796,6 +798,11 @@ func (ap *awsSigningAuthPlugin) awsCredentialService() awsCredentialService {
|
||||
chain.addService(ap.AWSEnvironmentCredentials)
|
||||
}
|
||||
|
||||
if ap.AWSAssumeRoleCredentials != nil {
|
||||
ap.AWSAssumeRoleCredentials.logger = ap.logger
|
||||
chain.addService(ap.AWSAssumeRoleCredentials)
|
||||
}
|
||||
|
||||
if ap.AWSWebIdentityCredentials != nil {
|
||||
ap.AWSWebIdentityCredentials.logger = ap.logger
|
||||
chain.addService(ap.AWSWebIdentityCredentials)
|
||||
@@ -851,6 +858,7 @@ func (ap *awsSigningAuthPlugin) validateAndSetDefaults(serviceType string) error
|
||||
cfgs := map[bool]int{}
|
||||
cfgs[ap.AWSEnvironmentCredentials != nil]++
|
||||
cfgs[ap.AWSMetadataCredentials != nil]++
|
||||
cfgs[ap.AWSAssumeRoleCredentials != nil]++
|
||||
cfgs[ap.AWSWebIdentityCredentials != nil]++
|
||||
cfgs[ap.AWSProfileCredentials != nil]++
|
||||
|
||||
@@ -864,6 +872,12 @@ func (ap *awsSigningAuthPlugin) validateAndSetDefaults(serviceType string) error
|
||||
}
|
||||
}
|
||||
|
||||
if ap.AWSAssumeRoleCredentials != nil {
|
||||
if err := ap.AWSAssumeRoleCredentials.populateFromEnv(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if ap.AWSWebIdentityCredentials != nil {
|
||||
if err := ap.AWSWebIdentityCredentials.populateFromEnv(); err != nil {
|
||||
return err
|
||||
|
||||
201 vendor/github.com/open-policy-agent/opa/plugins/rest/aws.go (generated, vendored)
@@ -318,6 +318,169 @@ func (cs *awsMetadataCredentialService) credentials(ctx context.Context) (aws.Cr
|
||||
return cs.creds, nil
|
||||
}
|
||||
|
||||
// awsAssumeRoleCredentialService represents a STS credential service that uses active IAM credentials
|
||||
// to obtain temporary security credentials generated by AWS STS via AssumeRole API operation
|
||||
type awsAssumeRoleCredentialService struct {
|
||||
RegionName string `json:"aws_region"`
|
||||
RoleArn string `json:"iam_role_arn"`
|
||||
SessionName string `json:"session_name"`
|
||||
Domain string `json:"aws_domain"`
|
||||
AWSSigningPlugin *awsSigningAuthPlugin `json:"aws_signing,omitempty"`
|
||||
stsURL string
|
||||
creds aws.Credentials
|
||||
expiration time.Time
|
||||
logger logging.Logger
|
||||
}
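Based on the JSON tags above (and on `awsSigningAuthPlugin` in auth.go), a sketch of how the new AssumeRole credential provider might be wired into an OPA service configuration. The service name, bucket URL, region, role ARN, and the choice of nested signing plugin are all assumptions, not taken from this commit.

```go
package main

import "fmt"

// servicesConfig sketches the configuration shape; only the field names
// (assume_role_credentials, aws_region, iam_role_arn, session_name,
// aws_signing, environment_credentials) come from the struct tags in this
// diff, everything else is invented.
const servicesConfig = `
services:
  bundles:
    url: https://example-bucket.s3.eu-central-1.amazonaws.com
    credentials:
      s3_signing:
        assume_role_credentials:
          aws_region: eu-central-1
          iam_role_arn: arn:aws:iam::123456789012:role/opa-bundle-reader
          session_name: opa
          aws_signing:
            environment_credentials: {}
`

func main() { fmt.Print(servicesConfig) }
```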
|
||||
|
||||
func (cs *awsAssumeRoleCredentialService) populateFromEnv() error {
|
||||
if cs.AWSSigningPlugin == nil {
|
||||
return errors.New("a AWS signing plugin must be specified when AssumeRole credential provider is enabled")
|
||||
}
|
||||
|
||||
switch {
|
||||
case cs.AWSSigningPlugin.AWSEnvironmentCredentials != nil:
|
||||
case cs.AWSSigningPlugin.AWSProfileCredentials != nil:
|
||||
case cs.AWSSigningPlugin.AWSMetadataCredentials != nil:
|
||||
default:
|
||||
return errors.New("unsupported AWS signing plugin with AssumeRole credential provider")
|
||||
}
|
||||
|
||||
if cs.AWSSigningPlugin.AWSMetadataCredentials != nil {
|
||||
if cs.AWSSigningPlugin.AWSMetadataCredentials.RegionName == "" {
|
||||
if cs.AWSSigningPlugin.AWSMetadataCredentials.RegionName = os.Getenv(awsRegionEnvVar); cs.AWSSigningPlugin.AWSMetadataCredentials.RegionName == "" {
|
||||
return errors.New("no " + awsRegionEnvVar + " set in environment or configuration")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if cs.AWSSigningPlugin.AWSSignatureVersion == "" {
|
||||
cs.AWSSigningPlugin.AWSSignatureVersion = "4"
|
||||
}
|
||||
|
||||
if cs.Domain == "" {
|
||||
cs.Domain = os.Getenv(awsDomainEnvVar)
|
||||
}
|
||||
|
||||
if cs.RegionName == "" {
|
||||
if cs.RegionName = os.Getenv(awsRegionEnvVar); cs.RegionName == "" {
|
||||
return errors.New("no " + awsRegionEnvVar + " set in environment or configuration")
|
||||
}
|
||||
}
|
||||
|
||||
if cs.RoleArn == "" {
|
||||
if cs.RoleArn = os.Getenv(awsRoleArnEnvVar); cs.RoleArn == "" {
|
||||
return errors.New("no " + awsRoleArnEnvVar + " set in environment or configuration")
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cs *awsAssumeRoleCredentialService) signingCredentials(ctx context.Context) (aws.Credentials, error) {
|
||||
if cs.AWSSigningPlugin.AWSEnvironmentCredentials != nil {
|
||||
cs.AWSSigningPlugin.AWSEnvironmentCredentials.logger = cs.logger
|
||||
return cs.AWSSigningPlugin.AWSEnvironmentCredentials.credentials(ctx)
|
||||
}
|
||||
|
||||
if cs.AWSSigningPlugin.AWSProfileCredentials != nil {
|
||||
cs.AWSSigningPlugin.AWSProfileCredentials.logger = cs.logger
|
||||
return cs.AWSSigningPlugin.AWSProfileCredentials.credentials(ctx)
|
||||
}
|
||||
|
||||
cs.AWSSigningPlugin.AWSMetadataCredentials.logger = cs.logger
|
||||
return cs.AWSSigningPlugin.AWSMetadataCredentials.credentials(ctx)
|
||||
}
|
||||
|
||||
func (cs *awsAssumeRoleCredentialService) stsPath() string {
|
||||
return getSTSPath(cs.Domain, cs.stsURL, cs.RegionName)
|
||||
}
|
||||
|
||||
func (cs *awsAssumeRoleCredentialService) refreshFromService(ctx context.Context) error {
|
||||
// define the expected JSON payload from the EC2 credential service
|
||||
// ref. https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html
|
||||
type responsePayload struct {
|
||||
Result struct {
|
||||
Credentials struct {
|
||||
SessionToken string
|
||||
SecretAccessKey string
|
||||
Expiration time.Time
|
||||
AccessKeyID string `xml:"AccessKeyId"`
|
||||
}
|
||||
} `xml:"AssumeRoleResult"`
|
||||
}
|
||||
|
||||
// short circuit if a reasonable amount of time until credential expiration remains
|
||||
if time.Now().Add(time.Minute * 5).Before(cs.expiration) {
|
||||
cs.logger.Debug("Credentials previously obtained from sts service still valid.")
|
||||
return nil
|
||||
}
|
||||
|
||||
cs.logger.Debug("Obtaining credentials from sts for role %s.", cs.RoleArn)
|
||||
|
||||
var sessionName string
|
||||
if cs.SessionName == "" {
|
||||
sessionName = "open-policy-agent"
|
||||
} else {
|
||||
sessionName = cs.SessionName
|
||||
}
|
||||
|
||||
queryVals := url.Values{
|
||||
"Action": []string{"AssumeRole"},
|
||||
"RoleSessionName": []string{sessionName},
|
||||
"RoleArn": []string{cs.RoleArn},
|
||||
"Version": []string{"2011-06-15"},
|
||||
}
|
||||
stsRequestURL, _ := url.Parse(cs.stsPath())
|
||||
|
||||
// construct an HTTP client with a reasonably short timeout
|
||||
client := &http.Client{Timeout: time.Second * 10}
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodPost, stsRequestURL.String(), strings.NewReader(queryVals.Encode()))
|
||||
if err != nil {
|
||||
return errors.New("unable to construct STS HTTP request: " + err.Error())
|
||||
}
|
||||
|
||||
req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
|
||||
|
||||
// Note: Calls to AWS STS AssumeRole must be signed using the access key ID
|
||||
// and secret access key
|
||||
signingCreds, err := cs.signingCredentials(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = aws.SignRequest(req, "sts", signingCreds, time.Now(), cs.AWSSigningPlugin.AWSSignatureVersion)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
body, err := aws.DoRequestWithClient(req, client, "STS", cs.logger)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var payload responsePayload
|
||||
err = xml.Unmarshal(body, &payload)
|
||||
if err != nil {
|
||||
return errors.New("failed to parse credential response from STS service: " + err.Error())
|
||||
}
|
||||
|
||||
cs.expiration = payload.Result.Credentials.Expiration
|
||||
cs.creds.AccessKey = payload.Result.Credentials.AccessKeyID
|
||||
cs.creds.SecretKey = payload.Result.Credentials.SecretAccessKey
|
||||
cs.creds.SessionToken = payload.Result.Credentials.SessionToken
|
||||
cs.creds.RegionName = cs.RegionName
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cs *awsAssumeRoleCredentialService) credentials(ctx context.Context) (aws.Credentials, error) {
|
||||
err := cs.refreshFromService(ctx)
|
||||
if err != nil {
|
||||
return cs.creds, err
|
||||
}
|
||||
return cs.creds, nil
|
||||
}
|
||||
|
||||
// awsWebIdentityCredentialService represents an STS WebIdentity credential services
|
||||
type awsWebIdentityCredentialService struct {
|
||||
RoleArn string
|
||||
@@ -354,23 +517,7 @@ func (cs *awsWebIdentityCredentialService) populateFromEnv() error {
|
||||
}
|
||||
|
||||
func (cs *awsWebIdentityCredentialService) stsPath() string {
|
||||
var domain string
|
||||
if cs.Domain != "" {
|
||||
domain = strings.ToLower(cs.Domain)
|
||||
} else {
|
||||
domain = stsDefaultDomain
|
||||
}
|
||||
|
||||
var stsPath string
|
||||
switch {
|
||||
case cs.stsURL != "":
|
||||
stsPath = cs.stsURL
|
||||
case cs.RegionName != "":
|
||||
stsPath = fmt.Sprintf(stsRegionPath, strings.ToLower(cs.RegionName), domain)
|
||||
default:
|
||||
stsPath = fmt.Sprintf(stsDefaultPath, domain)
|
||||
}
|
||||
return stsPath
|
||||
return getSTSPath(cs.Domain, cs.stsURL, cs.RegionName)
|
||||
}
|
||||
|
||||
func (cs *awsWebIdentityCredentialService) refreshFromService(ctx context.Context) error {
|
||||
@@ -555,3 +702,23 @@ func (ap *awsKMSSignPlugin) SignDigest(ctx context.Context, digest []byte, keyID
|
||||
|
||||
return signature, nil
|
||||
}
|
||||
|
||||
func getSTSPath(stsDomain, stsURL, regionName string) string {
|
||||
var domain string
|
||||
if stsDomain != "" {
|
||||
domain = strings.ToLower(stsDomain)
|
||||
} else {
|
||||
domain = stsDefaultDomain
|
||||
}
|
||||
|
||||
var stsPath string
|
||||
switch {
|
||||
case stsURL != "":
|
||||
stsPath = stsURL
|
||||
case regionName != "":
|
||||
stsPath = fmt.Sprintf(stsRegionPath, strings.ToLower(regionName), domain)
|
||||
default:
|
||||
stsPath = fmt.Sprintf(stsDefaultPath, domain)
|
||||
}
|
||||
return stsPath
|
||||
}
|
||||
|
||||
75 vendor/github.com/open-policy-agent/opa/rego/rego.go (generated, vendored)
@@ -1761,14 +1761,15 @@ func (r *Rego) prepare(ctx context.Context, qType queryType, extras []extraStage
|
||||
return err
|
||||
}
|
||||
|
||||
futureImports := []*ast.Import{}
|
||||
queryImports := []*ast.Import{}
|
||||
for _, imp := range imports {
|
||||
if imp.Path.Value.(ast.Ref).HasPrefix(ast.Ref([]*ast.Term{ast.FutureRootDocument})) {
|
||||
futureImports = append(futureImports, imp)
|
||||
path := imp.Path.Value.(ast.Ref)
|
||||
if path.HasPrefix([]*ast.Term{ast.FutureRootDocument}) || path.HasPrefix([]*ast.Term{ast.RegoRootDocument}) {
|
||||
queryImports = append(queryImports, imp)
|
||||
}
|
||||
}
|
||||
|
||||
r.parsedQuery, err = r.parseQuery(futureImports, r.metrics)
|
||||
r.parsedQuery, err = r.parseQuery(queryImports, r.metrics)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@@ -1921,7 +1922,7 @@ func (r *Rego) parseRawInput(rawInput *interface{}, m metrics.Metrics) (ast.Valu
|
||||
return ast.InterfaceToValue(*rawPtr)
|
||||
}
|
||||
|
||||
func (r *Rego) parseQuery(futureImports []*ast.Import, m metrics.Metrics) (ast.Body, error) {
|
||||
func (r *Rego) parseQuery(queryImports []*ast.Import, m metrics.Metrics) (ast.Body, error) {
|
||||
if r.parsedQuery != nil {
|
||||
return r.parsedQuery, nil
|
||||
}
|
||||
@@ -1929,7 +1930,11 @@ func (r *Rego) parseQuery(futureImports []*ast.Import, m metrics.Metrics) (ast.B
|
||||
m.Timer(metrics.RegoQueryParse).Start()
|
||||
defer m.Timer(metrics.RegoQueryParse).Stop()
|
||||
|
||||
popts, err := future.ParserOptionsFromFutureImports(futureImports)
|
||||
popts, err := future.ParserOptionsFromFutureImports(queryImports)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
popts, err = parserOptionsFromRegoVersionImport(queryImports, popts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -1937,6 +1942,17 @@ func (r *Rego) parseQuery(futureImports []*ast.Import, m metrics.Metrics) (ast.B
|
||||
return ast.ParseBodyWithOpts(r.query, popts)
|
||||
}
|
||||
|
||||
func parserOptionsFromRegoVersionImport(imports []*ast.Import, popts ast.ParserOptions) (ast.ParserOptions, error) {
|
||||
for _, imp := range imports {
|
||||
path := imp.Path.Value.(ast.Ref)
|
||||
if ast.Compare(path, ast.RegoV1CompatibleRef) == 0 {
|
||||
popts.RegoVersion = ast.RegoV1
|
||||
return popts, nil
|
||||
}
|
||||
}
|
||||
return popts, nil
|
||||
}
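A small sketch of what the widened query-import handling enables, assuming the standard `rego` package API: supplying a `rego.v1` import alongside the query now switches the query parser to Rego v1 options, so keywords such as `in` work without individual `future.keywords` imports.

```go
package main

import (
	"context"
	"fmt"

	"github.com/open-policy-agent/opa/rego"
)

func main() {
	// The rego.v1 import is picked up by parserOptionsFromRegoVersionImport
	// above and applied to the query parser options.
	r := rego.New(
		rego.Query(`x := [v | some v in [1, 2, 3]]`),
		rego.Imports([]string{"rego.v1"}),
	)
	rs, err := r.Eval(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Println(rs[0].Bindings["x"])
}
```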
|
||||
|
||||
func (r *Rego) compileModules(ctx context.Context, txn storage.Transaction, m metrics.Metrics) error {
|
||||
|
||||
// Only compile again if there are new modules.
|
||||
@@ -2405,6 +2421,53 @@ func (r *Rego) partial(ctx context.Context, ectx *EvalContext) (*PartialQueries,
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if r.regoVersion == ast.RegoV0 && (r.capabilities == nil || r.capabilities.ContainsFeature(ast.FeatureRegoV1Import)) {
|
||||
// If the target rego-version in v0, and the rego.v1 import is available, then we attempt to apply it to support modules.
|
||||
|
||||
for i, mod := range support {
|
||||
if mod.RegoVersion() != ast.RegoV0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// We can't apply the RegoV0CompatV1 version to the support module if it contains rules or vars that
|
||||
// conflict with future keywords.
|
||||
applyRegoVersion := true
|
||||
|
||||
ast.WalkRules(mod, func(r *ast.Rule) bool {
|
||||
name := r.Head.Name
|
||||
if name == "" && len(r.Head.Reference) > 0 {
|
||||
name = r.Head.Reference[0].Value.(ast.Var)
|
||||
}
|
||||
if ast.IsFutureKeyword(name.String()) {
|
||||
applyRegoVersion = false
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
|
||||
if applyRegoVersion {
|
||||
ast.WalkVars(mod, func(v ast.Var) bool {
|
||||
if ast.IsFutureKeyword(v.String()) {
|
||||
applyRegoVersion = false
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
if applyRegoVersion {
|
||||
support[i].SetRegoVersion(ast.RegoV0CompatV1)
|
||||
} else {
|
||||
support[i].SetRegoVersion(r.regoVersion)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If the target rego-version is not v0, then we apply the target rego-version to the support modules.
|
||||
for i := range support {
|
||||
support[i].SetRegoVersion(r.regoVersion)
|
||||
}
|
||||
}
|
||||
|
||||
pq := &PartialQueries{
|
||||
Queries: queries,
|
||||
Support: support,
|
||||
|
||||
161 vendor/github.com/open-policy-agent/opa/topdown/crypto.go (generated, vendored)
@@ -21,6 +21,7 @@ import (
|
||||
"hash"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/open-policy-agent/opa/internal/jwx/jwk"
|
||||
|
||||
@@ -104,7 +105,7 @@ func builtinCryptoX509ParseAndVerifyCertificates(_ BuiltinContext, operands []*a
|
||||
return iter(invalid)
|
||||
}
|
||||
|
||||
verified, err := verifyX509CertificateChain(certs)
|
||||
verified, err := verifyX509CertificateChain(certs, x509.VerifyOptions{})
|
||||
if err != nil {
|
||||
return iter(invalid)
|
||||
}
|
||||
@@ -122,6 +123,153 @@ func builtinCryptoX509ParseAndVerifyCertificates(_ BuiltinContext, operands []*a
|
||||
return iter(valid)
|
||||
}
|
||||
|
||||
var allowedKeyUsages = map[string]x509.ExtKeyUsage{
|
||||
"KeyUsageAny": x509.ExtKeyUsageAny,
|
||||
"KeyUsageServerAuth": x509.ExtKeyUsageServerAuth,
|
||||
"KeyUsageClientAuth": x509.ExtKeyUsageClientAuth,
|
||||
"KeyUsageCodeSigning": x509.ExtKeyUsageCodeSigning,
|
||||
"KeyUsageEmailProtection": x509.ExtKeyUsageEmailProtection,
|
||||
"KeyUsageIPSECEndSystem": x509.ExtKeyUsageIPSECEndSystem,
|
||||
"KeyUsageIPSECTunnel": x509.ExtKeyUsageIPSECTunnel,
|
||||
"KeyUsageIPSECUser": x509.ExtKeyUsageIPSECUser,
|
||||
"KeyUsageTimeStamping": x509.ExtKeyUsageTimeStamping,
|
||||
"KeyUsageOCSPSigning": x509.ExtKeyUsageOCSPSigning,
|
||||
"KeyUsageMicrosoftServerGatedCrypto": x509.ExtKeyUsageMicrosoftServerGatedCrypto,
|
||||
"KeyUsageNetscapeServerGatedCrypto": x509.ExtKeyUsageNetscapeServerGatedCrypto,
|
||||
"KeyUsageMicrosoftCommercialCodeSigning": x509.ExtKeyUsageMicrosoftCommercialCodeSigning,
|
||||
"KeyUsageMicrosoftKernelCodeSigning": x509.ExtKeyUsageMicrosoftKernelCodeSigning,
|
||||
}
|
||||
|
||||
func builtinCryptoX509ParseAndVerifyCertificatesWithOptions(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
|
||||
|
||||
input, err := builtins.StringOperand(operands[0].Value, 1)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
options, err := builtins.ObjectOperand(operands[1].Value, 2)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
invalid := ast.ArrayTerm(
|
||||
ast.BooleanTerm(false),
|
||||
ast.NewTerm(ast.NewArray()),
|
||||
)
|
||||
|
||||
certs, err := getX509CertsFromString(string(input))
|
||||
if err != nil {
|
||||
return iter(invalid)
|
||||
}
|
||||
|
||||
// Collect the cert verification options
|
||||
verifyOpt, err := extractVerifyOpts(options)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
verified, err := verifyX509CertificateChain(certs, verifyOpt)
|
||||
if err != nil {
|
||||
return iter(invalid)
|
||||
}
|
||||
|
||||
value, err := ast.InterfaceToValue(verified)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
valid := ast.ArrayTerm(
|
||||
ast.BooleanTerm(true),
|
||||
ast.NewTerm(value),
|
||||
)
|
||||
|
||||
return iter(valid)
|
||||
}
|
||||
|
||||
func extractVerifyOpts(options ast.Object) (verifyOpt x509.VerifyOptions, err error) {

    for _, key := range options.Keys() {
        k, err := ast.JSON(key.Value)
        if err != nil {
            return verifyOpt, err
        }
        k, ok := k.(string)
        if !ok {
            continue
        }

        switch k {
        case "DNSName":
            dns, ok := options.Get(key).Value.(ast.String)
            if ok {
                verifyOpt.DNSName = strings.Trim(string(dns), "\"")
            } else {
                return verifyOpt, fmt.Errorf("'DNSName' should be a string")
            }
        case "CurrentTime":
            c, ok := options.Get(key).Value.(ast.Number)
            if ok {
                nanosecs, ok := c.Int64()
                if ok {
                    verifyOpt.CurrentTime = time.Unix(0, nanosecs)
                } else {
                    return verifyOpt, fmt.Errorf("'CurrentTime' should be a valid int64 number")
                }
            } else {
                return verifyOpt, fmt.Errorf("'CurrentTime' should be a number")
            }
        case "MaxConstraintComparisons":
            c, ok := options.Get(key).Value.(ast.Number)
            if ok {
                maxComparisons, ok := c.Int()
                if ok {
                    verifyOpt.MaxConstraintComparisions = maxComparisons
                } else {
                    return verifyOpt, fmt.Errorf("'MaxConstraintComparisons' should be a valid number")
                }
            } else {
                return verifyOpt, fmt.Errorf("'MaxConstraintComparisons' should be a number")
            }
        case "KeyUsages":
            type forEach interface {
                Foreach(func(*ast.Term))
            }
            var ks forEach
            switch options.Get(key).Value.(type) {
            case *ast.Array:
                ks = options.Get(key).Value.(*ast.Array)
            case ast.Set:
                ks = options.Get(key).Value.(ast.Set)
            default:
                return verifyOpt, fmt.Errorf("'KeyUsages' should be an Array or Set")
            }

            // Collect the x509.ExtKeyUsage values by looking up the
            // mapping of key usage strings to x509.ExtKeyUsage
            var invalidKUsgs []string
            ks.Foreach(func(t *ast.Term) {
                u, ok := t.Value.(ast.String)
                if ok {
                    v := strings.Trim(string(u), "\"")
                    if k, ok := allowedKeyUsages[v]; ok {
                        verifyOpt.KeyUsages = append(verifyOpt.KeyUsages, k)
                    } else {
                        invalidKUsgs = append(invalidKUsgs, v)
                    }
                }
            })
            if len(invalidKUsgs) > 0 {
                return x509.VerifyOptions{}, fmt.Errorf("invalid entries for 'KeyUsages' found: %s", invalidKUsgs)
            }
        default:
            return verifyOpt, fmt.Errorf("invalid key option")
        }

    }

    return verifyOpt, nil
}

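As a point of reference (not part of the vendored diff), here is a minimal sketch of the options object a caller would supply as the builtin's second operand, built with OPA's ast helpers. The key names and value shapes follow extractVerifyOpts above; the KeyUsages strings are assumed to be keys of allowedKeyUsages, and CurrentTime is Unix nanoseconds, matching the time.Unix(0, nanosecs) conversion.

package main

import (
    "encoding/json"
    "fmt"
    "strconv"
    "time"

    "github.com/open-policy-agent/opa/ast"
)

func main() {
    // CurrentTime must be nanoseconds since the Unix epoch.
    now := strconv.FormatInt(time.Now().UnixNano(), 10)

    options := ast.NewObject(
        ast.Item(ast.StringTerm("DNSName"), ast.StringTerm("example.com")),
        ast.Item(ast.StringTerm("CurrentTime"), ast.NumberTerm(json.Number(now))),
        ast.Item(ast.StringTerm("MaxConstraintComparisons"), ast.IntNumberTerm(20)),
        // Entries must match keys of allowedKeyUsages, e.g. "KeyUsageServerAuth".
        ast.Item(ast.StringTerm("KeyUsages"), ast.ArrayTerm(
            ast.StringTerm("KeyUsageServerAuth"),
            ast.StringTerm("KeyUsageClientAuth"),
        )),
    )

    // This object is the second operand to
    // crypto.x509.parse_and_verify_certificates_with_options.
    fmt.Println(options)
}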
func builtinCryptoX509ParseKeyPair(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {
    certificate, err := builtins.StringOperand(operands[0].Value, 1)
    if err != nil {
@@ -380,6 +528,7 @@ func builtinCryptoHmacEqual(_ BuiltinContext, operands []*ast.Term, iter func(*a
func init() {
    RegisterBuiltinFunc(ast.CryptoX509ParseCertificates.Name, builtinCryptoX509ParseCertificates)
    RegisterBuiltinFunc(ast.CryptoX509ParseAndVerifyCertificates.Name, builtinCryptoX509ParseAndVerifyCertificates)
    RegisterBuiltinFunc(ast.CryptoX509ParseAndVerifyCertificatesWithOptions.Name, builtinCryptoX509ParseAndVerifyCertificatesWithOptions)
    RegisterBuiltinFunc(ast.CryptoMd5.Name, builtinCryptoMd5)
    RegisterBuiltinFunc(ast.CryptoSha1.Name, builtinCryptoSha1)
    RegisterBuiltinFunc(ast.CryptoSha256.Name, builtinCryptoSha256)
@@ -394,7 +543,7 @@ func init() {
    RegisterBuiltinFunc(ast.CryptoHmacEqual.Name, builtinCryptoHmacEqual)
}

func verifyX509CertificateChain(certs []*x509.Certificate) ([]*x509.Certificate, error) {
func verifyX509CertificateChain(certs []*x509.Certificate, vo x509.VerifyOptions) ([]*x509.Certificate, error) {
    if len(certs) < 2 {
        return nil, builtins.NewOperandErr(1, "must supply at least two certificates to be able to verify")
    }
@@ -414,8 +563,12 @@ func verifyX509CertificateChain(certs []*x509.Certificate) ([]*x509.Certificate,

    // verify the cert chain back to the root
    verifyOpts := x509.VerifyOptions{
        Roots:         roots,
        Intermediates: intermediates,
        Roots:                     roots,
        Intermediates:             intermediates,
        DNSName:                   vo.DNSName,
        CurrentTime:               vo.CurrentTime,
        KeyUsages:                 vo.KeyUsages,
        MaxConstraintComparisions: vo.MaxConstraintComparisions,
    }
    chains, err := leaf.Verify(verifyOpts)
    if err != nil {
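Because the verification change is split across hunks here, the following is a hedged, compact sketch (assumptions: chain ordered root first and leaf last, at least two certificates, as the surrounding helper enforces) of the plain crypto/x509 call that the extracted VerifyOptions ultimately feed into.

package certverify

import "crypto/x509"

// VerifyChain mirrors the shape of verifyX509CertificateChain above: build the
// root and intermediate pools from the supplied chain, merge in the caller's
// options (DNSName, CurrentTime, KeyUsages, ...), and verify the leaf.
// Assumes len(certs) >= 2.
func VerifyChain(certs []*x509.Certificate, vo x509.VerifyOptions) ([][]*x509.Certificate, error) {
    roots := x509.NewCertPool()
    roots.AddCert(certs[0]) // assumed ordering: root first ...

    intermediates := x509.NewCertPool()
    for _, c := range certs[1 : len(certs)-1] {
        intermediates.AddCert(c)
    }

    leaf := certs[len(certs)-1] // ... leaf last

    vo.Roots = roots
    vo.Intermediates = intermediates

    return leaf.Verify(vo)
}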
87 changes: vendor/github.com/open-policy-agent/opa/topdown/encoding.go (generated, vendored)
@@ -35,6 +35,92 @@ func builtinJSONMarshal(_ BuiltinContext, operands []*ast.Term, iter func(*ast.T
    return iter(ast.StringTerm(string(bs)))
}

func builtinJSONMarshalWithOpts(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {

    asJSON, err := ast.JSON(operands[0].Value)
    if err != nil {
        return err
    }

    indentWith := "\t"
    prefixWith := ""
    implicitPrettyPrint := false
    userDeclaredExplicitPrettyPrint := false
    shouldPrettyPrint := false

    marshalOpts, err := builtins.ObjectOperand(operands[1].Value, 2)
    if err != nil {
        return err
    }

    for idx, k := range marshalOpts.Keys() {

        val := marshalOpts.Get(k)

        key, err := builtins.StringOperand(k.Value, idx)
        if err != nil {
            return builtins.NewOperandErr(2, "failed to stringify key %v at index %d: %v", k, idx, err)
        }

        switch key {

        case "prefix":
            prefixOpt, err := builtins.StringOperand(val.Value, idx)
            if err != nil {
                return builtins.NewOperandErr(2, "key %s failed cast to string: %v", key, err)
            }
            prefixWith = string(prefixOpt)
            implicitPrettyPrint = true

        case "indent":
            indentOpt, err := builtins.StringOperand(val.Value, idx)
            if err != nil {
                return builtins.NewOperandErr(2, "key %s failed cast to string: %v", key, err)

            }
            indentWith = string(indentOpt)
            implicitPrettyPrint = true

        case "pretty":
            userDeclaredExplicitPrettyPrint = true
            explicitPrettyPrint, ok := val.Value.(ast.Boolean)
            if !ok {
                return builtins.NewOperandErr(2, "key %s failed cast to bool", key)
            }

            shouldPrettyPrint = bool(explicitPrettyPrint)

        default:
            return builtins.NewOperandErr(2, "object contained unknown key %s", key)
        }

    }

    if !userDeclaredExplicitPrettyPrint {
        shouldPrettyPrint = implicitPrettyPrint
    }

    var bs []byte

    if shouldPrettyPrint {
        bs, err = json.MarshalIndent(asJSON, prefixWith, indentWith)
    } else {
        bs, err = json.Marshal(asJSON)
    }

    if err != nil {
        return err
    }

    if shouldPrettyPrint {
        // json.MarshalIndent() function will not prefix the first line of emitted JSON
        return iter(ast.StringTerm(prefixWith + string(bs)))
    }

    return iter(ast.StringTerm(string(bs)))

}

func builtinJSONUnmarshal(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error {

    str, err := builtins.StringOperand(operands[0].Value, 1)
@@ -299,6 +385,7 @@ func builtinHexDecode(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter

func init() {
    RegisterBuiltinFunc(ast.JSONMarshal.Name, builtinJSONMarshal)
    RegisterBuiltinFunc(ast.JSONMarshalWithOptions.Name, builtinJSONMarshalWithOpts)
    RegisterBuiltinFunc(ast.JSONUnmarshal.Name, builtinJSONUnmarshal)
    RegisterBuiltinFunc(ast.JSONIsValid.Name, builtinJSONIsValid)
    RegisterBuiltinFunc(ast.Base64Encode.Name, builtinBase64Encode)

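A quick standalone illustration (example values assumed) of the encoding/json behaviour the pretty-printing path above works around: json.MarshalIndent prefixes every output line after the first, but not the first one, which is why the builtin prepends prefixWith itself before returning.

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    v := map[string]int{"a": 1}

    bs, err := json.MarshalIndent(v, "# ", "  ")
    if err != nil {
        panic(err)
    }

    fmt.Println(string(bs))        // first line "{" carries no "# " prefix
    fmt.Println("# " + string(bs)) // matches what the builtin emits for pretty output
}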
248 changes: vendor/github.com/open-policy-agent/opa/topdown/eval.go (generated, vendored)
@@ -2,6 +2,7 @@ package topdown

import (
    "context"
    "errors"
    "fmt"
    "io"
    "sort"
@@ -50,6 +51,12 @@ func (ee *earlyExitError) Error() string {
    return fmt.Sprintf("%v: early exit", ee.e.query)
}

type deferredEarlyExitError earlyExitError

func (ee deferredEarlyExitError) Error() string {
    return fmt.Sprintf("%v: deferred early exit", ee.e.query)
}

type eval struct {
    ctx     context.Context
    metrics metrics.Metrics
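The deferred variant is easier to see in isolation. Below is a small self-contained analogy (not OPA code; all names are made up) of the pattern used throughout this file: an inner enumeration signals a hit with a sentinel error, and a caller that is not allowed to stop yet re-tags the signal as deferred instead of aborting.

package main

import (
    "errors"
    "fmt"
)

type earlyExit struct{ msg string }

func (e *earlyExit) Error() string { return e.msg + ": early exit" }

// Same underlying type, distinct identity: callers can convert between the two.
type deferredEarlyExit earlyExit

func (e *deferredEarlyExit) Error() string { return e.msg + ": deferred early exit" }

// findFirst stops enumerating as soon as the callback reports a hit.
func findFirst(items []int, hit func(int) bool) error {
    for _, it := range items {
        if hit(it) {
            return &earlyExit{msg: fmt.Sprintf("found %d", it)}
        }
    }
    return nil
}

func main() {
    err := findFirst([]int{1, 2, 3}, func(n int) bool { return n == 2 })

    var ee *earlyExit
    if errors.As(err, &ee) {
        // A caller that must keep enumerating records the signal for later
        // instead of returning it up the stack immediately.
        deferred := (*deferredEarlyExit)(ee)
        fmt.Println(deferred) // found 2: deferred early exit
    }
}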
@@ -307,6 +314,13 @@ func (e *eval) eval(iter evalIterator) error {
|
||||
}
|
||||
|
||||
func (e *eval) evalExpr(iter evalIterator) error {
|
||||
wrapErr := func(err error) error {
|
||||
if !e.findOne {
|
||||
// The current rule/function doesn't support EE, but a caller (somewhere down the call stack) does.
|
||||
return &deferredEarlyExitError{prev: err, e: e}
|
||||
}
|
||||
return &earlyExitError{prev: err, e: e}
|
||||
}
|
||||
|
||||
if e.cancel != nil && e.cancel.Cancelled() {
|
||||
return &Error{
|
||||
@@ -317,16 +331,18 @@ func (e *eval) evalExpr(iter evalIterator) error {
|
||||
|
||||
if e.index >= len(e.query) {
|
||||
err := iter(e)
|
||||
|
||||
if err != nil {
|
||||
ee, ok := err.(*earlyExitError)
|
||||
if !ok {
|
||||
switch err := err.(type) {
|
||||
case *deferredEarlyExitError:
|
||||
return wrapErr(err)
|
||||
case *earlyExitError:
|
||||
return wrapErr(err)
|
||||
default:
|
||||
return err
|
||||
}
|
||||
if !e.findOne {
|
||||
return nil
|
||||
}
|
||||
return &earlyExitError{prev: ee, e: e}
|
||||
}
|
||||
|
||||
if e.findOne && !e.partial() { // we've found one!
|
||||
return &earlyExitError{e: e}
|
||||
}
|
||||
@@ -1826,7 +1842,7 @@ func (e evalFunc) eval(iter unifyIterator) error {
|
||||
}
|
||||
}
|
||||
|
||||
return suppressEarlyExit(e.evalValue(iter, argCount, e.ir.EarlyExit))
|
||||
return e.evalValue(iter, argCount, e.ir.EarlyExit)
|
||||
}
|
||||
|
||||
func (e evalFunc) evalValue(iter unifyIterator, argCount int, findOne bool) error {
|
||||
@@ -1844,33 +1860,52 @@ func (e evalFunc) evalValue(iter unifyIterator, argCount int, findOne bool) erro
|
||||
|
||||
var prev *ast.Term
|
||||
|
||||
for _, rule := range e.ir.Rules {
|
||||
next, err := e.evalOneRule(iter, rule, cacheKey, prev, findOne)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if next == nil {
|
||||
for _, erule := range e.ir.Else[rule] {
|
||||
next, err = e.evalOneRule(iter, erule, cacheKey, prev, findOne)
|
||||
if err != nil {
|
||||
return withSuppressEarlyExit(func() error {
|
||||
var outerEe *deferredEarlyExitError
|
||||
for _, rule := range e.ir.Rules {
|
||||
next, err := e.evalOneRule(iter, rule, cacheKey, prev, findOne)
|
||||
if err != nil {
|
||||
if oee, ok := err.(*deferredEarlyExitError); ok {
|
||||
if outerEe == nil {
|
||||
outerEe = oee
|
||||
}
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
if next != nil {
|
||||
break
|
||||
}
|
||||
if next == nil {
|
||||
for _, erule := range e.ir.Else[rule] {
|
||||
next, err = e.evalOneRule(iter, erule, cacheKey, prev, findOne)
|
||||
if err != nil {
|
||||
if oee, ok := err.(*deferredEarlyExitError); ok {
|
||||
if outerEe == nil {
|
||||
outerEe = oee
|
||||
}
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if next != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if next != nil {
|
||||
prev = next
|
||||
}
|
||||
}
|
||||
if next != nil {
|
||||
prev = next
|
||||
|
||||
if e.ir.Default != nil && prev == nil {
|
||||
_, err := e.evalOneRule(iter, e.ir.Default, cacheKey, prev, findOne)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if e.ir.Default != nil && prev == nil {
|
||||
_, err := e.evalOneRule(iter, e.ir.Default, cacheKey, prev, findOne)
|
||||
return err
|
||||
}
|
||||
if outerEe != nil {
|
||||
return outerEe
|
||||
}
|
||||
|
||||
return nil
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func (e evalFunc) evalCache(argCount int, iter unifyIterator) (ast.Ref, bool, error) {
|
||||
@@ -2129,6 +2164,18 @@ func (e evalTree) enumerate(iter unifyIterator) error {
|
||||
return err
|
||||
}
|
||||
|
||||
var deferredEe *deferredEarlyExitError
|
||||
handleErr := func(err error) error {
|
||||
var dee *deferredEarlyExitError
|
||||
if errors.As(err, &dee) {
|
||||
if deferredEe == nil {
|
||||
deferredEe = dee
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
if doc != nil {
|
||||
switch doc := doc.(type) {
|
||||
case *ast.Array:
|
||||
@@ -2137,31 +2184,37 @@ func (e evalTree) enumerate(iter unifyIterator) error {
|
||||
err := e.e.biunify(k, e.ref[e.pos], e.bindings, e.bindings, func() error {
|
||||
return e.next(iter, k)
|
||||
})
|
||||
if err != nil {
|
||||
|
||||
if err := handleErr(err); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case ast.Object:
|
||||
ki := doc.KeysIterator()
|
||||
for k, more := ki.Next(); more; k, more = ki.Next() {
|
||||
if err := e.e.biunify(k, e.ref[e.pos], e.bindings, e.bindings, func() error {
|
||||
err := e.e.biunify(k, e.ref[e.pos], e.bindings, e.bindings, func() error {
|
||||
return e.next(iter, k)
|
||||
}); err != nil {
|
||||
})
|
||||
if err := handleErr(err); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case ast.Set:
|
||||
err := doc.Iter(func(elem *ast.Term) error {
|
||||
return e.e.biunify(elem, e.ref[e.pos], e.bindings, e.bindings, func() error {
|
||||
if err := doc.Iter(func(elem *ast.Term) error {
|
||||
err := e.e.biunify(elem, e.ref[e.pos], e.bindings, e.bindings, func() error {
|
||||
return e.next(iter, elem)
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return handleErr(err)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if deferredEe != nil {
|
||||
return deferredEe
|
||||
}
|
||||
|
||||
if e.node == nil {
|
||||
return nil
|
||||
}
|
||||
@@ -2926,7 +2979,7 @@ func (e evalVirtualComplete) eval(iter unifyIterator) error {
|
||||
}
|
||||
|
||||
if !e.e.unknown(e.ref, e.bindings) {
|
||||
return suppressEarlyExit(e.evalValue(iter, e.ir.EarlyExit))
|
||||
return e.evalValue(iter, e.ir.EarlyExit)
|
||||
}
|
||||
|
||||
var generateSupport bool
|
||||
@@ -2955,46 +3008,67 @@ func (e evalVirtualComplete) evalValue(iter unifyIterator, findOne bool) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
// a cached result won't generate any EE from evaluating the rule, so we exempt it from EE suppression to not
|
||||
// drop EE generated by the caller (through `iter` invocation).
|
||||
if cached != nil {
|
||||
e.e.instr.counterIncr(evalOpVirtualCacheHit)
|
||||
return e.evalTerm(iter, cached, e.bindings)
|
||||
}
|
||||
|
||||
e.e.instr.counterIncr(evalOpVirtualCacheMiss)
|
||||
return withSuppressEarlyExit(func() error {
|
||||
e.e.instr.counterIncr(evalOpVirtualCacheMiss)
|
||||
|
||||
var prev *ast.Term
|
||||
var prev *ast.Term
|
||||
var deferredEe *deferredEarlyExitError
|
||||
|
||||
for _, rule := range e.ir.Rules {
|
||||
next, err := e.evalValueRule(iter, rule, prev, findOne)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if next == nil {
|
||||
for _, erule := range e.ir.Else[rule] {
|
||||
next, err = e.evalValueRule(iter, erule, prev, findOne)
|
||||
if err != nil {
|
||||
for _, rule := range e.ir.Rules {
|
||||
next, err := e.evalValueRule(iter, rule, prev, findOne)
|
||||
if err != nil {
|
||||
if dee, ok := err.(*deferredEarlyExitError); ok {
|
||||
if deferredEe == nil {
|
||||
deferredEe = dee
|
||||
}
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
if next != nil {
|
||||
break
|
||||
}
|
||||
if next == nil {
|
||||
for _, erule := range e.ir.Else[rule] {
|
||||
next, err = e.evalValueRule(iter, erule, prev, findOne)
|
||||
if err != nil {
|
||||
if dee, ok := err.(*deferredEarlyExitError); ok {
|
||||
if deferredEe == nil {
|
||||
deferredEe = dee
|
||||
}
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if next != nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
if next != nil {
|
||||
prev = next
|
||||
}
|
||||
}
|
||||
if next != nil {
|
||||
prev = next
|
||||
|
||||
if e.ir.Default != nil && prev == nil {
|
||||
_, err := e.evalValueRule(iter, e.ir.Default, prev, findOne)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if e.ir.Default != nil && prev == nil {
|
||||
_, err := e.evalValueRule(iter, e.ir.Default, prev, findOne)
|
||||
return err
|
||||
}
|
||||
if prev == nil {
|
||||
e.e.virtualCache.Put(e.plugged[:e.pos+1], nil)
|
||||
}
|
||||
|
||||
if prev == nil {
|
||||
e.e.virtualCache.Put(e.plugged[:e.pos+1], nil)
|
||||
}
|
||||
if deferredEe != nil {
|
||||
return deferredEe
|
||||
}
|
||||
|
||||
return nil
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
func (e evalVirtualComplete) evalValueRule(iter unifyIterator, rule *ast.Rule, prev *ast.Term, findOne bool) (*ast.Term, error) {
|
||||
@@ -3025,6 +3099,7 @@ func (e evalVirtualComplete) evalValueRule(iter unifyIterator, rule *ast.Rule, p
|
||||
return err
|
||||
}
|
||||
|
||||
// TODO: trace redo if EE-err && !findOne(?)
|
||||
child.traceRedo(rule)
|
||||
return nil
|
||||
})
|
||||
@@ -3197,6 +3272,17 @@ func (e evalTerm) next(iter unifyIterator, plugged *ast.Term) error {
|
||||
}
|
||||
|
||||
func (e evalTerm) enumerate(iter unifyIterator) error {
|
||||
var deferredEe *deferredEarlyExitError
|
||||
handleErr := func(err error) error {
|
||||
var dee *deferredEarlyExitError
|
||||
if errors.As(err, &dee) {
|
||||
if deferredEe == nil {
|
||||
deferredEe = dee
|
||||
}
|
||||
return nil
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
switch v := e.term.Value.(type) {
|
||||
case *ast.Array:
|
||||
@@ -3205,24 +3291,34 @@ func (e evalTerm) enumerate(iter unifyIterator) error {
|
||||
err := e.e.biunify(k, e.ref[e.pos], e.bindings, e.bindings, func() error {
|
||||
return e.next(iter, k)
|
||||
})
|
||||
if err != nil {
|
||||
|
||||
if err := handleErr(err); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case ast.Object:
|
||||
return v.Iter(func(k, _ *ast.Term) error {
|
||||
return e.e.biunify(k, e.ref[e.pos], e.termbindings, e.bindings, func() error {
|
||||
if err := v.Iter(func(k, _ *ast.Term) error {
|
||||
err := e.e.biunify(k, e.ref[e.pos], e.termbindings, e.bindings, func() error {
|
||||
return e.next(iter, e.termbindings.Plug(k))
|
||||
})
|
||||
})
|
||||
return handleErr(err)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
case ast.Set:
|
||||
return v.Iter(func(elem *ast.Term) error {
|
||||
return e.e.biunify(elem, e.ref[e.pos], e.termbindings, e.bindings, func() error {
|
||||
if err := v.Iter(func(elem *ast.Term) error {
|
||||
err := e.e.biunify(elem, e.ref[e.pos], e.termbindings, e.bindings, func() error {
|
||||
return e.next(iter, e.termbindings.Plug(elem))
|
||||
})
|
||||
})
|
||||
return handleErr(err)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if deferredEe != nil {
|
||||
return deferredEe
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -3332,11 +3428,15 @@ func (e evalEvery) eval(iter unifyIterator) error {
|
||||
}
|
||||
|
||||
child.traceRedo(e.expr)
|
||||
return err
|
||||
|
||||
// We don't want to abort the generator domain enumeration with EE.
|
||||
return suppressEarlyExit(err)
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if all {
|
||||
err := iter()
|
||||
domain.traceExit(e.expr)
|
||||
@@ -3662,11 +3762,19 @@ func refContainsNonScalar(ref ast.Ref) bool {
|
||||
}
|
||||
|
||||
func suppressEarlyExit(err error) error {
|
||||
ee, ok := err.(*earlyExitError)
|
||||
if !ok {
|
||||
return err
|
||||
if ee, ok := err.(*earlyExitError); ok {
|
||||
return ee.prev
|
||||
} else if oee, ok := err.(*deferredEarlyExitError); ok {
|
||||
return oee.prev
|
||||
}
|
||||
return ee.prev // nil if we're done
|
||||
return err
|
||||
}
|
||||
|
||||
func withSuppressEarlyExit(f func() error) error {
|
||||
if err := f(); err != nil {
|
||||
return suppressEarlyExit(err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *eval) updateSavedMocks(withs []*ast.With) []*ast.With {
|
||||
|
||||
14 changes: vendor/github.com/open-policy-agent/opa/topdown/providers.go (generated, vendored)
@@ -173,7 +173,19 @@ func builtinAWSSigV4SignReq(ctx BuiltinContext, operands []*ast.Term, iter func(

    // Sign the request object's headers, and reconstruct the headers map.
    headersMap := objectToMap(headers)
    authHeader, awsHeadersMap := aws.SignV4(headersMap, method, theURL, body, service, awsCreds, signingTimestamp)

    // if payload signing config is set, pass it down to the signing method
    disablePayloadSigning := false
    t := awsConfigObj.Get(ast.StringTerm("disable_payload_signing"))
    if t != nil {
        if v, ok := t.Value.(ast.Boolean); ok {
            disablePayloadSigning = bool(v)
        } else {
            return builtins.NewOperandErr(2, "invalid value for 'disable_payload_signing' in AWS config")
        }
    }

    authHeader, awsHeadersMap := aws.SignV4(headersMap, method, theURL, body, service, awsCreds, signingTimestamp, disablePayloadSigning)
    signedHeadersObj := ast.NewObject()
    // Restore original headers
    for k, v := range headersMap {

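For context on what disable_payload_signing toggles (a hedged sketch, not the vendored aws.SignV4 code): SigV4 either signs the SHA-256 of the request body or sends the literal UNSIGNED-PAYLOAD marker as x-amz-content-sha256.

package main

import (
    "crypto/sha256"
    "encoding/hex"
    "fmt"
)

// contentSHA256 returns the value a SigV4 signer would place in the
// x-amz-content-sha256 header for the given body.
func contentSHA256(body []byte, disablePayloadSigning bool) string {
    if disablePayloadSigning {
        return "UNSIGNED-PAYLOAD"
    }
    sum := sha256.Sum256(body)
    return hex.EncodeToString(sum[:])
}

func main() {
    fmt.Println(contentSHA256([]byte(`{"a":1}`), false)) // hex digest of the body
    fmt.Println(contentSHA256(nil, true))                // UNSIGNED-PAYLOAD
}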
2 changes: vendor/github.com/open-policy-agent/opa/util/hashmap.go (generated, vendored)
@@ -72,7 +72,7 @@ func (h *HashMap) Get(k T) (T, bool) {
    return nil, false
}

// Delete removes the the key k.
// Delete removes the key k.
func (h *HashMap) Delete(k T) {
    hash := h.hash(k)
    var prev *hashEntry

2 changes: vendor/github.com/open-policy-agent/opa/version/version.go (generated, vendored)
@@ -11,7 +11,7 @@ import (
)

// Version is the canonical version of OPA.
var Version = "0.62.1"
var Version = "0.64.1"

// GoVersion is the version of Go this was built with
var GoVersion = runtime.Version()

195 changes: vendor/github.com/prometheus/client_model/go/metrics.pb.go (generated, vendored)
@@ -483,6 +483,8 @@ type Histogram struct {
|
||||
// histograms.
|
||||
PositiveDelta []int64 `protobuf:"zigzag64,13,rep,name=positive_delta,json=positiveDelta" json:"positive_delta,omitempty"` // Count delta of each bucket compared to previous one (or to zero for 1st bucket).
|
||||
PositiveCount []float64 `protobuf:"fixed64,14,rep,name=positive_count,json=positiveCount" json:"positive_count,omitempty"` // Absolute count of each bucket.
|
||||
// Only used for native histograms. These exemplars MUST have a timestamp.
|
||||
Exemplars []*Exemplar `protobuf:"bytes,16,rep,name=exemplars" json:"exemplars,omitempty"`
|
||||
}
|
||||
|
||||
func (x *Histogram) Reset() {
|
||||
@@ -622,6 +624,13 @@ func (x *Histogram) GetPositiveCount() []float64 {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *Histogram) GetExemplars() []*Exemplar {
|
||||
if x != nil {
|
||||
return x.Exemplars
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// A Bucket of a conventional histogram, each of which is treated as
|
||||
// an individual counter-like time series by Prometheus.
|
||||
type Bucket struct {
|
||||
@@ -923,6 +932,7 @@ type MetricFamily struct {
|
||||
Help *string `protobuf:"bytes,2,opt,name=help" json:"help,omitempty"`
|
||||
Type *MetricType `protobuf:"varint,3,opt,name=type,enum=io.prometheus.client.MetricType" json:"type,omitempty"`
|
||||
Metric []*Metric `protobuf:"bytes,4,rep,name=metric" json:"metric,omitempty"`
|
||||
Unit *string `protobuf:"bytes,5,opt,name=unit" json:"unit,omitempty"`
|
||||
}
|
||||
|
||||
func (x *MetricFamily) Reset() {
|
||||
@@ -985,6 +995,13 @@ func (x *MetricFamily) GetMetric() []*Metric {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (x *MetricFamily) GetUnit() string {
|
||||
if x != nil && x.Unit != nil {
|
||||
return *x.Unit
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
var File_io_prometheus_client_metrics_proto protoreflect.FileDescriptor
|
||||
|
||||
var file_io_prometheus_client_metrics_proto_rawDesc = []byte{
|
||||
@@ -1028,7 +1045,7 @@ var file_io_prometheus_client_metrics_proto_rawDesc = []byte{
|
||||
0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x10, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64,
|
||||
0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x1f, 0x0a, 0x07, 0x55, 0x6e, 0x74,
|
||||
0x79, 0x70, 0x65, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20,
|
||||
0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xac, 0x05, 0x0a, 0x09, 0x48,
|
||||
0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xea, 0x05, 0x0a, 0x09, 0x48,
|
||||
0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x61, 0x6d, 0x70,
|
||||
0x6c, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0b,
|
||||
0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x2c, 0x0a, 0x12, 0x73,
|
||||
@@ -1071,79 +1088,84 @@ var file_io_prometheus_client_metrics_proto_rawDesc = []byte{
|
||||
0x03, 0x28, 0x12, 0x52, 0x0d, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x44, 0x65, 0x6c,
|
||||
0x74, 0x61, 0x12, 0x25, 0x0a, 0x0e, 0x70, 0x6f, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63,
|
||||
0x6f, 0x75, 0x6e, 0x74, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x01, 0x52, 0x0d, 0x70, 0x6f, 0x73, 0x69,
|
||||
0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xc6, 0x01, 0x0a, 0x06, 0x42, 0x75,
|
||||
0x63, 0x6b, 0x65, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69,
|
||||
0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f,
|
||||
0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12,
|
||||
0x34, 0x0a, 0x16, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f,
|
||||
0x75, 0x6e, 0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52,
|
||||
0x14, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74,
|
||||
0x46, 0x6c, 0x6f, 0x61, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x75, 0x70, 0x70, 0x65, 0x72, 0x5f, 0x62,
|
||||
0x6f, 0x75, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x75, 0x70, 0x70, 0x65,
|
||||
0x72, 0x42, 0x6f, 0x75, 0x6e, 0x64, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c,
|
||||
0x61, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72,
|
||||
0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e,
|
||||
0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c,
|
||||
0x61, 0x72, 0x22, 0x3c, 0x0a, 0x0a, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e,
|
||||
0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x11,
|
||||
0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 0x6e, 0x67,
|
||||
0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68,
|
||||
0x22, 0x91, 0x01, 0x0a, 0x08, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x12, 0x35, 0x0a,
|
||||
0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x3c, 0x0a, 0x09, 0x65, 0x78, 0x65,
|
||||
0x6d, 0x70, 0x6c, 0x61, 0x72, 0x73, 0x18, 0x10, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69,
|
||||
0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69,
|
||||
0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x52, 0x09, 0x65, 0x78,
|
||||
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x73, 0x22, 0xc6, 0x01, 0x0a, 0x06, 0x42, 0x75, 0x63, 0x6b,
|
||||
0x65, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65,
|
||||
0x5f, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x0f, 0x63, 0x75,
|
||||
0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x12, 0x34, 0x0a,
|
||||
0x16, 0x63, 0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x5f, 0x63, 0x6f, 0x75, 0x6e,
|
||||
0x74, 0x5f, 0x66, 0x6c, 0x6f, 0x61, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x01, 0x52, 0x14, 0x63,
|
||||
0x75, 0x6d, 0x75, 0x6c, 0x61, 0x74, 0x69, 0x76, 0x65, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x46, 0x6c,
|
||||
0x6f, 0x61, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x75, 0x70, 0x70, 0x65, 0x72, 0x5f, 0x62, 0x6f, 0x75,
|
||||
0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0a, 0x75, 0x70, 0x70, 0x65, 0x72, 0x42,
|
||||
0x6f, 0x75, 0x6e, 0x64, 0x12, 0x3a, 0x0a, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72,
|
||||
0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d,
|
||||
0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x45, 0x78,
|
||||
0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x52, 0x08, 0x65, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72,
|
||||
0x22, 0x3c, 0x0a, 0x0a, 0x42, 0x75, 0x63, 0x6b, 0x65, 0x74, 0x53, 0x70, 0x61, 0x6e, 0x12, 0x16,
|
||||
0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x11, 0x52, 0x06,
|
||||
0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68,
|
||||
0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x22, 0x91,
|
||||
0x01, 0x0a, 0x08, 0x45, 0x78, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x72, 0x12, 0x35, 0x0a, 0x05, 0x6c,
|
||||
0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e,
|
||||
0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e,
|
||||
0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x62,
|
||||
0x65, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
|
||||
0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69, 0x6d, 0x65,
|
||||
0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f,
|
||||
0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69,
|
||||
0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61,
|
||||
0x6d, 0x70, 0x22, 0xff, 0x02, 0x0a, 0x06, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x12, 0x35, 0x0a,
|
||||
0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69,
|
||||
0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69,
|
||||
0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x52, 0x05, 0x6c,
|
||||
0x61, 0x62, 0x65, 0x6c, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20,
|
||||
0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x38, 0x0a, 0x09, 0x74, 0x69,
|
||||
0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e,
|
||||
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e,
|
||||
0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x74, 0x69, 0x6d, 0x65, 0x73,
|
||||
0x74, 0x61, 0x6d, 0x70, 0x22, 0xff, 0x02, 0x0a, 0x06, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x12,
|
||||
0x35, 0x0a, 0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f,
|
||||
0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63,
|
||||
0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x52,
|
||||
0x05, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x31, 0x0a, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x18,
|
||||
0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65,
|
||||
0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x47, 0x61, 0x75,
|
||||
0x67, 0x65, 0x52, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x63, 0x6f, 0x75,
|
||||
0x6e, 0x74, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e,
|
||||
0x61, 0x62, 0x65, 0x6c, 0x12, 0x31, 0x0a, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x18, 0x02, 0x20,
|
||||
0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68,
|
||||
0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x47, 0x61, 0x75, 0x67, 0x65,
|
||||
0x52, 0x05, 0x67, 0x61, 0x75, 0x67, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74,
|
||||
0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72,
|
||||
0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e,
|
||||
0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x52, 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72,
|
||||
0x12, 0x37, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28,
|
||||
0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75,
|
||||
0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x53, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79,
|
||||
0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x37, 0x0a, 0x07, 0x75, 0x6e, 0x74,
|
||||
0x79, 0x70, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e,
|
||||
0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e,
|
||||
0x74, 0x2e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x65, 0x72, 0x52, 0x07, 0x63, 0x6f, 0x75, 0x6e, 0x74,
|
||||
0x65, 0x72, 0x12, 0x37, 0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x04, 0x20,
|
||||
0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68,
|
||||
0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x53, 0x75, 0x6d, 0x6d, 0x61,
|
||||
0x72, 0x79, 0x52, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x12, 0x37, 0x0a, 0x07, 0x75,
|
||||
0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x69,
|
||||
0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69,
|
||||
0x65, 0x6e, 0x74, 0x2e, 0x55, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x52, 0x07, 0x75, 0x6e, 0x74,
|
||||
0x79, 0x70, 0x65, 0x64, 0x12, 0x3d, 0x0a, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61,
|
||||
0x6d, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f,
|
||||
0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x48,
|
||||
0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67,
|
||||
0x72, 0x61, 0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70,
|
||||
0x5f, 0x6d, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x73,
|
||||
0x74, 0x61, 0x6d, 0x70, 0x4d, 0x73, 0x22, 0xa2, 0x01, 0x0a, 0x0c, 0x4d, 0x65, 0x74, 0x72, 0x69,
|
||||
0x63, 0x46, 0x61, 0x6d, 0x69, 0x6c, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18,
|
||||
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x68,
|
||||
0x65, 0x6c, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x65, 0x6c, 0x70, 0x12,
|
||||
0x34, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e,
|
||||
0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c,
|
||||
0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x52,
|
||||
0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x34, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x18,
|
||||
0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65,
|
||||
0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74,
|
||||
0x72, 0x69, 0x63, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x2a, 0x62, 0x0a, 0x0a, 0x4d,
|
||||
0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x43, 0x4f, 0x55,
|
||||
0x4e, 0x54, 0x45, 0x52, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x47, 0x41, 0x55, 0x47, 0x45, 0x10,
|
||||
0x01, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x55, 0x4d, 0x4d, 0x41, 0x52, 0x59, 0x10, 0x02, 0x12, 0x0b,
|
||||
0x0a, 0x07, 0x55, 0x4e, 0x54, 0x59, 0x50, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x48,
|
||||
0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10, 0x04, 0x12, 0x13, 0x0a, 0x0f, 0x47, 0x41,
|
||||
0x55, 0x47, 0x45, 0x5f, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10, 0x05, 0x42,
|
||||
0x52, 0x0a, 0x14, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73,
|
||||
0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5a, 0x3a, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e,
|
||||
0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2f, 0x63,
|
||||
0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2f, 0x67, 0x6f, 0x3b, 0x69,
|
||||
0x6f, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x5f, 0x63, 0x6c, 0x69,
|
||||
0x65, 0x6e, 0x74,
|
||||
0x74, 0x2e, 0x55, 0x6e, 0x74, 0x79, 0x70, 0x65, 0x64, 0x52, 0x07, 0x75, 0x6e, 0x74, 0x79, 0x70,
|
||||
0x65, 0x64, 0x12, 0x3d, 0x0a, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x18,
|
||||
0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65,
|
||||
0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x48, 0x69, 0x73,
|
||||
0x74, 0x6f, 0x67, 0x72, 0x61, 0x6d, 0x52, 0x09, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x67, 0x72, 0x61,
|
||||
0x6d, 0x12, 0x21, 0x0a, 0x0c, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x6d,
|
||||
0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x03, 0x52, 0x0b, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61,
|
||||
0x6d, 0x70, 0x4d, 0x73, 0x22, 0xb6, 0x01, 0x0a, 0x0c, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x46,
|
||||
0x61, 0x6d, 0x69, 0x6c, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
|
||||
0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x65, 0x6c,
|
||||
0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x65, 0x6c, 0x70, 0x12, 0x34, 0x0a,
|
||||
0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x20, 0x2e, 0x69, 0x6f,
|
||||
0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65,
|
||||
0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74,
|
||||
0x79, 0x70, 0x65, 0x12, 0x34, 0x0a, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x18, 0x04, 0x20,
|
||||
0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68,
|
||||
0x65, 0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x2e, 0x4d, 0x65, 0x74, 0x72, 0x69,
|
||||
0x63, 0x52, 0x06, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x12, 0x12, 0x0a, 0x04, 0x75, 0x6e, 0x69,
|
||||
0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x2a, 0x62, 0x0a,
|
||||
0x0a, 0x4d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x43,
|
||||
0x4f, 0x55, 0x4e, 0x54, 0x45, 0x52, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x47, 0x41, 0x55, 0x47,
|
||||
0x45, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x53, 0x55, 0x4d, 0x4d, 0x41, 0x52, 0x59, 0x10, 0x02,
|
||||
0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x54, 0x59, 0x50, 0x45, 0x44, 0x10, 0x03, 0x12, 0x0d, 0x0a,
|
||||
0x09, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10, 0x04, 0x12, 0x13, 0x0a, 0x0f,
|
||||
0x47, 0x41, 0x55, 0x47, 0x45, 0x5f, 0x48, 0x49, 0x53, 0x54, 0x4f, 0x47, 0x52, 0x41, 0x4d, 0x10,
|
||||
0x05, 0x42, 0x52, 0x0a, 0x14, 0x69, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65,
|
||||
0x75, 0x73, 0x2e, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5a, 0x3a, 0x67, 0x69, 0x74, 0x68, 0x75,
|
||||
0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73,
|
||||
0x2f, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x6c, 0x2f, 0x67, 0x6f,
|
||||
0x3b, 0x69, 0x6f, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x65, 0x74, 0x68, 0x65, 0x75, 0x73, 0x5f, 0x63,
|
||||
0x6c, 0x69, 0x65, 0x6e, 0x74,
|
||||
}
|
||||
|
||||
var (
|
||||
@@ -1185,22 +1207,23 @@ var file_io_prometheus_client_metrics_proto_depIdxs = []int32{
|
||||
13, // 5: io.prometheus.client.Histogram.created_timestamp:type_name -> google.protobuf.Timestamp
|
||||
9, // 6: io.prometheus.client.Histogram.negative_span:type_name -> io.prometheus.client.BucketSpan
|
||||
9, // 7: io.prometheus.client.Histogram.positive_span:type_name -> io.prometheus.client.BucketSpan
|
||||
10, // 8: io.prometheus.client.Bucket.exemplar:type_name -> io.prometheus.client.Exemplar
|
||||
1, // 9: io.prometheus.client.Exemplar.label:type_name -> io.prometheus.client.LabelPair
|
||||
13, // 10: io.prometheus.client.Exemplar.timestamp:type_name -> google.protobuf.Timestamp
|
||||
1, // 11: io.prometheus.client.Metric.label:type_name -> io.prometheus.client.LabelPair
|
||||
2, // 12: io.prometheus.client.Metric.gauge:type_name -> io.prometheus.client.Gauge
|
||||
3, // 13: io.prometheus.client.Metric.counter:type_name -> io.prometheus.client.Counter
|
||||
5, // 14: io.prometheus.client.Metric.summary:type_name -> io.prometheus.client.Summary
|
||||
6, // 15: io.prometheus.client.Metric.untyped:type_name -> io.prometheus.client.Untyped
|
||||
7, // 16: io.prometheus.client.Metric.histogram:type_name -> io.prometheus.client.Histogram
|
||||
0, // 17: io.prometheus.client.MetricFamily.type:type_name -> io.prometheus.client.MetricType
|
||||
11, // 18: io.prometheus.client.MetricFamily.metric:type_name -> io.prometheus.client.Metric
|
||||
19, // [19:19] is the sub-list for method output_type
|
||||
19, // [19:19] is the sub-list for method input_type
|
||||
19, // [19:19] is the sub-list for extension type_name
|
||||
19, // [19:19] is the sub-list for extension extendee
|
||||
0, // [0:19] is the sub-list for field type_name
|
||||
10, // 8: io.prometheus.client.Histogram.exemplars:type_name -> io.prometheus.client.Exemplar
|
||||
10, // 9: io.prometheus.client.Bucket.exemplar:type_name -> io.prometheus.client.Exemplar
|
||||
1, // 10: io.prometheus.client.Exemplar.label:type_name -> io.prometheus.client.LabelPair
|
||||
13, // 11: io.prometheus.client.Exemplar.timestamp:type_name -> google.protobuf.Timestamp
|
||||
1, // 12: io.prometheus.client.Metric.label:type_name -> io.prometheus.client.LabelPair
|
||||
2, // 13: io.prometheus.client.Metric.gauge:type_name -> io.prometheus.client.Gauge
|
||||
3, // 14: io.prometheus.client.Metric.counter:type_name -> io.prometheus.client.Counter
|
||||
5, // 15: io.prometheus.client.Metric.summary:type_name -> io.prometheus.client.Summary
|
||||
6, // 16: io.prometheus.client.Metric.untyped:type_name -> io.prometheus.client.Untyped
|
||||
7, // 17: io.prometheus.client.Metric.histogram:type_name -> io.prometheus.client.Histogram
|
||||
0, // 18: io.prometheus.client.MetricFamily.type:type_name -> io.prometheus.client.MetricType
|
||||
11, // 19: io.prometheus.client.MetricFamily.metric:type_name -> io.prometheus.client.Metric
|
||||
20, // [20:20] is the sub-list for method output_type
|
||||
20, // [20:20] is the sub-list for method input_type
|
||||
20, // [20:20] is the sub-list for extension type_name
|
||||
20, // [20:20] is the sub-list for extension extendee
|
||||
0, // [0:20] is the sub-list for field type_name
|
||||
}
|
||||
|
||||
func init() { file_io_prometheus_client_metrics_proto_init() }
|
||||
|
||||
6 changes: vendor/modules.txt (vendored)
@@ -1506,8 +1506,8 @@ github.com/onsi/gomega/matchers/support/goraph/edge
github.com/onsi/gomega/matchers/support/goraph/node
github.com/onsi/gomega/matchers/support/goraph/util
github.com/onsi/gomega/types
# github.com/open-policy-agent/opa v0.62.1
## explicit; go 1.20
# github.com/open-policy-agent/opa v0.64.1
## explicit; go 1.21
github.com/open-policy-agent/opa/ast
github.com/open-policy-agent/opa/ast/internal/scanner
github.com/open-policy-agent/opa/ast/internal/tokens
@@ -1647,7 +1647,7 @@ github.com/prometheus/client_golang/prometheus
github.com/prometheus/client_golang/prometheus/internal
github.com/prometheus/client_golang/prometheus/promauto
github.com/prometheus/client_golang/prometheus/promhttp
# github.com/prometheus/client_model v0.5.0
# github.com/prometheus/client_model v0.6.1
## explicit; go 1.19
github.com/prometheus/client_model/go
# github.com/prometheus/common v0.48.0
