diff --git a/go.mod b/go.mod index de4423a4e..bfd423e52 100644 --- a/go.mod +++ b/go.mod @@ -62,7 +62,7 @@ require ( github.com/onsi/ginkgo v1.16.5 github.com/onsi/ginkgo/v2 v2.23.4 github.com/onsi/gomega v1.37.0 - github.com/open-policy-agent/opa v1.4.2 + github.com/open-policy-agent/opa v1.5.0 github.com/opencloud-eu/libre-graph-api-go v1.0.6 github.com/opencloud-eu/reva/v2 v2.33.1-0.20250520152851-d33c49bb52b9 github.com/orcaman/concurrent-map v1.0.0 @@ -177,7 +177,7 @@ require ( github.com/evanphx/json-patch/v5 v5.5.0 // indirect github.com/fatih/color v1.18.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect - github.com/fsnotify/fsnotify v1.8.0 // indirect + github.com/fsnotify/fsnotify v1.9.0 // indirect github.com/gdexlab/go-render v1.0.1 // indirect github.com/go-acme/lego/v4 v4.4.0 // indirect github.com/go-asn1-ber/asn1-ber v1.5.8-0.20250403174932-29230038a667 // indirect @@ -277,7 +277,7 @@ require ( github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/pquerna/cachecontrol v0.2.0 // indirect github.com/prometheus/alertmanager v0.28.1 // indirect - github.com/prometheus/client_model v0.6.1 // indirect + github.com/prometheus/client_model v0.6.2 // indirect github.com/prometheus/common v0.62.0 // indirect github.com/prometheus/procfs v0.15.1 // indirect github.com/prometheus/statsd_exporter v0.22.8 // indirect @@ -305,6 +305,7 @@ require ( github.com/tidwall/pretty v1.2.1 // indirect github.com/toorop/go-dkim v0.0.0-20201103131630-e1cd1a0a5208 // indirect github.com/trustelem/zxcvbn v1.0.1 // indirect + github.com/vektah/gqlparser/v2 v2.5.26 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/wk8/go-ordered-map v1.0.0 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect diff --git a/go.sum b/go.sum index c0388a4d1..63850e802 100644 --- a/go.sum +++ b/go.sum @@ -109,6 +109,8 @@ github.com/alexedwards/argon2id v1.0.0/go.mod h1:tYKkqIjzXvZdzPvADMWOEZ+l6+BD6Ct 
github.com/aliyun/alibaba-cloud-sdk-go v1.61.976/go.mod h1:pUKYbK5JQ+1Dfxk80P0qxGqe5dkxDoabbZS7zOcouyA= github.com/amoghe/go-crypt v0.0.0-20220222110647-20eada5f5964 h1:I9YN9WMo3SUh7p/4wKeNvD/IQla3U3SUa61U7ul+xM4= github.com/amoghe/go-crypt v0.0.0-20220222110647-20eada5f5964/go.mod h1:eFiR01PwTcpbzXtdMces7zxg6utvFM5puiWHpWB8D/k= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/antithesishq/antithesis-sdk-go v0.4.3-default-no-op h1:+OSa/t11TFhqfrX0EOSqQBDJ0YlpmK0rDSiB19dg9M0= @@ -318,8 +320,8 @@ github.com/fschade/icap-client v0.0.0-20240802074440-aade4a234387 h1:Y3wZgTr29sL github.com/fschade/icap-client v0.0.0-20240802074440-aade4a234387/go.mod h1:HpntrRsQA6RKNXy2Nbr4kVj+NO3OYWpAQUVxeya+3sU= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= -github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k= +github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY= github.com/gabriel-vasile/mimetype v1.4.9/go.mod h1:WnSQhFKJuBlRyLiKohA/2DtIlPFAbguNaG7QCHcyGok= github.com/gdexlab/go-render v1.0.1 h1:rxqB3vo5s4n1kF0ySmoNeSPRYkEsyHgln4jFIQY7v0U= @@ -861,8 +863,8 @@ github.com/onsi/gomega v1.7.1/go.mod 
h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.37.0 h1:CdEG8g0S133B4OswTDC/5XPSzE1OeP29QOioj2PID2Y= github.com/onsi/gomega v1.37.0/go.mod h1:8D9+Txp43QWKhM24yyOBEdpkzN8FvJyAwecBgsU4KU0= -github.com/open-policy-agent/opa v1.4.2 h1:ag4upP7zMsa4WE2p1pwAFeG4Pn3mNwfAx9DLhhJfbjU= -github.com/open-policy-agent/opa v1.4.2/go.mod h1:DNzZPKqKh4U0n0ANxcCVlw8lCSv2c+h5G/3QvSYdWZ8= +github.com/open-policy-agent/opa v1.5.0 h1:npsQMUZvafCLYHofoNrZ0cSWbvoDpasvWtrHXdEvSuM= +github.com/open-policy-agent/opa v1.5.0/go.mod h1:bYbS7u+uhTI+cxHQIpzvr5hxX0hV7urWtY+38ZtjMgk= github.com/opencloud-eu/go-micro-plugins/v4/store/nats-js-kv v0.0.0-20250512152754-23325793059a h1:Sakl76blJAaM6NxylVkgSzktjo2dS504iDotEFJsh3M= github.com/opencloud-eu/go-micro-plugins/v4/store/nats-js-kv v0.0.0-20250512152754-23325793059a/go.mod h1:pjcozWijkNPbEtX5SIQaxEW/h8VAVZYTLx+70bmB3LY= github.com/opencloud-eu/libre-graph-api-go v1.0.6 h1:bUQq0tfaLboZZmPuI6C1rr/wFIVOIM9IsE1WqI5QsDA= @@ -933,8 +935,8 @@ github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1: github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= -github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= github.com/prometheus/common v0.0.0-20170706130215-fb369f752a7f/go.mod 
h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= @@ -1107,6 +1109,8 @@ github.com/urfave/cli/v2 v2.27.6/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5 github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= +github.com/vektah/gqlparser/v2 v2.5.26 h1:REqqFkO8+SOEgZHR/eHScjjVjGS8Nk3RMO/juiTobN4= +github.com/vektah/gqlparser/v2 v2.5.26/go.mod h1:D1/VCZtV3LPnQrcPBeR/q5jkSQIPti0uYCP/RI0gIeo= github.com/vinyldns/go-vinyldns v0.0.0-20200917153823-148a5f6b8f14/go.mod h1:RWc47jtnVuQv6+lY3c768WtXCas/Xi+U5UFc5xULmYg= github.com/vmihailenco/msgpack/v5 v5.4.1 h1:cQriyiUvjTwOHg8QZaPihLWeRAAVoCpE00IUPn0Bjt8= github.com/vmihailenco/msgpack/v5 v5.4.1/go.mod h1:GaZTsDaehaPpQVyxrf5mtQlH+pc21PIudVV/E3rRQok= diff --git a/vendor/github.com/fsnotify/fsnotify/.cirrus.yml b/vendor/github.com/fsnotify/fsnotify/.cirrus.yml index f4e7dbf37..7f257e99a 100644 --- a/vendor/github.com/fsnotify/fsnotify/.cirrus.yml +++ b/vendor/github.com/fsnotify/fsnotify/.cirrus.yml @@ -1,7 +1,7 @@ freebsd_task: name: 'FreeBSD' freebsd_instance: - image_family: freebsd-14-1 + image_family: freebsd-14-2 install_script: - pkg update -f - pkg install -y go diff --git a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md index fa854785d..6468d2cf4 100644 --- a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md +++ b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md @@ -1,6 +1,39 @@ # Changelog -1.8.0 2023-10-31 +1.9.0 2024-04-04 +---------------- + +### Changes and fixes + +- all: make BufferedWatcher buffered again ([#657]) + +- inotify: fix 
race when adding/removing watches while a watched path is being + deleted ([#678], [#686]) + +- inotify: don't send empty event if a watched path is unmounted ([#655]) + +- inotify: don't register duplicate watches when watching both a symlink and its + target; previously that would get "half-added" and removing the second would + panic ([#679]) + +- kqueue: fix watching relative symlinks ([#681]) + +- kqueue: correctly mark pre-existing entries when watching a link to a dir on + kqueue ([#682]) + +- illumos: don't send error if changed file is deleted while processing the + event ([#678]) + + +[#657]: https://github.com/fsnotify/fsnotify/pull/657 +[#678]: https://github.com/fsnotify/fsnotify/pull/678 +[#686]: https://github.com/fsnotify/fsnotify/pull/686 +[#655]: https://github.com/fsnotify/fsnotify/pull/655 +[#681]: https://github.com/fsnotify/fsnotify/pull/681 +[#679]: https://github.com/fsnotify/fsnotify/pull/679 +[#682]: https://github.com/fsnotify/fsnotify/pull/682 + +1.8.0 2024-10-31 ---------------- ### Additions diff --git a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md index e4ac2a2ff..4cc40fa59 100644 --- a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md +++ b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md @@ -77,6 +77,7 @@ End-of-line escapes with `\` are not supported. debug [yes/no] # Enable/disable FSNOTIFY_DEBUG (tests are run in parallel by default, so -parallel=1 is probably a good idea). + print [any strings] # Print text to stdout; for debugging. 
touch path mkdir [-p] dir diff --git a/vendor/github.com/fsnotify/fsnotify/README.md b/vendor/github.com/fsnotify/fsnotify/README.md index e480733d1..1f4eb583d 100644 --- a/vendor/github.com/fsnotify/fsnotify/README.md +++ b/vendor/github.com/fsnotify/fsnotify/README.md @@ -15,7 +15,6 @@ Platform support: | ReadDirectoryChangesW | Windows | Supported | | FEN | illumos | Supported | | fanotify | Linux 5.9+ | [Not yet](https://github.com/fsnotify/fsnotify/issues/114) | -| AHAFS | AIX | [aix branch]; experimental due to lack of maintainer and test environment | | FSEvents | macOS | [Needs support in x/sys/unix][fsevents] | | USN Journals | Windows | [Needs support in x/sys/windows][usn] | | Polling | *All* | [Not yet](https://github.com/fsnotify/fsnotify/issues/9) | @@ -25,7 +24,6 @@ untested. [fsevents]: https://github.com/fsnotify/fsnotify/issues/11#issuecomment-1279133120 [usn]: https://github.com/fsnotify/fsnotify/issues/53#issuecomment-1279829847 -[aix branch]: https://github.com/fsnotify/fsnotify/issues/353#issuecomment-1284590129 Usage ----- diff --git a/vendor/github.com/fsnotify/fsnotify/backend_fen.go b/vendor/github.com/fsnotify/fsnotify/backend_fen.go index c349c326c..57fc69284 100644 --- a/vendor/github.com/fsnotify/fsnotify/backend_fen.go +++ b/vendor/github.com/fsnotify/fsnotify/backend_fen.go @@ -9,6 +9,7 @@ package fsnotify import ( "errors" "fmt" + "io/fs" "os" "path/filepath" "sync" @@ -19,27 +20,25 @@ import ( ) type fen struct { + *shared Events chan Event Errors chan error mu sync.Mutex port *unix.EventPort - done chan struct{} // Channel for sending a "quit message" to the reader goroutine dirs map[string]Op // Explicitly watched directories watches map[string]Op // Explicitly watched non-directories } -func newBackend(ev chan Event, errs chan error) (backend, error) { - return newBufferedBackend(0, ev, errs) -} +var defaultBufferSize = 0 -func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error) { +func newBackend(ev 
chan Event, errs chan error) (backend, error) { w := &fen{ + shared: newShared(ev, errs), Events: ev, Errors: errs, dirs: make(map[string]Op), watches: make(map[string]Op), - done: make(chan struct{}), } var err error @@ -52,49 +51,10 @@ func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error return w, nil } -// sendEvent attempts to send an event to the user, returning true if the event -// was put in the channel successfully and false if the watcher has been closed. -func (w *fen) sendEvent(name string, op Op) (sent bool) { - select { - case <-w.done: - return false - case w.Events <- Event{Name: name, Op: op}: - return true - } -} - -// sendError attempts to send an error to the user, returning true if the error -// was put in the channel successfully and false if the watcher has been closed. -func (w *fen) sendError(err error) (sent bool) { - if err == nil { - return true - } - select { - case <-w.done: - return false - case w.Errors <- err: - return true - } -} - -func (w *fen) isClosed() bool { - select { - case <-w.done: - return true - default: - return false - } -} - func (w *fen) Close() error { - // Take the lock used by associateFile to prevent lingering events from - // being processed after the close - w.mu.Lock() - defer w.mu.Unlock() - if w.isClosed() { + if w.shared.close() { return nil } - close(w.done) return w.port.Close() } @@ -209,7 +169,7 @@ func (w *fen) readEvents() { return } // There was an error not caused by calling w.Close() - if !w.sendError(err) { + if !w.sendError(fmt.Errorf("port.Get: %w", err)) { return } } @@ -277,13 +237,13 @@ func (w *fen) handleEvent(event *unix.PortEvent) error { isWatched := watchedDir || watchedPath if events&unix.FILE_DELETE != 0 { - if !w.sendEvent(path, Remove) { + if !w.sendEvent(Event{Name: path, Op: Remove}) { return nil } reRegister = false } if events&unix.FILE_RENAME_FROM != 0 { - if !w.sendEvent(path, Rename) { + if !w.sendEvent(Event{Name: path, Op: Rename}) { return nil 
} // Don't keep watching the new file name @@ -297,7 +257,7 @@ func (w *fen) handleEvent(event *unix.PortEvent) error { // inotify reports a Remove event in this case, so we simulate this // here. - if !w.sendEvent(path, Remove) { + if !w.sendEvent(Event{Name: path, Op: Remove}) { return nil } // Don't keep watching the file that was removed @@ -331,7 +291,7 @@ func (w *fen) handleEvent(event *unix.PortEvent) error { // get here, the sudirectory is already gone. Clearly we were watching // this path but now it is gone. Let's tell the user that it was // removed. - if !w.sendEvent(path, Remove) { + if !w.sendEvent(Event{Name: path, Op: Remove}) { return nil } // Suppress extra write events on removed directories; they are not @@ -346,7 +306,7 @@ func (w *fen) handleEvent(event *unix.PortEvent) error { if err != nil { // The symlink still exists, but the target is gone. Report the // Remove similar to above. - if !w.sendEvent(path, Remove) { + if !w.sendEvent(Event{Name: path, Op: Remove}) { return nil } // Don't return the error @@ -359,7 +319,7 @@ func (w *fen) handleEvent(event *unix.PortEvent) error { return err } } else { - if !w.sendEvent(path, Write) { + if !w.sendEvent(Event{Name: path, Op: Write}) { return nil } } @@ -367,7 +327,7 @@ func (w *fen) handleEvent(event *unix.PortEvent) error { if events&unix.FILE_ATTRIB != 0 && stat != nil { // Only send Chmod if perms changed if stat.Mode().Perm() != fmode.Perm() { - if !w.sendEvent(path, Chmod) { + if !w.sendEvent(Event{Name: path, Op: Chmod}) { return nil } } @@ -376,17 +336,27 @@ func (w *fen) handleEvent(event *unix.PortEvent) error { if stat != nil { // If we get here, it means we've hit an event above that requires us to // continue watching the file or directory - return w.associateFile(path, stat, isWatched) + err := w.associateFile(path, stat, isWatched) + if errors.Is(err, fs.ErrNotExist) { + // Path may have been removed since the stat. 
+ err = nil + } + return err } return nil } +// The directory was modified, so we must find unwatched entities and watch +// them. If something was removed from the directory, nothing will happen, as +// everything else should still be watched. func (w *fen) updateDirectory(path string) error { - // The directory was modified, so we must find unwatched entities and watch - // them. If something was removed from the directory, nothing will happen, - // as everything else should still be watched. files, err := os.ReadDir(path) if err != nil { + // Directory no longer exists: probably just deleted since we got the + // event. + if errors.Is(err, fs.ErrNotExist) { + return nil + } return err } @@ -401,10 +371,15 @@ func (w *fen) updateDirectory(path string) error { return err } err = w.associateFile(path, finfo, false) + if errors.Is(err, fs.ErrNotExist) { + // File may have disappeared between getting the dir listing and + // adding the port: that's okay to ignore. + continue + } if !w.sendError(err) { return nil } - if !w.sendEvent(path, Create) { + if !w.sendEvent(Event{Name: path, Op: Create}) { return nil } } @@ -430,7 +405,7 @@ func (w *fen) associateFile(path string, stat os.FileInfo, follow bool) error { // has fired but we haven't processed it yet. 
err := w.port.DissociatePath(path) if err != nil && !errors.Is(err, unix.ENOENT) { - return err + return fmt.Errorf("port.DissociatePath(%q): %w", path, err) } } @@ -446,14 +421,22 @@ func (w *fen) associateFile(path string, stat os.FileInfo, follow bool) error { if true { events |= unix.FILE_ATTRIB } - return w.port.AssociatePath(path, stat, events, stat.Mode()) + err := w.port.AssociatePath(path, stat, events, stat.Mode()) + if err != nil { + return fmt.Errorf("port.AssociatePath(%q): %w", path, err) + } + return nil } func (w *fen) dissociateFile(path string, stat os.FileInfo, unused bool) error { if !w.port.PathIsWatched(path) { return nil } - return w.port.DissociatePath(path) + err := w.port.DissociatePath(path) + if err != nil { + return fmt.Errorf("port.DissociatePath(%q): %w", path, err) + } + return nil } func (w *fen) WatchList() []string { diff --git a/vendor/github.com/fsnotify/fsnotify/backend_inotify.go b/vendor/github.com/fsnotify/fsnotify/backend_inotify.go index 36c311694..a36cb89d7 100644 --- a/vendor/github.com/fsnotify/fsnotify/backend_inotify.go +++ b/vendor/github.com/fsnotify/fsnotify/backend_inotify.go @@ -19,6 +19,7 @@ import ( ) type inotify struct { + *shared Events chan Event Errors chan error @@ -27,8 +28,6 @@ type inotify struct { fd int inotifyFile *os.File watches *watches - done chan struct{} // Channel for sending a "quit message" to the reader goroutine - doneMu sync.Mutex doneResp chan struct{} // Channel to respond to Close // Store rename cookies in an array, with the index wrapping to 0. 
Almost @@ -52,7 +51,6 @@ type inotify struct { type ( watches struct { - mu sync.RWMutex wd map[uint32]*watch // wd → watch path map[string]uint32 // pathname → wd } @@ -75,34 +73,13 @@ func newWatches() *watches { } } -func (w *watches) len() int { - w.mu.RLock() - defer w.mu.RUnlock() - return len(w.wd) -} - -func (w *watches) add(ww *watch) { - w.mu.Lock() - defer w.mu.Unlock() - w.wd[ww.wd] = ww - w.path[ww.path] = ww.wd -} - -func (w *watches) remove(wd uint32) { - w.mu.Lock() - defer w.mu.Unlock() - watch := w.wd[wd] // Could have had Remove() called. See #616. - if watch == nil { - return - } - delete(w.path, watch.path) - delete(w.wd, wd) -} +func (w *watches) byPath(path string) *watch { return w.wd[w.path[path]] } +func (w *watches) byWd(wd uint32) *watch { return w.wd[wd] } +func (w *watches) len() int { return len(w.wd) } +func (w *watches) add(ww *watch) { w.wd[ww.wd] = ww; w.path[ww.path] = ww.wd } +func (w *watches) remove(watch *watch) { delete(w.path, watch.path); delete(w.wd, watch.wd) } func (w *watches) removePath(path string) ([]uint32, error) { - w.mu.Lock() - defer w.mu.Unlock() - path, recurse := recursivePath(path) wd, ok := w.path[path] if !ok { @@ -123,7 +100,7 @@ func (w *watches) removePath(path string) ([]uint32, error) { wds := make([]uint32, 0, 8) wds = append(wds, wd) for p, rwd := range w.path { - if filepath.HasPrefix(p, path) { + if strings.HasPrefix(p, path) { delete(w.path, p) delete(w.wd, rwd) wds = append(wds, rwd) @@ -132,22 +109,7 @@ func (w *watches) removePath(path string) ([]uint32, error) { return wds, nil } -func (w *watches) byPath(path string) *watch { - w.mu.RLock() - defer w.mu.RUnlock() - return w.wd[w.path[path]] -} - -func (w *watches) byWd(wd uint32) *watch { - w.mu.RLock() - defer w.mu.RUnlock() - return w.wd[wd] -} - func (w *watches) updatePath(path string, f func(*watch) (*watch, error)) error { - w.mu.Lock() - defer w.mu.Unlock() - var existing *watch wd, ok := w.path[path] if ok { @@ -170,11 +132,9 @@ 
func (w *watches) updatePath(path string, f func(*watch) (*watch, error)) error return nil } -func newBackend(ev chan Event, errs chan error) (backend, error) { - return newBufferedBackend(0, ev, errs) -} +var defaultBufferSize = 0 -func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error) { +func newBackend(ev chan Event, errs chan error) (backend, error) { // Need to set nonblocking mode for SetDeadline to work, otherwise blocking // I/O operations won't terminate on close. fd, errno := unix.InotifyInit1(unix.IN_CLOEXEC | unix.IN_NONBLOCK) @@ -183,12 +143,12 @@ func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error } w := &inotify{ + shared: newShared(ev, errs), Events: ev, Errors: errs, fd: fd, inotifyFile: os.NewFile(uintptr(fd), ""), watches: newWatches(), - done: make(chan struct{}), doneResp: make(chan struct{}), } @@ -196,46 +156,10 @@ func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error return w, nil } -// Returns true if the event was sent, or false if watcher is closed. -func (w *inotify) sendEvent(e Event) bool { - select { - case <-w.done: - return false - case w.Events <- e: - return true - } -} - -// Returns true if the error was sent, or false if watcher is closed. -func (w *inotify) sendError(err error) bool { - if err == nil { - return true - } - select { - case <-w.done: - return false - case w.Errors <- err: - return true - } -} - -func (w *inotify) isClosed() bool { - select { - case <-w.done: - return true - default: - return false - } -} - func (w *inotify) Close() error { - w.doneMu.Lock() - if w.isClosed() { - w.doneMu.Unlock() + if w.shared.close() { return nil } - close(w.done) - w.doneMu.Unlock() // Causes any blocking reads to return with an error, provided the file // still supports deadline operations. 
@@ -244,9 +168,7 @@ func (w *inotify) Close() error { return err } - // Wait for goroutine to close - <-w.doneResp - + <-w.doneResp // Wait for readEvents() to finish. return nil } @@ -266,6 +188,43 @@ func (w *inotify) AddWith(path string, opts ...addOpt) error { return fmt.Errorf("%w: %s", xErrUnsupported, with.op) } + add := func(path string, with withOpts, recurse bool) error { + var flags uint32 + if with.noFollow { + flags |= unix.IN_DONT_FOLLOW + } + if with.op.Has(Create) { + flags |= unix.IN_CREATE + } + if with.op.Has(Write) { + flags |= unix.IN_MODIFY + } + if with.op.Has(Remove) { + flags |= unix.IN_DELETE | unix.IN_DELETE_SELF + } + if with.op.Has(Rename) { + flags |= unix.IN_MOVED_TO | unix.IN_MOVED_FROM | unix.IN_MOVE_SELF + } + if with.op.Has(Chmod) { + flags |= unix.IN_ATTRIB + } + if with.op.Has(xUnportableOpen) { + flags |= unix.IN_OPEN + } + if with.op.Has(xUnportableRead) { + flags |= unix.IN_ACCESS + } + if with.op.Has(xUnportableCloseWrite) { + flags |= unix.IN_CLOSE_WRITE + } + if with.op.Has(xUnportableCloseRead) { + flags |= unix.IN_CLOSE_NOWRITE + } + return w.register(path, flags, recurse) + } + + w.mu.Lock() + defer w.mu.Unlock() path, recurse := recursivePath(path) if recurse { return filepath.WalkDir(path, func(root string, d fs.DirEntry, err error) error { @@ -289,46 +248,11 @@ func (w *inotify) AddWith(path string, opts ...addOpt) error { w.sendEvent(Event{Name: root, Op: Create}) } - return w.add(root, with, true) + return add(root, with, true) }) } - return w.add(path, with, false) -} - -func (w *inotify) add(path string, with withOpts, recurse bool) error { - var flags uint32 - if with.noFollow { - flags |= unix.IN_DONT_FOLLOW - } - if with.op.Has(Create) { - flags |= unix.IN_CREATE - } - if with.op.Has(Write) { - flags |= unix.IN_MODIFY - } - if with.op.Has(Remove) { - flags |= unix.IN_DELETE | unix.IN_DELETE_SELF - } - if with.op.Has(Rename) { - flags |= unix.IN_MOVED_TO | unix.IN_MOVED_FROM | unix.IN_MOVE_SELF - } - if 
with.op.Has(Chmod) { - flags |= unix.IN_ATTRIB - } - if with.op.Has(xUnportableOpen) { - flags |= unix.IN_OPEN - } - if with.op.Has(xUnportableRead) { - flags |= unix.IN_ACCESS - } - if with.op.Has(xUnportableCloseWrite) { - flags |= unix.IN_CLOSE_WRITE - } - if with.op.Has(xUnportableCloseRead) { - flags |= unix.IN_CLOSE_NOWRITE - } - return w.register(path, flags, recurse) + return add(path, with, false) } func (w *inotify) register(path string, flags uint32, recurse bool) error { @@ -342,6 +266,10 @@ func (w *inotify) register(path string, flags uint32, recurse bool) error { return nil, err } + if e, ok := w.watches.wd[uint32(wd)]; ok { + return e, nil + } + if existing == nil { return &watch{ wd: uint32(wd), @@ -365,6 +293,9 @@ func (w *inotify) Remove(name string) error { fmt.Fprintf(os.Stderr, "FSNOTIFY_DEBUG: %s Remove(%q)\n", time.Now().Format("15:04:05.000000000"), name) } + + w.mu.Lock() + defer w.mu.Unlock() return w.remove(filepath.Clean(name)) } @@ -399,13 +330,12 @@ func (w *inotify) WatchList() []string { return nil } + w.mu.Lock() + defer w.mu.Unlock() entries := make([]string, 0, w.watches.len()) - w.watches.mu.RLock() for pathname := range w.watches.path { entries = append(entries, pathname) } - w.watches.mu.RUnlock() - return entries } @@ -418,21 +348,17 @@ func (w *inotify) readEvents() { close(w.Events) }() - var ( - buf [unix.SizeofInotifyEvent * 4096]byte // Buffer for a maximum of 4096 raw events - errno error // Syscall errno - ) + var buf [unix.SizeofInotifyEvent * 4096]byte // Buffer for a maximum of 4096 raw events for { - // See if we have been closed. 
if w.isClosed() { return } n, err := w.inotifyFile.Read(buf[:]) - switch { - case errors.Unwrap(err) == os.ErrClosed: - return - case err != nil: + if err != nil { + if errors.Is(err, os.ErrClosed) { + return + } if !w.sendError(err) { return } @@ -440,13 +366,9 @@ func (w *inotify) readEvents() { } if n < unix.SizeofInotifyEvent { - var err error + err := errors.New("notify: short read in readEvents()") // Read was too short. if n == 0 { err = io.EOF // If EOF is received. This should really never happen. - } else if n < 0 { - err = errno // If an error occurred while reading. - } else { - err = errors.New("notify: short read in readEvents()") // Read was too short. } if !w.sendError(err) { return @@ -454,134 +376,137 @@ func (w *inotify) readEvents() { continue } - // We don't know how many events we just read into the buffer - // While the offset points to at least one whole event... + // We don't know how many events we just read into the buffer While the + // offset points to at least one whole event. var offset uint32 for offset <= uint32(n-unix.SizeofInotifyEvent) { - var ( - // Point "raw" to the event in the buffer - raw = (*unix.InotifyEvent)(unsafe.Pointer(&buf[offset])) - mask = uint32(raw.Mask) - nameLen = uint32(raw.Len) - // Move to the next event in the buffer - next = func() { offset += unix.SizeofInotifyEvent + nameLen } - ) + // Point to the event in the buffer. + inEvent := (*unix.InotifyEvent)(unsafe.Pointer(&buf[offset])) - if mask&unix.IN_Q_OVERFLOW != 0 { + if inEvent.Mask&unix.IN_Q_OVERFLOW != 0 { if !w.sendError(ErrEventOverflow) { return } } - /// If the event happened to the watched directory or the watched - /// file, the kernel doesn't append the filename to the event, but - /// we would like to always fill the the "Name" field with a valid - /// filename. We retrieve the path of the watch from the "paths" - /// map. 
- watch := w.watches.byWd(uint32(raw.Wd)) - /// Can be nil if Remove() was called in another goroutine for this - /// path inbetween reading the events from the kernel and reading - /// the internal state. Not much we can do about it, so just skip. - /// See #616. - if watch == nil { - next() - continue + ev, ok := w.handleEvent(inEvent, &buf, offset) + if !ok { + return } - - name := watch.path - if nameLen > 0 { - /// Point "bytes" at the first byte of the filename - bytes := (*[unix.PathMax]byte)(unsafe.Pointer(&buf[offset+unix.SizeofInotifyEvent]))[:nameLen:nameLen] - /// The filename is padded with NULL bytes. TrimRight() gets rid of those. - name += "/" + strings.TrimRight(string(bytes[0:nameLen]), "\000") - } - - if debug { - internal.Debug(name, raw.Mask, raw.Cookie) - } - - if mask&unix.IN_IGNORED != 0 { //&& event.Op != 0 - next() - continue - } - - // inotify will automatically remove the watch on deletes; just need - // to clean our state here. - if mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF { - w.watches.remove(watch.wd) - } - - // We can't really update the state when a watched path is moved; - // only IN_MOVE_SELF is sent and not IN_MOVED_{FROM,TO}. So remove - // the watch. - if mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF { - if watch.recurse { - next() // Do nothing - continue - } - - err := w.remove(watch.path) - if err != nil && !errors.Is(err, ErrNonExistentWatch) { - if !w.sendError(err) { - return - } - } - } - - /// Skip if we're watching both this path and the parent; the parent - /// will already send a delete so no need to do it twice. - if mask&unix.IN_DELETE_SELF != 0 { - if _, ok := w.watches.path[filepath.Dir(watch.path)]; ok { - next() - continue - } - } - - ev := w.newEvent(name, mask, raw.Cookie) - // Need to update watch path for recurse. - if watch.recurse { - isDir := mask&unix.IN_ISDIR == unix.IN_ISDIR - /// New directory created: set up watch on it. 
- if isDir && ev.Has(Create) { - err := w.register(ev.Name, watch.flags, true) - if !w.sendError(err) { - return - } - - // This was a directory rename, so we need to update all - // the children. - // - // TODO: this is of course pretty slow; we should use a - // better data structure for storing all of this, e.g. store - // children in the watch. I have some code for this in my - // kqueue refactor we can use in the future. For now I'm - // okay with this as it's not publicly available. - // Correctness first, performance second. - if ev.renamedFrom != "" { - w.watches.mu.Lock() - for k, ww := range w.watches.wd { - if k == watch.wd || ww.path == ev.Name { - continue - } - if strings.HasPrefix(ww.path, ev.renamedFrom) { - ww.path = strings.Replace(ww.path, ev.renamedFrom, ev.Name, 1) - w.watches.wd[k] = ww - } - } - w.watches.mu.Unlock() - } - } - } - - /// Send the events that are not ignored on the events channel if !w.sendEvent(ev) { return } - next() + + // Move to the next event in the buffer + offset += unix.SizeofInotifyEvent + inEvent.Len } } } +func (w *inotify) handleEvent(inEvent *unix.InotifyEvent, buf *[65536]byte, offset uint32) (Event, bool) { + w.mu.Lock() + defer w.mu.Unlock() + + /// If the event happened to the watched directory or the watched file, the + /// kernel doesn't append the filename to the event, but we would like to + /// always fill the the "Name" field with a valid filename. We retrieve the + /// path of the watch from the "paths" map. + /// + /// Can be nil if Remove() was called in another goroutine for this path + /// inbetween reading the events from the kernel and reading the internal + /// state. Not much we can do about it, so just skip. See #616. 
+ watch := w.watches.byWd(uint32(inEvent.Wd)) + if watch == nil { + return Event{}, true + } + + var ( + name = watch.path + nameLen = uint32(inEvent.Len) + ) + if nameLen > 0 { + /// Point "bytes" at the first byte of the filename + bb := *buf + bytes := (*[unix.PathMax]byte)(unsafe.Pointer(&bb[offset+unix.SizeofInotifyEvent]))[:nameLen:nameLen] + /// The filename is padded with NULL bytes. TrimRight() gets rid of those. + name += "/" + strings.TrimRight(string(bytes[0:nameLen]), "\x00") + } + + if debug { + internal.Debug(name, inEvent.Mask, inEvent.Cookie) + } + + if inEvent.Mask&unix.IN_IGNORED != 0 || inEvent.Mask&unix.IN_UNMOUNT != 0 { + w.watches.remove(watch) + return Event{}, true + } + + // inotify will automatically remove the watch on deletes; just need + // to clean our state here. + if inEvent.Mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF { + w.watches.remove(watch) + } + + // We can't really update the state when a watched path is moved; only + // IN_MOVE_SELF is sent and not IN_MOVED_{FROM,TO}. So remove the watch. + if inEvent.Mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF { + if watch.recurse { // Do nothing + return Event{}, true + } + + err := w.remove(watch.path) + if err != nil && !errors.Is(err, ErrNonExistentWatch) { + if !w.sendError(err) { + return Event{}, false + } + } + } + + /// Skip if we're watching both this path and the parent; the parent will + /// already send a delete so no need to do it twice. + if inEvent.Mask&unix.IN_DELETE_SELF != 0 { + _, ok := w.watches.path[filepath.Dir(watch.path)] + if ok { + return Event{}, true + } + } + + ev := w.newEvent(name, inEvent.Mask, inEvent.Cookie) + // Need to update watch path for recurse. + if watch.recurse { + isDir := inEvent.Mask&unix.IN_ISDIR == unix.IN_ISDIR + /// New directory created: set up watch on it. 
+ if isDir && ev.Has(Create) { + err := w.register(ev.Name, watch.flags, true) + if !w.sendError(err) { + return Event{}, false + } + + // This was a directory rename, so we need to update all the + // children. + // + // TODO: this is of course pretty slow; we should use a better data + // structure for storing all of this, e.g. store children in the + // watch. I have some code for this in my kqueue refactor we can use + // in the future. For now I'm okay with this as it's not publicly + // available. Correctness first, performance second. + if ev.renamedFrom != "" { + for k, ww := range w.watches.wd { + if k == watch.wd || ww.path == ev.Name { + continue + } + if strings.HasPrefix(ww.path, ev.renamedFrom) { + ww.path = strings.Replace(ww.path, ev.renamedFrom, ev.Name, 1) + w.watches.wd[k] = ww + } + } + } + } + } + + return ev, true +} + func (w *inotify) isRecursive(path string) bool { ww := w.watches.byPath(path) if ww == nil { // path could be a file, so also check the Dir. @@ -650,8 +575,8 @@ func (w *inotify) xSupports(op Op) bool { } func (w *inotify) state() { - w.watches.mu.Lock() - defer w.watches.mu.Unlock() + w.mu.Lock() + defer w.mu.Unlock() for wd, ww := range w.watches.wd { fmt.Fprintf(os.Stderr, "%4d: recurse=%t %q\n", wd, ww.recurse, ww.path) } diff --git a/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go b/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go index d8de5ab76..340aeec06 100644 --- a/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go +++ b/vendor/github.com/fsnotify/fsnotify/backend_kqueue.go @@ -16,14 +16,13 @@ import ( ) type kqueue struct { + *shared Events chan Event Errors chan error kq int // File descriptor (as returned by the kqueue() syscall). closepipe [2]int // Pipe used for closing kq. 
watches *watches - done chan struct{} - doneMu sync.Mutex } type ( @@ -132,14 +131,18 @@ func (w *watches) byPath(path string) (watch, bool) { return info, ok } -func (w *watches) updateDirFlags(path string, flags uint32) { +func (w *watches) updateDirFlags(path string, flags uint32) bool { w.mu.Lock() defer w.mu.Unlock() - fd := w.path[path] + fd, ok := w.path[path] + if !ok { // Already deleted: don't re-set it here. + return false + } info := w.wd[fd] info.dirFlags = flags w.wd[fd] = info + return true } func (w *watches) remove(fd int, path string) bool { @@ -179,22 +182,20 @@ func (w *watches) seenBefore(path string) bool { return ok } -func newBackend(ev chan Event, errs chan error) (backend, error) { - return newBufferedBackend(0, ev, errs) -} +var defaultBufferSize = 0 -func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error) { +func newBackend(ev chan Event, errs chan error) (backend, error) { kq, closepipe, err := newKqueue() if err != nil { return nil, err } w := &kqueue{ + shared: newShared(ev, errs), Events: ev, Errors: errs, kq: kq, closepipe: closepipe, - done: make(chan struct{}), watches: newWatches(), } @@ -210,7 +211,7 @@ func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error // all. func newKqueue() (kq int, closepipe [2]int, err error) { kq, err = unix.Kqueue() - if kq == -1 { + if err != nil { return kq, closepipe, err } @@ -239,54 +240,17 @@ func newKqueue() (kq int, closepipe [2]int, err error) { return kq, closepipe, nil } -// Returns true if the event was sent, or false if watcher is closed. -func (w *kqueue) sendEvent(e Event) bool { - select { - case <-w.done: - return false - case w.Events <- e: - return true - } -} - -// Returns true if the error was sent, or false if watcher is closed. 
-func (w *kqueue) sendError(err error) bool { - if err == nil { - return true - } - select { - case <-w.done: - return false - case w.Errors <- err: - return true - } -} - -func (w *kqueue) isClosed() bool { - select { - case <-w.done: - return true - default: - return false - } -} - func (w *kqueue) Close() error { - w.doneMu.Lock() - if w.isClosed() { - w.doneMu.Unlock() + if w.shared.close() { return nil } - close(w.done) - w.doneMu.Unlock() pathsToRemove := w.watches.listPaths(false) for _, name := range pathsToRemove { w.Remove(name) } - // Send "quit" message to the reader goroutine. - unix.Close(w.closepipe[1]) + unix.Close(w.closepipe[1]) // Send "quit" message to readEvents return nil } @@ -303,7 +267,7 @@ func (w *kqueue) AddWith(name string, opts ...addOpt) error { return fmt.Errorf("%w: %s", xErrUnsupported, with.op) } - _, err := w.addWatch(name, noteAllEvents) + _, err := w.addWatch(name, noteAllEvents, false) if err != nil { return err } @@ -366,7 +330,7 @@ const noteAllEvents = unix.NOTE_DELETE | unix.NOTE_WRITE | unix.NOTE_ATTRIB | un // described in kevent(2). // // Returns the real path to the file which was added, with symlinks resolved. -func (w *kqueue) addWatch(name string, flags uint32) (string, error) { +func (w *kqueue) addWatch(name string, flags uint32, listDir bool) (string, error) { if w.isClosed() { return "", ErrClosed } @@ -385,15 +349,15 @@ func (w *kqueue) addWatch(name string, flags uint32) (string, error) { return "", nil } - // Follow symlinks. - if fi.Mode()&os.ModeSymlink == os.ModeSymlink { + // Follow symlinks, but only for paths added with Add(), and not paths + // we're adding from internalWatch from a listdir. + if !listDir && fi.Mode()&os.ModeSymlink == os.ModeSymlink { link, err := os.Readlink(name) if err != nil { - // Return nil because Linux can add unresolvable symlinks to the - // watch list without problems, so maintain consistency with - // that. There will be no file events for broken symlinks. 
- // TODO: more specific check; returns os.PathError; ENOENT? - return "", nil + return "", err + } + if !filepath.IsAbs(link) { + link = filepath.Join(filepath.Dir(name), link) } _, alreadyWatching = w.watches.byPath(link) @@ -408,7 +372,7 @@ func (w *kqueue) addWatch(name string, flags uint32) (string, error) { name = link fi, err = os.Lstat(name) if err != nil { - return "", nil + return "", err } } @@ -422,7 +386,6 @@ func (w *kqueue) addWatch(name string, flags uint32) (string, error) { if errors.Is(err, unix.EINTR) { continue } - return "", err } @@ -444,10 +407,16 @@ func (w *kqueue) addWatch(name string, flags uint32) (string, error) { if info.isDir { watchDir := (flags&unix.NOTE_WRITE) == unix.NOTE_WRITE && (!alreadyWatching || (info.dirFlags&unix.NOTE_WRITE) != unix.NOTE_WRITE) - w.watches.updateDirFlags(name, flags) + if !w.watches.updateDirFlags(name, flags) { + return "", nil + } if watchDir { - if err := w.watchDirectoryFiles(name); err != nil { + d := name + if info.linkName != "" { + d = info.linkName + } + if err := w.watchDirectoryFiles(d); err != nil { return "", err } } @@ -644,19 +613,22 @@ func (w *kqueue) dirChange(dir string) error { if errors.Is(err, os.ErrNotExist) { return nil } - return fmt.Errorf("fsnotify.dirChange: %w", err) + return fmt.Errorf("fsnotify.dirChange %q: %w", dir, err) } for _, f := range files { fi, err := f.Info() if err != nil { + if errors.Is(err, os.ErrNotExist) { + return nil + } return fmt.Errorf("fsnotify.dirChange: %w", err) } err = w.sendCreateIfNew(filepath.Join(dir, fi.Name()), fi) if err != nil { // Don't need to send an error if this file isn't readable. 
- if errors.Is(err, unix.EACCES) || errors.Is(err, unix.EPERM) { + if errors.Is(err, unix.EACCES) || errors.Is(err, unix.EPERM) || errors.Is(err, os.ErrNotExist) { return nil } return fmt.Errorf("fsnotify.dirChange: %w", err) @@ -688,11 +660,11 @@ func (w *kqueue) internalWatch(name string, fi os.FileInfo) (string, error) { // mimic Linux providing delete events for subdirectories, but preserve // the flags used if currently watching subdirectory info, _ := w.watches.byPath(name) - return w.addWatch(name, info.dirFlags|unix.NOTE_DELETE|unix.NOTE_RENAME) + return w.addWatch(name, info.dirFlags|unix.NOTE_DELETE|unix.NOTE_RENAME, true) } - // watch file to mimic Linux inotify - return w.addWatch(name, noteAllEvents) + // Watch file to mimic Linux inotify. + return w.addWatch(name, noteAllEvents, true) } // Register events with the queue. @@ -722,9 +694,9 @@ func (w *kqueue) read(events []unix.Kevent_t) ([]unix.Kevent_t, error) { } func (w *kqueue) xSupports(op Op) bool { - if runtime.GOOS == "freebsd" { - //return true // Supports everything. - } + //if runtime.GOOS == "freebsd" { + // return true // Supports everything. 
+ //} if op.Has(xUnportableOpen) || op.Has(xUnportableRead) || op.Has(xUnportableCloseWrite) || op.Has(xUnportableCloseRead) { return false diff --git a/vendor/github.com/fsnotify/fsnotify/backend_other.go b/vendor/github.com/fsnotify/fsnotify/backend_other.go index 5eb5dbc66..b8c0ad722 100644 --- a/vendor/github.com/fsnotify/fsnotify/backend_other.go +++ b/vendor/github.com/fsnotify/fsnotify/backend_other.go @@ -9,12 +9,11 @@ type other struct { Errors chan error } +var defaultBufferSize = 0 + func newBackend(ev chan Event, errs chan error) (backend, error) { return nil, errors.New("fsnotify not supported on the current platform") } -func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error) { - return newBackend(ev, errs) -} func (w *other) Close() error { return nil } func (w *other) WatchList() []string { return nil } func (w *other) Add(name string) error { return nil } diff --git a/vendor/github.com/fsnotify/fsnotify/backend_windows.go b/vendor/github.com/fsnotify/fsnotify/backend_windows.go index c54a63083..3433642d6 100644 --- a/vendor/github.com/fsnotify/fsnotify/backend_windows.go +++ b/vendor/github.com/fsnotify/fsnotify/backend_windows.go @@ -28,18 +28,16 @@ type readDirChangesW struct { port windows.Handle // Handle to completion port input chan *input // Inputs to the reader are sent on this channel - quit chan chan<- error + done chan chan<- error mu sync.Mutex // Protects access to watches, closed watches watchMap // Map of watches (key: i-number) closed bool // Set to true when Close() is first called } -func newBackend(ev chan Event, errs chan error) (backend, error) { - return newBufferedBackend(50, ev, errs) -} +var defaultBufferSize = 50 -func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error) { +func newBackend(ev chan Event, errs chan error) (backend, error) { port, err := windows.CreateIoCompletionPort(windows.InvalidHandle, 0, 0, 0) if err != nil { return nil, 
os.NewSyscallError("CreateIoCompletionPort", err) @@ -50,7 +48,7 @@ func newBufferedBackend(sz uint, ev chan Event, errs chan error) (backend, error port: port, watches: make(watchMap), input: make(chan *input, 1), - quit: make(chan chan<- error, 1), + done: make(chan chan<- error, 1), } go w.readEvents() return w, nil @@ -70,8 +68,8 @@ func (w *readDirChangesW) sendEvent(name, renamedFrom string, mask uint64) bool event := w.newEvent(name, uint32(mask)) event.renamedFrom = renamedFrom select { - case ch := <-w.quit: - w.quit <- ch + case ch := <-w.done: + w.done <- ch case w.Events <- event: } return true @@ -83,10 +81,10 @@ func (w *readDirChangesW) sendError(err error) bool { return true } select { + case <-w.done: + return false case w.Errors <- err: return true - case <-w.quit: - return false } } @@ -99,9 +97,9 @@ func (w *readDirChangesW) Close() error { w.closed = true w.mu.Unlock() - // Send "quit" message to the reader goroutine + // Send "done" message to the reader goroutine ch := make(chan error) - w.quit <- ch + w.done <- ch if err := w.wakeupReader(); err != nil { return err } @@ -495,7 +493,7 @@ func (w *readDirChangesW) readEvents() { watch := (*watch)(unsafe.Pointer(ov)) if watch == nil { select { - case ch := <-w.quit: + case ch := <-w.done: w.mu.Lock() var indexes []indexMap for _, index := range w.watches { diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify.go b/vendor/github.com/fsnotify/fsnotify/fsnotify.go index 0760efe91..f64be4bf9 100644 --- a/vendor/github.com/fsnotify/fsnotify/fsnotify.go +++ b/vendor/github.com/fsnotify/fsnotify/fsnotify.go @@ -244,12 +244,13 @@ var ( // ErrUnsupported is returned by AddWith() when WithOps() specified an // Unportable event that's not supported on this platform. + //lint:ignore ST1012 not relevant xErrUnsupported = errors.New("fsnotify: not supported with this backend") ) // NewWatcher creates a new Watcher. 
func NewWatcher() (*Watcher, error) { - ev, errs := make(chan Event), make(chan error) + ev, errs := make(chan Event, defaultBufferSize), make(chan error) b, err := newBackend(ev, errs) if err != nil { return nil, err @@ -266,8 +267,8 @@ func NewWatcher() (*Watcher, error) { // cases, and whenever possible you will be better off increasing the kernel // buffers instead of adding a large userspace buffer. func NewBufferedWatcher(sz uint) (*Watcher, error) { - ev, errs := make(chan Event), make(chan error) - b, err := newBufferedBackend(sz, ev, errs) + ev, errs := make(chan Event, sz), make(chan error) + b, err := newBackend(ev, errs) if err != nil { return nil, err } @@ -337,7 +338,8 @@ func (w *Watcher) Close() error { return w.b.Close() } // WatchList returns all paths explicitly added with [Watcher.Add] (and are not // yet removed). // -// Returns nil if [Watcher.Close] was called. +// The order is undefined, and may differ per call. Returns nil if +// [Watcher.Close] was called. func (w *Watcher) WatchList() []string { return w.b.WatchList() } // Supports reports if all the listed operations are supported by this platform. 
diff --git a/vendor/github.com/fsnotify/fsnotify/internal/darwin.go b/vendor/github.com/fsnotify/fsnotify/internal/darwin.go index b0eab1009..0b01bc182 100644 --- a/vendor/github.com/fsnotify/fsnotify/internal/darwin.go +++ b/vendor/github.com/fsnotify/fsnotify/internal/darwin.go @@ -9,14 +9,14 @@ import ( ) var ( - SyscallEACCES = syscall.EACCES - UnixEACCES = unix.EACCES + ErrSyscallEACCES = syscall.EACCES + ErrUnixEACCES = unix.EACCES ) var maxfiles uint64 -// Go 1.19 will do this automatically: https://go-review.googlesource.com/c/go/+/393354/ func SetRlimit() { + // Go 1.19 will do this automatically: https://go-review.googlesource.com/c/go/+/393354/ var l syscall.Rlimit err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &l) if err == nil && l.Cur != l.Max { diff --git a/vendor/github.com/fsnotify/fsnotify/internal/freebsd.go b/vendor/github.com/fsnotify/fsnotify/internal/freebsd.go index 547df1df8..5ac8b5079 100644 --- a/vendor/github.com/fsnotify/fsnotify/internal/freebsd.go +++ b/vendor/github.com/fsnotify/fsnotify/internal/freebsd.go @@ -9,8 +9,8 @@ import ( ) var ( - SyscallEACCES = syscall.EACCES - UnixEACCES = unix.EACCES + ErrSyscallEACCES = syscall.EACCES + ErrUnixEACCES = unix.EACCES ) var maxfiles uint64 diff --git a/vendor/github.com/fsnotify/fsnotify/internal/unix.go b/vendor/github.com/fsnotify/fsnotify/internal/unix.go index 30976ce97..b251fb803 100644 --- a/vendor/github.com/fsnotify/fsnotify/internal/unix.go +++ b/vendor/github.com/fsnotify/fsnotify/internal/unix.go @@ -1,4 +1,4 @@ -//go:build !windows && !darwin && !freebsd +//go:build !windows && !darwin && !freebsd && !plan9 package internal @@ -9,8 +9,8 @@ import ( ) var ( - SyscallEACCES = syscall.EACCES - UnixEACCES = unix.EACCES + ErrSyscallEACCES = syscall.EACCES + ErrUnixEACCES = unix.EACCES ) var maxfiles uint64 diff --git a/vendor/github.com/fsnotify/fsnotify/internal/windows.go b/vendor/github.com/fsnotify/fsnotify/internal/windows.go index a72c64954..896bc2e5a 100644 --- 
a/vendor/github.com/fsnotify/fsnotify/internal/windows.go +++ b/vendor/github.com/fsnotify/fsnotify/internal/windows.go @@ -10,8 +10,8 @@ import ( // Just a dummy. var ( - SyscallEACCES = errors.New("dummy") - UnixEACCES = errors.New("dummy") + ErrSyscallEACCES = errors.New("dummy") + ErrUnixEACCES = errors.New("dummy") ) func SetRlimit() {} diff --git a/vendor/github.com/fsnotify/fsnotify/shared.go b/vendor/github.com/fsnotify/fsnotify/shared.go new file mode 100644 index 000000000..3ee9b58f1 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/shared.go @@ -0,0 +1,64 @@ +package fsnotify + +import "sync" + +type shared struct { + Events chan Event + Errors chan error + done chan struct{} + mu sync.Mutex +} + +func newShared(ev chan Event, errs chan error) *shared { + return &shared{ + Events: ev, + Errors: errs, + done: make(chan struct{}), + } +} + +// Returns true if the event was sent, or false if watcher is closed. +func (w *shared) sendEvent(e Event) bool { + if e.Op == 0 { + return true + } + select { + case <-w.done: + return false + case w.Events <- e: + return true + } +} + +// Returns true if the error was sent, or false if watcher is closed. +func (w *shared) sendError(err error) bool { + if err == nil { + return true + } + select { + case <-w.done: + return false + case w.Errors <- err: + return true + } +} + +func (w *shared) isClosed() bool { + select { + case <-w.done: + return true + default: + return false + } +} + +// Mark as closed; returns true if it was already closed. +func (w *shared) close() bool { + w.mu.Lock() + defer w.mu.Unlock() + if w.isClosed() { + return true + } + close(w.done) + return false +} diff --git a/vendor/github.com/fsnotify/fsnotify/staticcheck.conf b/vendor/github.com/fsnotify/fsnotify/staticcheck.conf new file mode 100644 index 000000000..8fa7351f0 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/staticcheck.conf @@ -0,0 +1,3 @@ +checks = ['all', + '-U1000', # Don't complain about unused functions. 
+] diff --git a/vendor/github.com/open-policy-agent/opa/ast/compare.go b/vendor/github.com/open-policy-agent/opa/ast/compare.go index d36078e33..5e617e992 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/compare.go +++ b/vendor/github.com/open-policy-agent/opa/ast/compare.go @@ -34,6 +34,6 @@ import ( // Sets are considered equal if and only if the symmetric difference of a and b // is empty. // Other comparisons are consistent but not defined. -func Compare(a, b interface{}) int { +func Compare(a, b any) int { return v1.Compare(a, b) } diff --git a/vendor/github.com/open-policy-agent/opa/ast/errors.go b/vendor/github.com/open-policy-agent/opa/ast/errors.go index 0cb8ee28f..722cfc0fb 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/errors.go +++ b/vendor/github.com/open-policy-agent/opa/ast/errors.go @@ -41,6 +41,6 @@ type ErrorDetails = v1.ErrorDetails type Error = v1.Error // NewError returns a new Error object. -func NewError(code string, loc *Location, f string, a ...interface{}) *Error { +func NewError(code string, loc *Location, f string, a ...any) *Error { return v1.NewError(code, loc, f, a...) } diff --git a/vendor/github.com/open-policy-agent/opa/ast/policy.go b/vendor/github.com/open-policy-agent/opa/ast/policy.go index 3da7fdd63..5055e8f23 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/policy.go +++ b/vendor/github.com/open-policy-agent/opa/ast/policy.go @@ -211,7 +211,7 @@ func NewBody(exprs ...*Expr) Body { } // NewExpr returns a new Expr object. -func NewExpr(terms interface{}) *Expr { +func NewExpr(terms any) *Expr { return v1.NewExpr(terms) } @@ -222,7 +222,7 @@ func NewBuiltinExpr(terms ...*Term) *Expr { } // Copy returns a deep copy of the AST node x. If x is not an AST node, x is returned unmodified. 
-func Copy(x interface{}) interface{} { +func Copy(x any) any { return v1.Copy(x) } diff --git a/vendor/github.com/open-policy-agent/opa/ast/pretty.go b/vendor/github.com/open-policy-agent/opa/ast/pretty.go index f2b8104e0..84e42f9ae 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/pretty.go +++ b/vendor/github.com/open-policy-agent/opa/ast/pretty.go @@ -13,6 +13,6 @@ import ( // Pretty writes a pretty representation of the AST rooted at x to w. // // This is function is intended for debug purposes when inspecting ASTs. -func Pretty(w io.Writer, x interface{}) { +func Pretty(w io.Writer, x any) { v1.Pretty(w, x) } diff --git a/vendor/github.com/open-policy-agent/opa/ast/strings.go b/vendor/github.com/open-policy-agent/opa/ast/strings.go index ef9354bf7..c2c81de8b 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/strings.go +++ b/vendor/github.com/open-policy-agent/opa/ast/strings.go @@ -9,6 +9,6 @@ import ( ) // TypeName returns a human readable name for the AST element type. -func TypeName(x interface{}) string { +func TypeName(x any) string { return v1.TypeName(x) } diff --git a/vendor/github.com/open-policy-agent/opa/ast/term.go b/vendor/github.com/open-policy-agent/opa/ast/term.go index a5d146ea2..202355070 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/term.go +++ b/vendor/github.com/open-policy-agent/opa/ast/term.go @@ -30,7 +30,7 @@ func NewLocation(text []byte, file string, row int, col int) *Location { type Value = v1.Value // InterfaceToValue converts a native Go value x to a Value. -func InterfaceToValue(x interface{}) (Value, error) { +func InterfaceToValue(x any) (Value, error) { return v1.InterfaceToValue(x) } @@ -40,7 +40,7 @@ func ValueFromReader(r io.Reader) (Value, error) { } // As converts v into a Go native type referred to by x. 
-func As(v Value, x interface{}) error { +func As(v Value, x any) error { return v1.As(v, x) } @@ -62,13 +62,13 @@ func IsUnknownValueErr(err error) bool { // ValueToInterface returns the Go representation of an AST value. The AST // value should not contain any values that require evaluation (e.g., vars, // comprehensions, etc.) -func ValueToInterface(v Value, resolver Resolver) (interface{}, error) { +func ValueToInterface(v Value, resolver Resolver) (any, error) { return v1.ValueToInterface(v, resolver) } // JSON returns the JSON representation of v. The value must not contain any // refs or terms that require evaluation (e.g., vars, comprehensions, etc.) -func JSON(v Value) (interface{}, error) { +func JSON(v Value) (any, error) { return v1.JSON(v) } @@ -77,7 +77,7 @@ type JSONOpt = v1.JSONOpt // JSONWithOpt returns the JSON representation of v. The value must not contain any // refs or terms that require evaluation (e.g., vars, comprehensions, etc.) -func JSONWithOpt(v Value, opt JSONOpt) (interface{}, error) { +func JSONWithOpt(v Value, opt JSONOpt) (any, error) { return v1.JSONWithOpt(v, opt) } @@ -85,14 +85,14 @@ func JSONWithOpt(v Value, opt JSONOpt) (interface{}, error) { // refs or terms that require evaluation (e.g., vars, comprehensions, etc.) If // the conversion fails, this function will panic. This function is mostly for // test purposes. -func MustJSON(v Value) interface{} { +func MustJSON(v Value) any { return v1.MustJSON(v) } // MustInterfaceToValue converts a native Go value x to a Value. If the // conversion fails, this function will panic. This function is mostly for test // purposes. -func MustInterfaceToValue(x interface{}) Value { +func MustInterfaceToValue(x any) Value { return v1.MustInterfaceToValue(x) } @@ -115,17 +115,17 @@ func IsComprehension(x Value) bool { } // ContainsRefs returns true if the Value v contains refs. 
-func ContainsRefs(v interface{}) bool { +func ContainsRefs(v any) bool { return v1.ContainsRefs(v) } // ContainsComprehensions returns true if the Value v contains comprehensions. -func ContainsComprehensions(v interface{}) bool { +func ContainsComprehensions(v any) bool { return v1.ContainsComprehensions(v) } // ContainsClosures returns true if the Value v contains closures. -func ContainsClosures(v interface{}) bool { +func ContainsClosures(v any) bool { return v1.ContainsClosures(v) } @@ -256,7 +256,7 @@ func ObjectTerm(o ...[2]*Term) *Term { return v1.ObjectTerm(o...) } -func LazyObject(blob map[string]interface{}) Object { +func LazyObject(blob map[string]any) Object { return v1.LazyObject(blob) } diff --git a/vendor/github.com/open-policy-agent/opa/ast/transform.go b/vendor/github.com/open-policy-agent/opa/ast/transform.go index cfb137813..8c03c4866 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/transform.go +++ b/vendor/github.com/open-policy-agent/opa/ast/transform.go @@ -16,22 +16,22 @@ type Transformer = v1.Transformer // Transform iterates the AST and calls the Transform function on the // Transformer t for x before recursing. -func Transform(t Transformer, x interface{}) (interface{}, error) { +func Transform(t Transformer, x any) (any, error) { return v1.Transform(t, x) } // TransformRefs calls the function f on all references under x. -func TransformRefs(x interface{}, f func(Ref) (Value, error)) (interface{}, error) { +func TransformRefs(x any, f func(Ref) (Value, error)) (any, error) { return v1.TransformRefs(x, f) } // TransformVars calls the function f on all vars under x. -func TransformVars(x interface{}, f func(Var) (Value, error)) (interface{}, error) { +func TransformVars(x any, f func(Var) (Value, error)) (any, error) { return v1.TransformVars(x, f) } // TransformComprehensions calls the functio nf on all comprehensions under x. 
-func TransformComprehensions(x interface{}, f func(interface{}) (Value, error)) (interface{}, error) { +func TransformComprehensions(x any, f func(any) (Value, error)) (any, error) { return v1.TransformComprehensions(x, f) } @@ -41,6 +41,6 @@ type GenericTransformer = v1.GenericTransformer // NewGenericTransformer returns a new GenericTransformer that will transform // AST nodes using the function f. -func NewGenericTransformer(f func(x interface{}) (interface{}, error)) *GenericTransformer { +func NewGenericTransformer(f func(x any) (any, error)) *GenericTransformer { return v1.NewGenericTransformer(f) } diff --git a/vendor/github.com/open-policy-agent/opa/ast/visit.go b/vendor/github.com/open-policy-agent/opa/ast/visit.go index 94823c6cc..f4f2459ec 100644 --- a/vendor/github.com/open-policy-agent/opa/ast/visit.go +++ b/vendor/github.com/open-policy-agent/opa/ast/visit.go @@ -21,68 +21,68 @@ type BeforeAndAfterVisitor = v1.BeforeAndAfterVisitor // Walk iterates the AST by calling the Visit function on the Visitor // v for x before recursing. // Deprecated: use GenericVisitor.Walk -func Walk(v Visitor, x interface{}) { +func Walk(v Visitor, x any) { v1.Walk(v, x) } // WalkBeforeAndAfter iterates the AST by calling the Visit function on the // Visitor v for x before recursing. // Deprecated: use GenericVisitor.Walk -func WalkBeforeAndAfter(v BeforeAndAfterVisitor, x interface{}) { +func WalkBeforeAndAfter(v BeforeAndAfterVisitor, x any) { v1.WalkBeforeAndAfter(v, x) } // WalkVars calls the function f on all vars under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkVars(x interface{}, f func(Var) bool) { +func WalkVars(x any, f func(Var) bool) { v1.WalkVars(x, f) } // WalkClosures calls the function f on all closures under x. If the function f // returns true, AST nodes under the last node will not be visited. 
-func WalkClosures(x interface{}, f func(interface{}) bool) { +func WalkClosures(x any, f func(any) bool) { v1.WalkClosures(x, f) } // WalkRefs calls the function f on all references under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkRefs(x interface{}, f func(Ref) bool) { +func WalkRefs(x any, f func(Ref) bool) { v1.WalkRefs(x, f) } // WalkTerms calls the function f on all terms under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkTerms(x interface{}, f func(*Term) bool) { +func WalkTerms(x any, f func(*Term) bool) { v1.WalkTerms(x, f) } // WalkWiths calls the function f on all with modifiers under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkWiths(x interface{}, f func(*With) bool) { +func WalkWiths(x any, f func(*With) bool) { v1.WalkWiths(x, f) } // WalkExprs calls the function f on all expressions under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkExprs(x interface{}, f func(*Expr) bool) { +func WalkExprs(x any, f func(*Expr) bool) { v1.WalkExprs(x, f) } // WalkBodies calls the function f on all bodies under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkBodies(x interface{}, f func(Body) bool) { +func WalkBodies(x any, f func(Body) bool) { v1.WalkBodies(x, f) } // WalkRules calls the function f on all rules under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkRules(x interface{}, f func(*Rule) bool) { +func WalkRules(x any, f func(*Rule) bool) { v1.WalkRules(x, f) } // WalkNodes calls the function f on all nodes under x. If the function f // returns true, AST nodes under the last node will not be visited. 
-func WalkNodes(x interface{}, f func(Node) bool) { +func WalkNodes(x any, f func(Node) bool) { v1.WalkNodes(x, f) } @@ -93,7 +93,7 @@ type GenericVisitor = v1.GenericVisitor // NewGenericVisitor returns a new GenericVisitor that will invoke the function // f on AST nodes. -func NewGenericVisitor(f func(x interface{}) bool) *GenericVisitor { +func NewGenericVisitor(f func(x any) bool) *GenericVisitor { return v1.NewGenericVisitor(f) } @@ -105,7 +105,7 @@ type BeforeAfterVisitor = v1.BeforeAfterVisitor // NewBeforeAfterVisitor returns a new BeforeAndAfterVisitor that // will invoke the functions before and after AST nodes. -func NewBeforeAfterVisitor(before func(x interface{}) bool, after func(x interface{})) *BeforeAfterVisitor { +func NewBeforeAfterVisitor(before func(x any) bool, after func(x any)) *BeforeAfterVisitor { return v1.NewBeforeAfterVisitor(before, after) } diff --git a/vendor/github.com/open-policy-agent/opa/bundle/store.go b/vendor/github.com/open-policy-agent/opa/bundle/store.go index d73cc7742..9659d67bd 100644 --- a/vendor/github.com/open-policy-agent/opa/bundle/store.go +++ b/vendor/github.com/open-policy-agent/opa/bundle/store.go @@ -7,6 +7,7 @@ package bundle import ( "context" + "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/storage" v1 "github.com/open-policy-agent/opa/v1/bundle" ) @@ -70,7 +71,7 @@ func ReadBundleRevisionFromStore(ctx context.Context, store storage.Store, txn s // ReadBundleMetadataFromStore returns the metadata in the specified bundle. // If the bundle is not activated, this function will return // storage NotFound error. 
-func ReadBundleMetadataFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, name string) (map[string]interface{}, error) { +func ReadBundleMetadataFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, name string) (map[string]any, error) { return v1.ReadBundleMetadataFromStore(ctx, store, txn, name) } @@ -87,7 +88,7 @@ type ActivateOpts = v1.ActivateOpts // Activate the bundle(s) by loading into the given Store. This will load policies, data, and record // the manifest in storage. The compiler provided will have had the polices compiled on it. func Activate(opts *ActivateOpts) error { - return v1.Activate(opts) + return v1.Activate(setActivateDefaultRegoVersion(opts)) } // DeactivateOpts defines options for the Deactivate API call @@ -95,7 +96,7 @@ type DeactivateOpts = v1.DeactivateOpts // Deactivate the bundle(s). This will erase associated data, policies, and the manifest entry from the store. func Deactivate(opts *DeactivateOpts) error { - return v1.Deactivate(opts) + return v1.Deactivate(setDeactivateDefaultRegoVersion(opts)) } // LegacyWriteManifestToStore will write the bundle manifest to the older single (unnamed) bundle manifest location. 
@@ -121,3 +122,31 @@ func LegacyReadRevisionFromStore(ctx context.Context, store storage.Store, txn s func ActivateLegacy(opts *ActivateOpts) error { return v1.ActivateLegacy(opts) } + +func setActivateDefaultRegoVersion(opts *ActivateOpts) *ActivateOpts { + if opts == nil { + return nil + } + + if opts.ParserOptions.RegoVersion == ast.RegoUndefined { + cpy := *opts + cpy.ParserOptions.RegoVersion = ast.DefaultRegoVersion + return &cpy + } + + return opts +} + +func setDeactivateDefaultRegoVersion(opts *DeactivateOpts) *DeactivateOpts { + if opts == nil { + return nil + } + + if opts.ParserOptions.RegoVersion == ast.RegoUndefined { + cpy := *opts + cpy.ParserOptions.RegoVersion = ast.DefaultRegoVersion + return &cpy + } + + return opts +} diff --git a/vendor/github.com/open-policy-agent/opa/capabilities/v1.5.0.json b/vendor/github.com/open-policy-agent/opa/capabilities/v1.5.0.json new file mode 100644 index 000000000..1253c88b3 --- /dev/null +++ b/vendor/github.com/open-policy-agent/opa/capabilities/v1.5.0.json @@ -0,0 +1,4849 @@ +{ + "builtins": [ + { + "name": "abs", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "all", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "and", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + }, + "infix": "\u0026" + }, + { + "name": "any", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": 
"boolean" + }, + "type": "function" + } + }, + { + "name": "array.concat", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "array.reverse", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "array.slice", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "assign", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": ":=" + }, + { + "name": "base64.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "base64url.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64url.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "base64url.encode_no_pad", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "bits.and", 
+ "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.lsh", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.negate", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.or", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.rsh", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "bits.xor", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "cast_array", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "cast_boolean", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "cast_null", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "null" + }, + "type": "function" + } + }, + { + "name": "cast_object", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "cast_set", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "cast_string", + "decl": { + "args": [ + { + "type": "any" + } + ], + 
"result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "ceil", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "concat", + "decl": { + "args": [ + { + "type": "string" + }, + { + "of": [ + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "contains", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "count", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.equal", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.md5", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha1", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.hmac.sha512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": 
{ + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.md5", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.parse_private_keys", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.sha1", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.sha256", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_and_verify_certificates", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_and_verify_certificates_with_options", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_certificate_request", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": 
"crypto.x509.parse_certificates", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_keypair", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "crypto.x509.parse_rsa_private_key", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "div", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "/" + }, + { + "name": "endswith", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "eq", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "=" + }, + { + "name": "equal", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "==" + }, + { + "name": "floor", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "format_int", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "glob.match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "of": [ + { + 
"type": "null" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + } + ], + "type": "any" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "glob.quote_meta", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "graph.reachable", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "graph.reachable_paths", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "graphql.is_valid", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + 
"type": "function" + } + }, + { + "name": "graphql.parse", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_and_verify", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_query", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "graphql.parse_schema", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + 
"type": "function" + } + }, + { + "name": "graphql.schema_is_valid", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "gt", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003e" + }, + { + "name": "gte", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003e=" + }, + { + "name": "hex.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "hex.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "http.send", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "indexof", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "indexof_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "internal.member_2", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "in" + }, + { + "name": 
"internal.member_3", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "in" + }, + { + "name": "internal.print", + "decl": { + "args": [ + { + "dynamic": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "array" + } + ], + "type": "function" + } + }, + { + "name": "internal.test_case", + "decl": { + "args": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "function" + } + }, + { + "name": "intersection", + "decl": { + "args": [ + { + "of": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "io.jwt.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "static": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "string" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "io.jwt.decode_verify", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "array" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "io.jwt.encode_sign", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": 
{ + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "io.jwt.encode_sign_raw", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "io.jwt.verify_es256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_es384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_es512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_hs512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + 
], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_ps512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs256", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs384", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "io.jwt.verify_rs512", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_array", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_boolean", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_null", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_number", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_object", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_set", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "is_string", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "json.filter", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": 
"any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "json.marshal", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "json.marshal_with_options", + "decl": { + "args": [ + { + "type": "any" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "static": [ + { + "key": "indent", + "value": { + "type": "string" + } + }, + { + "key": "prefix", + "value": { + "type": "string" + } + }, + { + "key": "pretty", + "value": { + "type": "boolean" + } + } + ], + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "json.match_schema", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "dynamic": { + "static": [ + { + "key": "desc", + "value": { + "type": "string" + } + }, + { + "key": "error", + "value": { + "type": "string" + } + }, + { + "key": "field", + "value": { + "type": "string" + } + }, + { + "key": "type", + 
"value": { + "type": "string" + } + } + ], + "type": "object" + }, + "type": "array" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "json.patch", + "decl": { + "args": [ + { + "type": "any" + }, + { + "dynamic": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "static": [ + { + "key": "op", + "value": { + "type": "string" + } + }, + { + "key": "path", + "value": { + "type": "any" + } + } + ], + "type": "object" + }, + "type": "array" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.remove", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.unmarshal", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "json.verify_schema", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "boolean" + }, + { + "of": [ + { + "type": "null" + }, + { + "type": "string" + } + ], + "type": "any" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "lower", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "lt", + "decl": { + "args": [ + { + "type": "any" 
+ }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003c" + }, + { + "name": "lte", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "\u003c=" + }, + { + "name": "max", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "min", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "minus", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": [ + { + "type": "number" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + "type": "function" + }, + "infix": "-" + }, + { + "name": "mul", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "*" + }, + { + "name": "neq", + "decl": { + "args": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + }, + "infix": "!=" + }, + { + "name": "net.cidr_contains", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_contains_matches", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" 
+ }, + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "static": [ + { + "type": "any" + }, + { + "type": "any" + } + ], + "type": "array" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_expand", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_intersects", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.cidr_merge", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + 
"of": [ + { + "type": "string" + } + ], + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "net.cidr_overlap", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "net.lookup_ip_addr", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "of": { + "type": "string" + }, + "type": "set" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "numbers.range", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "numbers.range_step", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "object.filter", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.get", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "any" + }, + { + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.keys", + "decl": { + "args": [ + 
{ + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "object.remove", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.subset", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.union", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + }, + { + "name": "object.union_n", + "decl": { + "args": [ + { + "dynamic": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "array" + } + ], + "result": { + "type": "any" + }, + "type": 
"function" + } + }, + { + "name": "opa.runtime", + "decl": { + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "or", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + }, + "infix": "|" + }, + { + "name": "plus", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "+" + }, + { + "name": "print", + "decl": { + "type": "function", + "variadic": { + "type": "any" + } + } + }, + { + "name": "product", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + { + "of": { + "type": "number" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "providers.aws.sign_req", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "key": { + "type": "any" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "rand.intn", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "re_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": 
"regex.find_all_string_submatch_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.find_n", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "number" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.globs_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "regex.replace", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "regex.split", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "regex.template_match", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "rego.metadata.chain", + "decl": { + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "rego.metadata.rule", + "decl": { + "result": { + "type": "any" + }, + "type": "function" 
+ } + }, + { + "name": "rego.parse_module", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "rem", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + }, + "infix": "%" + }, + { + "name": "replace", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "round", + "decl": { + "args": [ + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "semver.compare", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "semver.is_valid", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "set_diff", + "decl": { + "args": [ + { + "of": { + "type": "any" + }, + "type": "set" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "sort", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "of": { + "type": "any" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "split", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + "type": "function" + } + }, + { + "name": "sprintf", + "decl": { + "args": [ 
+ { + "type": "string" + }, + { + "dynamic": { + "type": "any" + }, + "type": "array" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "startswith", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.any_prefix_match", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.any_suffix_match", + "decl": { + "args": [ + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + }, + { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "strings.count", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "strings.render_template", + "decl": { + "args": [ + { + "type": "string" + }, + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "strings.replace_n", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "string" 
+ } + }, + "type": "object" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "strings.reverse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "substring", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "sum", + "decl": { + "args": [ + { + "of": [ + { + "dynamic": { + "type": "number" + }, + "type": "array" + }, + { + "of": { + "type": "number" + }, + "type": "set" + } + ], + "type": "any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.add_date", + "decl": { + "args": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.clock", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.date", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.diff", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], 
+ "type": "any" + }, + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "static": [ + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + }, + { + "type": "number" + } + ], + "type": "array" + }, + "type": "function" + } + }, + { + "name": "time.format", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "time.now_ns", + "decl": { + "result": { + "type": "number" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "time.parse_duration_ns", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.parse_ns", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.parse_rfc3339_ns", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "time.weekday", + "decl": { + "args": [ + { + "of": [ + { + "type": "number" + }, + { + "static": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": "array" + } + ], + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "to_number", + "decl": { + "args": [ + { + "of": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + } + ], + "type": 
"any" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "trace", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + "name": "trim", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_left", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_prefix", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_right", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_space", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "trim_suffix", + "decl": { + "args": [ + { + "type": "string" + }, + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "type_name", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "union", + "decl": { + "args": [ + { + "of": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "set" + } + ], + "result": { + "of": { + "type": "any" + }, + "type": "set" + }, + "type": "function" + } + }, + { + "name": "units.parse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "units.parse_bytes", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "number" + }, + "type": "function" + } + }, + { + "name": "upper", + 
"decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.decode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.decode_object", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "dynamic": { + "type": "string" + }, + "type": "array" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "urlquery.encode", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "urlquery.encode_object", + "decl": { + "args": [ + { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "of": [ + { + "type": "string" + }, + { + "dynamic": { + "type": "string" + }, + "type": "array" + }, + { + "of": { + "type": "string" + }, + "type": "set" + } + ], + "type": "any" + } + }, + "type": "object" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "uuid.parse", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "dynamic": { + "key": { + "type": "string" + }, + "value": { + "type": "any" + } + }, + "type": "object" + }, + "type": "function" + } + }, + { + "name": "uuid.rfc4122", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "string" + }, + "type": "function" + }, + "nondeterministic": true + }, + { + "name": "walk", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "static": [ + { + "dynamic": { + "type": "any" + }, + "type": "array" + }, + { + "type": "any" + } + ], + "type": "array" + }, + "type": "function" + }, + "relation": true + }, + { + "name": "yaml.is_valid", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "boolean" + }, + "type": "function" + } + }, + { + 
"name": "yaml.marshal", + "decl": { + "args": [ + { + "type": "any" + } + ], + "result": { + "type": "string" + }, + "type": "function" + } + }, + { + "name": "yaml.unmarshal", + "decl": { + "args": [ + { + "type": "string" + } + ], + "result": { + "type": "any" + }, + "type": "function" + } + } + ], + "wasm_abi_versions": [ + { + "version": 1, + "minor_version": 1 + }, + { + "version": 1, + "minor_version": 2 + } + ], + "features": [ + "rego_v1" + ] +} diff --git a/vendor/github.com/open-policy-agent/opa/internal/config/config.go b/vendor/github.com/open-policy-agent/opa/internal/config/config.go index fdac48772..d4fae5fa6 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/config/config.go +++ b/vendor/github.com/open-policy-agent/opa/internal/config/config.go @@ -70,7 +70,7 @@ func ParseServicesConfig(opts ServiceOptions) (map[string]rest.Client, error) { // read from disk (if specified) and overrides will be applied. If no config file is // specified, the overrides can still be applied to an empty config. 
func Load(configFile string, overrides []string, overrideFiles []string) ([]byte, error) { - baseConf := map[string]interface{}{} + baseConf := map[string]any{} // User specified config file if configFile != "" { @@ -88,7 +88,7 @@ func Load(configFile string, overrides []string, overrideFiles []string) ([]byte } } - overrideConf := map[string]interface{}{} + overrideConf := map[string]any{} // User specified a config override via --set for _, override := range overrides { @@ -100,7 +100,7 @@ func Load(configFile string, overrides []string, overrideFiles []string) ([]byte // User specified a config override value via --set-file for _, override := range overrideFiles { - reader := func(rs []rune) (interface{}, error) { + reader := func(rs []rune) (any, error) { bytes, err := os.ReadFile(string(rs)) value := strings.TrimSpace(string(bytes)) return value, err @@ -141,21 +141,21 @@ func subEnvVars(s string) string { } // mergeValues will merge source and destination map, preferring values from the source map -func mergeValues(dest map[string]interface{}, src map[string]interface{}) map[string]interface{} { +func mergeValues(dest map[string]any, src map[string]any) map[string]any { for k, v := range src { // If the key doesn't exist already, then just set the key to that value if _, exists := dest[k]; !exists { dest[k] = v continue } - nextMap, ok := v.(map[string]interface{}) + nextMap, ok := v.(map[string]any) // If it isn't another map, overwrite the value if !ok { dest[k] = v continue } // Edge case: If the key exists in the destination, but isn't a map - destMap, isMap := dest[k].(map[string]interface{}) + destMap, isMap := dest[k].(map[string]any) // If the source map has a map for this key, prefer it if !isMap { dest[k] = v diff --git a/vendor/github.com/open-policy-agent/opa/internal/debug/debug.go b/vendor/github.com/open-policy-agent/opa/internal/debug/debug.go index 7b90bd1bb..9448aeb28 100644 --- 
a/vendor/github.com/open-policy-agent/opa/internal/debug/debug.go +++ b/vendor/github.com/open-policy-agent/opa/internal/debug/debug.go @@ -8,7 +8,7 @@ import ( // Debug allows printing debug messages. type Debug interface { // Printf prints, with a short file:line-number prefix - Printf(format string, args ...interface{}) + Printf(format string, args ...any) // Writer returns the writer being written to, which may be // `io.Discard` if no debug output is requested. Writer() io.Writer diff --git a/vendor/github.com/open-policy-agent/opa/internal/deepcopy/deepcopy.go b/vendor/github.com/open-policy-agent/opa/internal/deepcopy/deepcopy.go index 00e8df6f8..dc3a231bc 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/deepcopy/deepcopy.go +++ b/vendor/github.com/open-policy-agent/opa/internal/deepcopy/deepcopy.go @@ -5,25 +5,25 @@ package deepcopy // DeepCopy performs a recursive deep copy for nested slices/maps and -// returns the copied object. Supports []interface{} -// and map[string]interface{} only -func DeepCopy(val interface{}) interface{} { +// returns the copied object. 
Supports []any +// and map[string]any only +func DeepCopy(val any) any { switch val := val.(type) { - case []interface{}: - cpy := make([]interface{}, len(val)) + case []any: + cpy := make([]any, len(val)) for i := range cpy { cpy[i] = DeepCopy(val[i]) } return cpy - case map[string]interface{}: + case map[string]any: return Map(val) default: return val } } -func Map(val map[string]interface{}) map[string]interface{} { - cpy := make(map[string]interface{}, len(val)) +func Map(val map[string]any) map[string]any { + cpy := make(map[string]any, len(val)) for k := range val { cpy[k] = DeepCopy(val[k]) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/edittree/bitvector/bitvector.go b/vendor/github.com/open-policy-agent/opa/internal/edittree/bitvector/bitvector.go index 8e4d65ed3..bfacf3bce 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/edittree/bitvector/bitvector.go +++ b/vendor/github.com/open-policy-agent/opa/internal/edittree/bitvector/bitvector.go @@ -51,7 +51,7 @@ func shiftLower(bit byte, b []byte) byte { // position of the first byte in the slice. // This returns the bit that was shifted off the last byte. func shiftHigher(bit byte, b []byte) byte { - for i := 0; i < len(b); i++ { + for i := range b { newByte := b[i] << 1 newByte |= bit bit = (b[i] & 0x80) >> 7 diff --git a/vendor/github.com/open-policy-agent/opa/internal/edittree/edittree.go b/vendor/github.com/open-policy-agent/opa/internal/edittree/edittree.go index 378fe99a3..b8b6572f2 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/edittree/edittree.go +++ b/vendor/github.com/open-policy-agent/opa/internal/edittree/edittree.go @@ -723,15 +723,17 @@ func (e *EditTree) Unfold(path ast.Ref) (*EditTree, error) { return child.Unfold(path[1:]) } + idxt := ast.InternedIntNumberTerm(idx) + // Fall back to looking up the key in e.value. // Extend the tree if key is present. Error otherwise. 
- if v, err := x.Find(ast.Ref{ast.InternedIntNumberTerm(idx)}); err == nil { + if v, err := x.Find(ast.Ref{idxt}); err == nil { // TODO: Consider a more efficient "Replace" function that special-cases this for arrays instead? - _, err := e.Delete(ast.InternedIntNumberTerm(idx)) + _, err := e.Delete(idxt) if err != nil { return nil, err } - child, err := e.Insert(ast.IntNumberTerm(idx), ast.NewTerm(v)) + child, err := e.Insert(idxt, ast.NewTerm(v)) if err != nil { return nil, err } diff --git a/vendor/github.com/open-policy-agent/opa/internal/future/filter_imports.go b/vendor/github.com/open-policy-agent/opa/internal/future/filter_imports.go index f8037ed63..1586a216a 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/future/filter_imports.go +++ b/vendor/github.com/open-policy-agent/opa/internal/future/filter_imports.go @@ -19,14 +19,12 @@ func FilterFutureImports(imps []*ast.Import) []*ast.Import { return ret } -var keywordsTerm = ast.StringTerm("keywords") - // IsAllFutureKeywords returns true if the passed *ast.Import is `future.keywords` func IsAllFutureKeywords(imp *ast.Import) bool { path := imp.Path.Value.(ast.Ref) return len(path) == 2 && ast.FutureRootDocument.Equal(path[0]) && - path[1].Equal(keywordsTerm) + path[1].Equal(ast.InternedStringTerm("keywords")) } // IsFutureKeyword returns true if the passed *ast.Import is `future.keywords.{kw}` @@ -34,7 +32,7 @@ func IsFutureKeyword(imp *ast.Import, kw string) bool { path := imp.Path.Value.(ast.Ref) return len(path) == 3 && ast.FutureRootDocument.Equal(path[0]) && - path[1].Equal(keywordsTerm) && + path[1].Equal(ast.InternedStringTerm("keywords")) && path[2].Equal(ast.StringTerm(kw)) } @@ -42,7 +40,7 @@ func WhichFutureKeyword(imp *ast.Import) (string, bool) { path := imp.Path.Value.(ast.Ref) if len(path) == 3 && ast.FutureRootDocument.Equal(path[0]) && - path[1].Equal(keywordsTerm) { + path[1].Equal(ast.InternedStringTerm("keywords")) { if str, ok := path[2].Value.(ast.String); ok { return 
string(str), true } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/draft.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/draft.go index dac1aafda..656804acb 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/draft.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/draft.go @@ -86,12 +86,12 @@ func (dc draftConfigs) GetSchemaURL(draft Draft) string { return "" } -func parseSchemaURL(documentNode interface{}) (string, *Draft, error) { +func parseSchemaURL(documentNode any) (string, *Draft, error) { if _, ok := documentNode.(bool); ok { return "", nil, nil } - m, ok := documentNode.(map[string]interface{}) + m, ok := documentNode.(map[string]any) if !ok { return "", nil, errors.New("schema is invalid") } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/errors.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/errors.go index f7aaf9030..a937d9b3b 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/errors.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/errors.go @@ -212,7 +212,7 @@ type ( ) // newError takes a ResultError type and sets the type, context, description, details, value, and field -func newError(err ResultError, context *JSONContext, value interface{}, locale locale, details ErrorDetails) { +func newError(err ResultError, context *JSONContext, value any, locale locale, details ErrorDetails) { var t string var d string switch err.(type) { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/format_checkers.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/format_checkers.go index 1e770464e..c078e9862 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/format_checkers.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/format_checkers.go @@ -14,7 +14,7 @@ type ( // FormatChecker is the interface all 
formatters added to FormatCheckerChain must implement FormatChecker interface { // IsFormat checks if input has the correct format - IsFormat(input interface{}) bool + IsFormat(input any) bool } // FormatCheckerChain holds the formatters @@ -174,7 +174,7 @@ func (c *FormatCheckerChain) Has(name string) bool { // IsFormat will check an input against a FormatChecker with the given name // to see if it is the correct format -func (c *FormatCheckerChain) IsFormat(name string, input interface{}) bool { +func (c *FormatCheckerChain) IsFormat(name string, input any) bool { lock.RLock() f, ok := c.formatters[name] lock.RUnlock() @@ -188,7 +188,7 @@ func (c *FormatCheckerChain) IsFormat(name string, input interface{}) bool { } // IsFormat checks if input is a correctly formatted e-mail address -func (f EmailFormatChecker) IsFormat(input interface{}) bool { +func (f EmailFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -199,7 +199,7 @@ func (f EmailFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted IPv4-address -func (f IPV4FormatChecker) IsFormat(input interface{}) bool { +func (f IPV4FormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -211,7 +211,7 @@ func (f IPV4FormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted IPv6=address -func (f IPV6FormatChecker) IsFormat(input interface{}) bool { +func (f IPV6FormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -223,7 +223,7 @@ func (f IPV6FormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted date/time per RFC3339 5.6 -func (f DateTimeFormatChecker) IsFormat(input interface{}) bool { +func (f DateTimeFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -247,7 +247,7 @@ func (f 
DateTimeFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted date (YYYY-MM-DD) -func (f DateFormatChecker) IsFormat(input interface{}) bool { +func (f DateFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -257,7 +257,7 @@ func (f DateFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input correctly formatted time (HH:MM:SS or HH:MM:SSZ-07:00) -func (f TimeFormatChecker) IsFormat(input interface{}) bool { +func (f TimeFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -272,7 +272,7 @@ func (f TimeFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is correctly formatted URI with a valid Scheme per RFC3986 -func (f URIFormatChecker) IsFormat(input interface{}) bool { +func (f URIFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -288,7 +288,7 @@ func (f URIFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted URI or relative-reference per RFC3986 -func (f URIReferenceFormatChecker) IsFormat(input interface{}) bool { +func (f URIReferenceFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -299,7 +299,7 @@ func (f URIReferenceFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted URI template per RFC6570 -func (f URITemplateFormatChecker) IsFormat(input interface{}) bool { +func (f URITemplateFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -314,7 +314,7 @@ func (f URITemplateFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted hostname -func (f HostnameFormatChecker) IsFormat(input interface{}) bool { +func (f HostnameFormatChecker) IsFormat(input any) bool { asString, ok := 
input.(string) if !ok { return true @@ -324,7 +324,7 @@ func (f HostnameFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted UUID -func (f UUIDFormatChecker) IsFormat(input interface{}) bool { +func (f UUIDFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -334,7 +334,7 @@ func (f UUIDFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted regular expression -func (f RegexFormatChecker) IsFormat(input interface{}) bool { +func (f RegexFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -348,7 +348,7 @@ func (f RegexFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted JSON Pointer per RFC6901 -func (f JSONPointerFormatChecker) IsFormat(input interface{}) bool { +func (f JSONPointerFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true @@ -358,7 +358,7 @@ func (f JSONPointerFormatChecker) IsFormat(input interface{}) bool { } // IsFormat checks if input is a correctly formatted relative JSON Pointer -func (f RelativeJSONPointerFormatChecker) IsFormat(input interface{}) bool { +func (f RelativeJSONPointerFormatChecker) IsFormat(input any) bool { asString, ok := input.(string) if !ok { return true diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/internalLog.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/internalLog.go index 4ef7a8d03..bab75112e 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/internalLog.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/internalLog.go @@ -32,6 +32,6 @@ import ( const internalLogEnabled = false -func internalLog(format string, v ...interface{}) { +func internalLog(format string, v ...any) { log.Printf(format, v...) 
} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/jsonLoader.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/jsonLoader.go index 1011552de..73f25e3b7 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/jsonLoader.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/jsonLoader.go @@ -77,8 +77,8 @@ var osFS = osFileSystem(os.Open) // JSONLoader defines the JSON loader interface type JSONLoader interface { - JSONSource() interface{} - LoadJSON() (interface{}, error) + JSONSource() any + LoadJSON() (any, error) JSONReference() (gojsonreference.JsonReference, error) LoaderFactory() JSONLoaderFactory } @@ -130,7 +130,7 @@ type jsonReferenceLoader struct { source string } -func (l *jsonReferenceLoader) JSONSource() interface{} { +func (l *jsonReferenceLoader) JSONSource() any { return l.source } @@ -160,7 +160,7 @@ func NewReferenceLoaderFileSystem(source string, fs http.FileSystem) JSONLoader } } -func (l *jsonReferenceLoader) LoadJSON() (interface{}, error) { +func (l *jsonReferenceLoader) LoadJSON() (any, error) { var err error @@ -207,7 +207,7 @@ func (l *jsonReferenceLoader) LoadJSON() (interface{}, error) { return nil, fmt.Errorf("remote reference loading disabled: %s", reference.String()) } -func (l *jsonReferenceLoader) loadFromHTTP(address string) (interface{}, error) { +func (l *jsonReferenceLoader) loadFromHTTP(address string) (any, error) { resp, err := http.Get(address) if err != nil { @@ -227,7 +227,7 @@ func (l *jsonReferenceLoader) loadFromHTTP(address string) (interface{}, error) return decodeJSONUsingNumber(bytes.NewReader(bodyBuff)) } -func (l *jsonReferenceLoader) loadFromFile(path string) (interface{}, error) { +func (l *jsonReferenceLoader) loadFromFile(path string) (any, error) { f, err := l.fs.Open(path) if err != nil { return nil, err @@ -249,7 +249,7 @@ type jsonStringLoader struct { source string } -func (l *jsonStringLoader) JSONSource() interface{} { +func 
(l *jsonStringLoader) JSONSource() any { return l.source } @@ -266,7 +266,7 @@ func NewStringLoader(source string) JSONLoader { return &jsonStringLoader{source: source} } -func (l *jsonStringLoader) LoadJSON() (interface{}, error) { +func (l *jsonStringLoader) LoadJSON() (any, error) { return decodeJSONUsingNumber(strings.NewReader(l.JSONSource().(string))) @@ -278,7 +278,7 @@ type jsonBytesLoader struct { source []byte } -func (l *jsonBytesLoader) JSONSource() interface{} { +func (l *jsonBytesLoader) JSONSource() any { return l.source } @@ -295,18 +295,18 @@ func NewBytesLoader(source []byte) JSONLoader { return &jsonBytesLoader{source: source} } -func (l *jsonBytesLoader) LoadJSON() (interface{}, error) { +func (l *jsonBytesLoader) LoadJSON() (any, error) { return decodeJSONUsingNumber(bytes.NewReader(l.JSONSource().([]byte))) } // JSON Go (types) loader -// used to load JSONs from the code as maps, interface{}, structs ... +// used to load JSONs from the code as maps, any, structs ... 
type jsonGoLoader struct { - source interface{} + source any } -func (l *jsonGoLoader) JSONSource() interface{} { +func (l *jsonGoLoader) JSONSource() any { return l.source } @@ -319,11 +319,11 @@ func (l *jsonGoLoader) LoaderFactory() JSONLoaderFactory { } // NewGoLoader creates a new JSONLoader from a given Go struct -func NewGoLoader(source interface{}) JSONLoader { +func NewGoLoader(source any) JSONLoader { return &jsonGoLoader{source: source} } -func (l *jsonGoLoader) LoadJSON() (interface{}, error) { +func (l *jsonGoLoader) LoadJSON() (any, error) { // convert it to a compliant JSON first to avoid types "mismatches" @@ -352,11 +352,11 @@ func NewWriterLoader(source io.Writer) (JSONLoader, io.Writer) { return &jsonIOLoader{buf: buf}, io.MultiWriter(source, buf) } -func (l *jsonIOLoader) JSONSource() interface{} { +func (l *jsonIOLoader) JSONSource() any { return l.buf.String() } -func (l *jsonIOLoader) LoadJSON() (interface{}, error) { +func (l *jsonIOLoader) LoadJSON() (any, error) { return decodeJSONUsingNumber(l.buf) } @@ -369,21 +369,21 @@ func (l *jsonIOLoader) LoaderFactory() JSONLoaderFactory { } // JSON raw loader -// In case the JSON is already marshalled to interface{} use this loader +// In case the JSON is already marshalled to any use this loader // This is used for testing as otherwise there is no guarantee the JSON is marshalled // "properly" by using https://golang.org/pkg/encoding/json/#Decoder.UseNumber type jsonRawLoader struct { - source interface{} + source any } // NewRawLoader creates a new JSON raw loader for the given source -func NewRawLoader(source interface{}) JSONLoader { +func NewRawLoader(source any) JSONLoader { return &jsonRawLoader{source: source} } -func (l *jsonRawLoader) JSONSource() interface{} { +func (l *jsonRawLoader) JSONSource() any { return l.source } -func (l *jsonRawLoader) LoadJSON() (interface{}, error) { +func (l *jsonRawLoader) LoadJSON() (any, error) { return l.source, nil } func (l *jsonRawLoader) 
JSONReference() (gojsonreference.JsonReference, error) { @@ -393,9 +393,9 @@ func (l *jsonRawLoader) LoaderFactory() JSONLoaderFactory { return &DefaultJSONLoaderFactory{} } -func decodeJSONUsingNumber(r io.Reader) (interface{}, error) { +func decodeJSONUsingNumber(r io.Reader) (any, error) { - var document interface{} + var document any decoder := json.NewDecoder(r) decoder.UseNumber() diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/result.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/result.go index 8baff0717..0329721c2 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/result.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/result.go @@ -33,7 +33,7 @@ import ( type ( // ErrorDetails is a map of details specific to each error. // While the values will vary, every error will contain a "field" value - ErrorDetails map[string]interface{} + ErrorDetails map[string]any // ResultError is the interface that library errors must implement ResultError interface { @@ -57,9 +57,9 @@ type ( // DescriptionFormat returns the format for the description in the default text/template format DescriptionFormat() string // SetValue sets the value related to the error - SetValue(interface{}) + SetValue(any) // Value returns the value related to the error - Value() interface{} + Value() any // SetDetails sets the details specific to the error SetDetails(ErrorDetails) // Details returns details about the error @@ -76,7 +76,7 @@ type ( context *JSONContext // Tree like notation of the part that failed the validation. ex (root).a.b ... 
description string // A human readable error message descriptionFormat string // A format for human readable error message - value interface{} // Value given by the JSON file that is the source of the error + value any // Value given by the JSON file that is the source of the error details ErrorDetails } @@ -136,12 +136,12 @@ func (v *ResultErrorFields) DescriptionFormat() string { } // SetValue sets the value related to the error -func (v *ResultErrorFields) SetValue(value interface{}) { +func (v *ResultErrorFields) SetValue(value any) { v.value = value } // Value returns the value related to the error -func (v *ResultErrorFields) Value() interface{} { +func (v *ResultErrorFields) Value() any { return v.value } @@ -203,7 +203,7 @@ func (v *Result) AddError(err ResultError, details ErrorDetails) { v.errors = append(v.errors, err) } -func (v *Result) addInternalError(err ResultError, context *JSONContext, value interface{}, details ErrorDetails) { +func (v *Result) addInternalError(err ResultError, context *JSONContext, value any, details ErrorDetails) { newError(err, context, value, Locale, details) v.errors = append(v.errors, err) v.score -= 2 // results in a net -1 when added to the +1 we get at the end of the validation function diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schema.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schema.go index 8e035013c..e8007ee2b 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schema.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schema.go @@ -58,7 +58,7 @@ type Schema struct { ReferencePool *schemaReferencePool } -func (d *Schema) parse(document interface{}, draft Draft) error { +func (d *Schema) parse(document any, draft Draft) error { d.RootSchema = &SubSchema{Property: StringRootSchemaProperty, Draft: &draft} return d.parseSchema(document, d.RootSchema) } @@ -73,7 +73,7 @@ func (d *Schema) SetRootSchemaName(name string) { // 
Pretty long function ( sorry :) )... but pretty straight forward, repetitive and boring // Not much magic involved here, most of the job is to validate the key names and their values, // then the values are copied into SubSchema struct -func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) error { +func (d *Schema) parseSchema(documentNode any, currentSchema *SubSchema) error { if currentSchema.Draft == nil { if currentSchema.Parent == nil { @@ -90,7 +90,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) } } - m, isMap := documentNode.(map[string]interface{}) + m, isMap := documentNode.(map[string]any) if !isMap { return errors.New(formatErrorDescription( Locale.ParseError(), @@ -146,10 +146,10 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) // definitions if v, ok := m[KeyDefinitions]; ok { switch mt := v.(type) { - case map[string]interface{}: + case map[string]any: for _, dv := range mt { switch dv.(type) { - case bool, map[string]interface{}: + case bool, map[string]any: newSchema := &SubSchema{Property: KeyDefinitions, Parent: currentSchema} err := d.parseSchema(dv, newSchema) if err != nil { @@ -203,7 +203,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) if err != nil { return err } - case []interface{}: + case []any: for _, typeInArray := range t { s, isString := typeInArray.(string) if !isString { @@ -231,7 +231,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) switch v := additionalProperties.(type) { case bool: currentSchema.additionalProperties = v - case map[string]interface{}: + case map[string]any: newSchema := &SubSchema{Property: KeyAdditionalProperties, Parent: currentSchema, Ref: currentSchema.Ref} currentSchema.additionalProperties = newSchema err := d.parseSchema(v, newSchema) @@ -270,7 +270,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) 
// propertyNames if propertyNames, found := m[KeyPropertyNames]; found && *currentSchema.Draft >= Draft6 { switch propertyNames.(type) { - case bool, map[string]interface{}: + case bool, map[string]any: newSchema := &SubSchema{Property: KeyPropertyNames, Parent: currentSchema, Ref: currentSchema.Ref} currentSchema.propertyNames = newSchema err := d.parseSchema(propertyNames, newSchema) @@ -299,10 +299,10 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) // items if items, found := m[KeyItems]; found { switch i := items.(type) { - case []interface{}: + case []any: for _, itemElement := range i { switch itemElement.(type) { - case map[string]interface{}, bool: + case map[string]any, bool: newSchema := &SubSchema{Parent: currentSchema, Property: KeyItems} newSchema.Ref = currentSchema.Ref currentSchema.ItemsChildren = append(currentSchema.ItemsChildren, newSchema) @@ -315,7 +315,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) } currentSchema.ItemsChildrenIsSingleSchema = false } - case map[string]interface{}, bool: + case map[string]any, bool: newSchema := &SubSchema{Parent: currentSchema, Property: KeyItems} newSchema.Ref = currentSchema.Ref currentSchema.ItemsChildren = append(currentSchema.ItemsChildren, newSchema) @@ -334,7 +334,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) switch i := additionalItems.(type) { case bool: currentSchema.additionalItems = i - case map[string]interface{}: + case map[string]any: newSchema := &SubSchema{Property: KeyAdditionalItems, Parent: currentSchema, Ref: currentSchema.Ref} currentSchema.additionalItems = newSchema err := d.parseSchema(additionalItems, newSchema) @@ -717,7 +717,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) if vNot, found := m[KeyNot]; found { switch vNot.(type) { - case bool, map[string]interface{}: + case bool, map[string]any: newSchema := &SubSchema{Property: 
KeyNot, Parent: currentSchema, Ref: currentSchema.Ref} currentSchema.not = newSchema err := d.parseSchema(vNot, newSchema) @@ -735,7 +735,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) if *currentSchema.Draft >= Draft7 { if vIf, found := m[KeyIf]; found { switch vIf.(type) { - case bool, map[string]interface{}: + case bool, map[string]any: newSchema := &SubSchema{Property: KeyIf, Parent: currentSchema, Ref: currentSchema.Ref} currentSchema._if = newSchema err := d.parseSchema(vIf, newSchema) @@ -752,7 +752,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) if then, found := m[KeyThen]; found { switch then.(type) { - case bool, map[string]interface{}: + case bool, map[string]any: newSchema := &SubSchema{Property: KeyThen, Parent: currentSchema, Ref: currentSchema.Ref} currentSchema._then = newSchema err := d.parseSchema(then, newSchema) @@ -769,7 +769,7 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) if vElse, found := m[KeyElse]; found { switch vElse.(type) { - case bool, map[string]interface{}: + case bool, map[string]any: newSchema := &SubSchema{Property: KeyElse, Parent: currentSchema, Ref: currentSchema.Ref} currentSchema._else = newSchema err := d.parseSchema(vElse, newSchema) @@ -788,9 +788,9 @@ func (d *Schema) parseSchema(documentNode interface{}, currentSchema *SubSchema) return nil } -func (d *Schema) parseReference(_ interface{}, currentSchema *SubSchema) error { +func (d *Schema) parseReference(_ any, currentSchema *SubSchema) error { var ( - refdDocumentNode interface{} + refdDocumentNode any dsp *schemaPoolDocument err error ) @@ -809,7 +809,7 @@ func (d *Schema) parseReference(_ interface{}, currentSchema *SubSchema) error { newSchema.Draft = dsp.Draft switch refdDocumentNode.(type) { - case bool, map[string]interface{}: + case bool, map[string]any: // expected default: return errors.New(formatErrorDescription( @@ -829,8 +829,8 @@ func (d 
*Schema) parseReference(_ interface{}, currentSchema *SubSchema) error { } -func (d *Schema) parseProperties(documentNode interface{}, currentSchema *SubSchema) error { - m, isMap := documentNode.(map[string]interface{}) +func (d *Schema) parseProperties(documentNode any, currentSchema *SubSchema) error { + m, isMap := documentNode.(map[string]any) if !isMap { return errors.New(formatErrorDescription( Locale.MustBeOfType(), @@ -851,19 +851,19 @@ func (d *Schema) parseProperties(documentNode interface{}, currentSchema *SubSch return nil } -func (d *Schema) parseDependencies(documentNode interface{}, currentSchema *SubSchema) error { - m, isMap := documentNode.(map[string]interface{}) +func (d *Schema) parseDependencies(documentNode any, currentSchema *SubSchema) error { + m, isMap := documentNode.(map[string]any) if !isMap { return errors.New(formatErrorDescription( Locale.MustBeOfType(), ErrorDetails{"key": KeyDependencies, "type": TypeObject}, )) } - currentSchema.dependencies = make(map[string]interface{}) + currentSchema.dependencies = make(map[string]any) for k := range m { switch values := m[k].(type) { - case []interface{}: + case []any: var valuesToRegister []string for _, value := range values { str, isString := value.(string) @@ -880,7 +880,7 @@ func (d *Schema) parseDependencies(documentNode interface{}, currentSchema *SubS currentSchema.dependencies[k] = valuesToRegister } - case bool, map[string]interface{}: + case bool, map[string]any: depSchema := &SubSchema{Property: k, Parent: currentSchema, Ref: currentSchema.Ref} err := d.parseSchema(m[k], depSchema) if err != nil { @@ -913,7 +913,7 @@ func invalidType(expected, given string) error { )) } -func getString(m map[string]interface{}, key string) (*string, error) { +func getString(m map[string]any, key string) (*string, error) { v, found := m[key] if !found { // not found @@ -927,13 +927,13 @@ func getString(m map[string]interface{}, key string) (*string, error) { return &s, nil } -func getMap(m 
map[string]interface{}, key string) (map[string]interface{}, error) { +func getMap(m map[string]any, key string) (map[string]any, error) { v, found := m[key] if !found { // not found return nil, nil } - s, isMap := v.(map[string]interface{}) + s, isMap := v.(map[string]any) if !isMap { // wrong type return nil, invalidType(StringSchema, key) @@ -941,12 +941,12 @@ func getMap(m map[string]interface{}, key string) (map[string]interface{}, error return s, nil } -func getSlice(m map[string]interface{}, key string) ([]interface{}, error) { +func getSlice(m map[string]any, key string) ([]any, error) { v, found := m[key] if !found { return nil, nil } - s, isArray := v.([]interface{}) + s, isArray := v.([]any) if !isArray { return nil, errors.New(formatErrorDescription( Locale.MustBeOfAn(), diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaLoader.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaLoader.go index 8cc6dc03b..88caa65de 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaLoader.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaLoader.go @@ -45,7 +45,7 @@ func NewSchemaLoader() *SchemaLoader { return ps } -func (sl *SchemaLoader) validateMetaschema(documentNode interface{}) error { +func (sl *SchemaLoader) validateMetaschema(documentNode any) error { var ( schema string @@ -158,7 +158,7 @@ func (sl *SchemaLoader) Compile(rootSchema JSONLoader) (*Schema, error) { d.DocumentReference = ref d.ReferencePool = newSchemaReferencePool() - var doc interface{} + var doc any if ref.String() != "" { // Get document from schema pool spd, err := d.Pool.GetDocument(d.DocumentReference) diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaPool.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaPool.go index ed8ff688b..513f8df2c 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaPool.go +++ 
b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaPool.go @@ -34,7 +34,7 @@ import ( ) type schemaPoolDocument struct { - Document interface{} + Document any Draft *Draft } @@ -44,7 +44,7 @@ type schemaPool struct { autoDetect *bool } -func (p *schemaPool) parseReferences(document interface{}, ref gojsonreference.JsonReference, pooled bool) error { +func (p *schemaPool) parseReferences(document any, ref gojsonreference.JsonReference, pooled bool) error { var ( draft *Draft @@ -72,7 +72,7 @@ func (p *schemaPool) parseReferences(document interface{}, ref gojsonreference.J return err } -func (p *schemaPool) parseReferencesRecursive(document interface{}, ref gojsonreference.JsonReference, draft *Draft) error { +func (p *schemaPool) parseReferencesRecursive(document any, ref gojsonreference.JsonReference, draft *Draft) error { // parseReferencesRecursive parses a JSON document and resolves all $id and $ref references. // For $ref references it takes into account the $id scope it is in and replaces // the reference by the absolute resolved reference @@ -80,14 +80,14 @@ func (p *schemaPool) parseReferencesRecursive(document interface{}, ref gojsonre // When encountering errors it fails silently. Error handling is done when the schema // is syntactically parsed and any error encountered here should also come up there. switch m := document.(type) { - case []interface{}: + case []any: for _, v := range m { err := p.parseReferencesRecursive(v, ref, draft) if err != nil { return err } } - case map[string]interface{}: + case map[string]any: localRef := &ref keyID := KeyIDNew @@ -129,7 +129,7 @@ func (p *schemaPool) parseReferencesRecursive(document interface{}, ref gojsonre // Something like a property or a dependency is not a valid schema, as it might describe properties named "$ref", "$id" or "const", etc // Therefore don't treat it like a schema. 
if k == KeyProperties || k == KeyDependencies || k == KeyPatternProperties { - if child, ok := v.(map[string]interface{}); ok { + if child, ok := v.(map[string]any); ok { for _, v := range child { err := p.parseReferencesRecursive(v, *localRef, draft) if err != nil { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaType.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaType.go index 271832d33..4abcc6814 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaType.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/schemaType.go @@ -28,6 +28,7 @@ package gojsonschema import ( "errors" "fmt" + "slices" "strings" ) @@ -58,13 +59,7 @@ func (t *jsonSchemaType) Add(etype string) error { func (t *jsonSchemaType) Contains(etype string) bool { - for _, v := range t.types { - if v == etype { - return true - } - } - - return false + return slices.Contains(t.types, etype) } func (t *jsonSchemaType) String() string { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/subSchema.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/subSchema.go index d8bc0cb56..b7ceb3136 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/subSchema.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/subSchema.go @@ -123,8 +123,8 @@ type SubSchema struct { maxProperties *int required []string - dependencies map[string]interface{} - additionalProperties interface{} + dependencies map[string]any + additionalProperties any patternProperties map[string]*SubSchema propertyNames *SubSchema @@ -134,7 +134,7 @@ type SubSchema struct { uniqueItems bool contains *SubSchema - additionalItems interface{} + additionalItems any // validation : all _const *string //const is a golang keyword diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/utils.go 
b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/utils.go index fd0f1870f..ca071930f 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/utils.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/utils.go @@ -29,18 +29,14 @@ package gojsonschema import ( "encoding/json" "math/big" + "slices" ) func isStringInSlice(s []string, what string) bool { - for i := range s { - if s[i] == what { - return true - } - } - return false + return slices.Contains(s, what) } -func marshalToJSONString(value interface{}) (*string, error) { +func marshalToJSONString(value any) (*string, error) { mBytes, err := json.Marshal(value) if err != nil { @@ -51,7 +47,7 @@ func marshalToJSONString(value interface{}) (*string, error) { return &sBytes, nil } -func marshalWithoutNumber(value interface{}) (*string, error) { +func marshalWithoutNumber(value any) (*string, error) { // The JSON is decoded using https://golang.org/pkg/encoding/json/#Decoder.UseNumber // This means the numbers are internally still represented as strings and therefore 1.00 is unequal to 1 @@ -63,7 +59,7 @@ func marshalWithoutNumber(value interface{}) (*string, error) { return nil, err } - var document interface{} + var document any err = json.Unmarshal([]byte(*jsonString), &document) if err != nil { @@ -73,7 +69,7 @@ func marshalWithoutNumber(value interface{}) (*string, error) { return marshalToJSONString(document) } -func isJSONNumber(what interface{}) bool { +func isJSONNumber(what any) bool { switch what.(type) { @@ -84,7 +80,7 @@ func isJSONNumber(what interface{}) bool { return false } -func checkJSONInteger(what interface{}) (isInt bool) { +func checkJSONInteger(what any) (isInt bool) { jsonNumber := what.(json.Number) @@ -100,7 +96,7 @@ const ( minJSONFloat = -float64(1<<53 - 1) //-9007199254740991.0 -2^53 - 1 ) -func mustBeInteger(what interface{}) *int { +func mustBeInteger(what any) *int { number, ok := what.(json.Number) if !ok { return nil @@ -123,7 
+119,7 @@ func mustBeInteger(what interface{}) *int { return &int32Value } -func mustBeNumber(what interface{}) *big.Rat { +func mustBeNumber(what any) *big.Rat { number, ok := what.(json.Number) if !ok { return nil @@ -136,11 +132,11 @@ func mustBeNumber(what interface{}) *big.Rat { return nil } -func convertDocumentNode(val interface{}) interface{} { +func convertDocumentNode(val any) any { - if lval, ok := val.([]interface{}); ok { + if lval, ok := val.([]any); ok { - res := []interface{}{} + res := []any{} for _, v := range lval { res = append(res, convertDocumentNode(v)) } @@ -149,9 +145,9 @@ func convertDocumentNode(val interface{}) interface{} { } - if mval, ok := val.(map[interface{}]interface{}); ok { + if mval, ok := val.(map[any]any); ok { - res := map[string]interface{}{} + res := map[string]any{} for k, v := range mval { res[k.(string)] = convertDocumentNode(v) diff --git a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/validation.go b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/validation.go index efdea58b6..e33a0f3d2 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/validation.go +++ b/vendor/github.com/open-policy-agent/opa/internal/gojsonschema/validation.go @@ -54,21 +54,21 @@ func (v *Schema) Validate(l JSONLoader) (*Result, error) { return v.validateDocument(root), nil } -func (v *Schema) validateDocument(root interface{}) *Result { +func (v *Schema) validateDocument(root any) *Result { result := &Result{} context := NewJSONContext(StringContextRoot, nil) v.RootSchema.validateRecursive(v.RootSchema, root, result, context) return result } -func (v *SubSchema) subValidateWithContext(document interface{}, context *JSONContext) *Result { +func (v *SubSchema) subValidateWithContext(document any, context *JSONContext) *Result { result := &Result{} v.validateRecursive(v, document, result, context) return result } // Walker function to validate the json recursively against the SubSchema -func (v 
*SubSchema) validateRecursive(currentSubSchema *SubSchema, currentNode interface{}, result *Result, context *JSONContext) { +func (v *SubSchema) validateRecursive(currentSubSchema *SubSchema, currentNode any, result *Result, context *JSONContext) { if internalLogEnabled { internalLog("validateRecursive %s", context.String()) @@ -167,7 +167,7 @@ func (v *SubSchema) validateRecursive(currentSubSchema *SubSchema, currentNode i return } - castCurrentNode := currentNode.([]interface{}) + castCurrentNode := currentNode.([]any) currentSubSchema.validateSchema(currentSubSchema, castCurrentNode, result, context) @@ -190,9 +190,9 @@ func (v *SubSchema) validateRecursive(currentSubSchema *SubSchema, currentNode i return } - castCurrentNode, ok := currentNode.(map[string]interface{}) + castCurrentNode, ok := currentNode.(map[string]any) if !ok { - castCurrentNode = convertDocumentNode(currentNode).(map[string]interface{}) + castCurrentNode = convertDocumentNode(currentNode).(map[string]any) } currentSubSchema.validateSchema(currentSubSchema, castCurrentNode, result, context) @@ -264,7 +264,7 @@ func (v *SubSchema) validateRecursive(currentSubSchema *SubSchema, currentNode i } // Different kinds of validation there, SubSchema / common / array / object / string... 
-func (v *SubSchema) validateSchema(currentSubSchema *SubSchema, currentNode interface{}, result *Result, context *JSONContext) { +func (v *SubSchema) validateSchema(currentSubSchema *SubSchema, currentNode any, result *Result, context *JSONContext) { if internalLogEnabled { internalLog("validateSchema %s", context.String()) @@ -349,14 +349,14 @@ func (v *SubSchema) validateSchema(currentSubSchema *SubSchema, currentNode inte } if len(currentSubSchema.dependencies) > 0 { - if currentNodeMap, ok := currentNode.(map[string]interface{}); ok { + if currentNodeMap, ok := currentNode.(map[string]any); ok { for elementKey := range currentNodeMap { if dependency, ok := currentSubSchema.dependencies[elementKey]; ok { switch dependency := dependency.(type) { case []string: for _, dependOnKey := range dependency { - if _, dependencyResolved := currentNode.(map[string]interface{})[dependOnKey]; !dependencyResolved { + if _, dependencyResolved := currentNode.(map[string]any)[dependOnKey]; !dependencyResolved { result.addInternalError( new(MissingDependencyError), context, @@ -395,7 +395,7 @@ func (v *SubSchema) validateSchema(currentSubSchema *SubSchema, currentNode inte result.incrementScore() } -func (v *SubSchema) validateCommon(currentSubSchema *SubSchema, value interface{}, result *Result, context *JSONContext) { +func (v *SubSchema) validateCommon(currentSubSchema *SubSchema, value any, result *Result, context *JSONContext) { if internalLogEnabled { internalLog("validateCommon %s", context.String()) @@ -452,7 +452,7 @@ func (v *SubSchema) validateCommon(currentSubSchema *SubSchema, value interface{ result.incrementScore() } -func (v *SubSchema) validateArray(currentSubSchema *SubSchema, value []interface{}, result *Result, context *JSONContext) { +func (v *SubSchema) validateArray(currentSubSchema *SubSchema, value []any, result *Result, context *JSONContext) { if internalLogEnabled { internalLog("validateArray %s", context.String()) @@ -578,7 +578,7 @@ func (v 
*SubSchema) validateArray(currentSubSchema *SubSchema, value []interface result.incrementScore() } -func (v *SubSchema) validateObject(currentSubSchema *SubSchema, value map[string]interface{}, result *Result, context *JSONContext) { +func (v *SubSchema) validateObject(currentSubSchema *SubSchema, value map[string]any, result *Result, context *JSONContext) { if internalLogEnabled { internalLog("validateObject %s", context.String()) @@ -675,7 +675,7 @@ func (v *SubSchema) validateObject(currentSubSchema *SubSchema, value map[string result.incrementScore() } -func (v *SubSchema) validatePatternProperty(currentSubSchema *SubSchema, key string, value interface{}, result *Result, context *JSONContext) bool { +func (v *SubSchema) validatePatternProperty(currentSubSchema *SubSchema, key string, value any, result *Result, context *JSONContext) bool { if internalLogEnabled { internalLog("validatePatternProperty %s", context.String()) @@ -701,7 +701,7 @@ func (v *SubSchema) validatePatternProperty(currentSubSchema *SubSchema, key str return true } -func (v *SubSchema) validateString(currentSubSchema *SubSchema, value interface{}, result *Result, context *JSONContext) { +func (v *SubSchema) validateString(currentSubSchema *SubSchema, value any, result *Result, context *JSONContext) { // Ignore JSON numbers stringValue, isString := value.(string) @@ -752,7 +752,7 @@ func (v *SubSchema) validateString(currentSubSchema *SubSchema, value interface{ result.incrementScore() } -func (v *SubSchema) validateNumber(currentSubSchema *SubSchema, value interface{}, result *Result, context *JSONContext) { +func (v *SubSchema) validateNumber(currentSubSchema *SubSchema, value any, result *Result, context *JSONContext) { // Ignore non numbers number, isNumber := value.(json.Number) diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/prelude.graphql b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/prelude.graphql deleted file mode 100644 
index bdca0096a..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/prelude.graphql +++ /dev/null @@ -1,121 +0,0 @@ -# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema - -"The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1." -scalar Int - -"The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point)." -scalar Float - -"The `String`scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text." -scalar String - -"The `Boolean` scalar type represents `true` or `false`." -scalar Boolean - -"""The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as "4") or integer (such as 4) input value will be accepted as an ID.""" -scalar ID - -"The @include directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional inclusion during execution as described by the if argument." -directive @include(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT - -"The @skip directive may be provided for fields, fragment spreads, and inline fragments, and allows for conditional exclusion during execution as described by the if argument." -directive @skip(if: Boolean!) 
on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT - -"The @deprecated built-in directive is used within the type system definition language to indicate deprecated portions of a GraphQL service's schema, such as deprecated fields on a type, arguments on a field, input fields on an input type, or values of an enum type." -directive @deprecated(reason: String = "No longer supported") on FIELD_DEFINITION | ARGUMENT_DEFINITION | INPUT_FIELD_DEFINITION | ENUM_VALUE - -"The @specifiedBy built-in directive is used within the type system definition language to provide a scalar specification URL for specifying the behavior of custom scalar types." -directive @specifiedBy(url: String!) on SCALAR - -type __Schema { - description: String - types: [__Type!]! - queryType: __Type! - mutationType: __Type - subscriptionType: __Type - directives: [__Directive!]! -} - -type __Type { - kind: __TypeKind! - name: String - description: String - # must be non-null for OBJECT and INTERFACE, otherwise null. - fields(includeDeprecated: Boolean = false): [__Field!] - # must be non-null for OBJECT and INTERFACE, otherwise null. - interfaces: [__Type!] - # must be non-null for INTERFACE and UNION, otherwise null. - possibleTypes: [__Type!] - # must be non-null for ENUM, otherwise null. - enumValues(includeDeprecated: Boolean = false): [__EnumValue!] - # must be non-null for INPUT_OBJECT, otherwise null. - inputFields: [__InputValue!] - # must be non-null for NON_NULL and LIST, otherwise null. - ofType: __Type - # may be non-null for custom SCALAR, otherwise null. - specifiedByURL: String -} - -type __Field { - name: String! - description: String - args: [__InputValue!]! - type: __Type! - isDeprecated: Boolean! - deprecationReason: String -} - -type __InputValue { - name: String! - description: String - type: __Type! - defaultValue: String -} - -type __EnumValue { - name: String! - description: String - isDeprecated: Boolean! 
- deprecationReason: String -} - -enum __TypeKind { - SCALAR - OBJECT - INTERFACE - UNION - ENUM - INPUT_OBJECT - LIST - NON_NULL -} - -type __Directive { - name: String! - description: String - locations: [__DirectiveLocation!]! - args: [__InputValue!]! - isRepeatable: Boolean! -} - -enum __DirectiveLocation { - QUERY - MUTATION - SUBSCRIPTION - FIELD - FRAGMENT_DEFINITION - FRAGMENT_SPREAD - INLINE_FRAGMENT - VARIABLE_DEFINITION - SCHEMA - SCALAR - OBJECT - FIELD_DEFINITION - ARGUMENT_DEFINITION - INTERFACE - UNION - ENUM - ENUM_VALUE - INPUT_OBJECT - INPUT_FIELD_DEFINITION -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_argument_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_argument_names.go deleted file mode 100644 index 36b2d057c..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_argument_names.go +++ /dev/null @@ -1,59 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("KnownArgumentNames", func(observers *Events, addError AddErrFunc) { - // A GraphQL field is only valid if all supplied arguments are defined by that field. 
- observers.OnField(func(_ *Walker, field *ast.Field) { - if field.Definition == nil || field.ObjectDefinition == nil { - return - } - for _, arg := range field.Arguments { - def := field.Definition.Arguments.ForName(arg.Name) - if def != nil { - continue - } - - var suggestions []string - for _, argDef := range field.Definition.Arguments { - suggestions = append(suggestions, argDef.Name) - } - - addError( - Message(`Unknown argument "%s" on field "%s.%s".`, arg.Name, field.ObjectDefinition.Name, field.Name), - SuggestListQuoted("Did you mean", arg.Name, suggestions), - At(field.Position), - ) - } - }) - - observers.OnDirective(func(_ *Walker, directive *ast.Directive) { - if directive.Definition == nil { - return - } - for _, arg := range directive.Arguments { - def := directive.Definition.Arguments.ForName(arg.Name) - if def != nil { - continue - } - - var suggestions []string - for _, argDef := range directive.Definition.Arguments { - suggestions = append(suggestions, argDef.Name) - } - - addError( - Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name), - SuggestListQuoted("Did you mean", arg.Name, suggestions), - At(directive.Position), - ) - } - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_fragment_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_fragment_names.go deleted file mode 100644 index 8ae1fc33f..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_fragment_names.go +++ /dev/null @@ -1,21 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . 
"github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("KnownFragmentNames", func(observers *Events, addError AddErrFunc) { - observers.OnFragmentSpread(func(_ *Walker, fragmentSpread *ast.FragmentSpread) { - if fragmentSpread.Definition == nil { - addError( - Message(`Unknown fragment "%s".`, fragmentSpread.Name), - At(fragmentSpread.Position), - ) - } - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_type_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_type_names.go deleted file mode 100644 index aa9809be3..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_type_names.go +++ /dev/null @@ -1,61 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("KnownTypeNames", func(observers *Events, addError AddErrFunc) { - observers.OnVariable(func(walker *Walker, variable *ast.VariableDefinition) { - typeName := variable.Type.Name() - typdef := walker.Schema.Types[typeName] - if typdef != nil { - return - } - - addError( - Message(`Unknown type "%s".`, typeName), - At(variable.Position), - ) - }) - - observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { - typedName := inlineFragment.TypeCondition - if typedName == "" { - return - } - - def := walker.Schema.Types[typedName] - if def != nil { - return - } - - addError( - Message(`Unknown type "%s".`, typedName), - At(inlineFragment.Position), - ) - }) - - observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { - typeName := fragment.TypeCondition - def := walker.Schema.Types[typeName] - if def != nil { - return - } - - var possibleTypes []string - for _, t := range 
walker.Schema.Types { - possibleTypes = append(possibleTypes, t.Name) - } - - addError( - Message(`Unknown type "%s".`, typeName), - SuggestListQuoted("Did you mean", typeName, possibleTypes), - At(fragment.Position), - ) - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/lone_anonymous_operation.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/lone_anonymous_operation.go deleted file mode 100644 index 2af7b5a03..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/lone_anonymous_operation.go +++ /dev/null @@ -1,21 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("LoneAnonymousOperation", func(observers *Events, addError AddErrFunc) { - observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { - if operation.Name == "" && len(walker.Document.Operations) > 1 { - addError( - Message(`This anonymous operation must be the only defined operation.`), - At(operation.Position), - ) - } - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_fragments.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_fragments.go deleted file mode 100644 index f6ba046a1..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_fragments.go +++ /dev/null @@ -1,32 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . 
"github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("NoUnusedFragments", func(observers *Events, addError AddErrFunc) { - - inFragmentDefinition := false - fragmentNameUsed := make(map[string]bool) - - observers.OnFragmentSpread(func(_ *Walker, fragmentSpread *ast.FragmentSpread) { - if !inFragmentDefinition { - fragmentNameUsed[fragmentSpread.Name] = true - } - }) - - observers.OnFragment(func(_ *Walker, fragment *ast.FragmentDefinition) { - inFragmentDefinition = true - if !fragmentNameUsed[fragment.Name] { - addError( - Message(`Fragment "%s" is never used.`, fragment.Name), - At(fragment.Position), - ) - } - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_variables.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_variables.go deleted file mode 100644 index 163ac895b..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_unused_variables.go +++ /dev/null @@ -1,32 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . 
"github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("NoUnusedVariables", func(observers *Events, addError AddErrFunc) { - observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) { - for _, varDef := range operation.VariableDefinitions { - if varDef.Used { - continue - } - - if operation.Name != "" { - addError( - Message(`Variable "$%s" is never used in operation "%s".`, varDef.Variable, operation.Name), - At(varDef.Position), - ) - } else { - addError( - Message(`Variable "$%s" is never used.`, varDef.Variable), - At(varDef.Position), - ) - } - } - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_argument_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_argument_names.go deleted file mode 100644 index 7458c5f6c..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_argument_names.go +++ /dev/null @@ -1,35 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . 
"github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("UniqueArgumentNames", func(observers *Events, addError AddErrFunc) { - observers.OnField(func(_ *Walker, field *ast.Field) { - checkUniqueArgs(field.Arguments, addError) - }) - - observers.OnDirective(func(_ *Walker, directive *ast.Directive) { - checkUniqueArgs(directive.Arguments, addError) - }) - }) -} - -func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) { - knownArgNames := map[string]int{} - - for _, arg := range args { - if knownArgNames[arg.Name] == 1 { - addError( - Message(`There can be only one argument named "%s".`, arg.Name), - At(arg.Position), - ) - } - - knownArgNames[arg.Name]++ - } -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_directives_per_location.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_directives_per_location.go deleted file mode 100644 index ecf5a0a82..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_directives_per_location.go +++ /dev/null @@ -1,26 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . 
"github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("UniqueDirectivesPerLocation", func(observers *Events, addError AddErrFunc) { - observers.OnDirectiveList(func(_ *Walker, directives []*ast.Directive) { - seen := map[string]bool{} - - for _, dir := range directives { - if dir.Name != "repeatable" && seen[dir.Name] { - addError( - Message(`The directive "@%s" can only be used once at this location.`, dir.Name), - At(dir.Position), - ) - } - seen[dir.Name] = true - } - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_fragment_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_fragment_names.go deleted file mode 100644 index c94f3ad27..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_fragment_names.go +++ /dev/null @@ -1,24 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . 
"github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("UniqueFragmentNames", func(observers *Events, addError AddErrFunc) { - seenFragments := map[string]bool{} - - observers.OnFragment(func(_ *Walker, fragment *ast.FragmentDefinition) { - if seenFragments[fragment.Name] { - addError( - Message(`There can be only one fragment named "%s".`, fragment.Name), - At(fragment.Position), - ) - } - seenFragments[fragment.Name] = true - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_input_field_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_input_field_names.go deleted file mode 100644 index a93d63bd1..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_input_field_names.go +++ /dev/null @@ -1,29 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . 
"github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("UniqueInputFieldNames", func(observers *Events, addError AddErrFunc) { - observers.OnValue(func(_ *Walker, value *ast.Value) { - if value.Kind != ast.ObjectValue { - return - } - - seen := map[string]bool{} - for _, field := range value.Children { - if seen[field.Name] { - addError( - Message(`There can be only one input field named "%s".`, field.Name), - At(field.Position), - ) - } - seen[field.Name] = true - } - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_operation_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_operation_names.go deleted file mode 100644 index dcd404dad..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_operation_names.go +++ /dev/null @@ -1,24 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . 
"github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("UniqueOperationNames", func(observers *Events, addError AddErrFunc) { - seen := map[string]bool{} - - observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) { - if seen[operation.Name] { - addError( - Message(`There can be only one operation named "%s".`, operation.Name), - At(operation.Position), - ) - } - seen[operation.Name] = true - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_variable_names.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_variable_names.go deleted file mode 100644 index 7a214dbe4..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/unique_variable_names.go +++ /dev/null @@ -1,26 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("UniqueVariableNames", func(observers *Events, addError AddErrFunc) { - observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) { - seen := map[string]int{} - for _, def := range operation.VariableDefinitions { - // add the same error only once per a variable. 
- if seen[def.Variable] == 1 { - addError( - Message(`There can be only one variable named "$%s".`, def.Variable), - At(def.Position), - ) - } - seen[def.Variable]++ - } - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go deleted file mode 100644 index afd9f54f1..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/values_of_correct_type.go +++ /dev/null @@ -1,168 +0,0 @@ -package validator - -import ( - "errors" - "fmt" - "strconv" - - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("ValuesOfCorrectType", func(observers *Events, addError AddErrFunc) { - observers.OnValue(func(_ *Walker, value *ast.Value) { - if value.Definition == nil || value.ExpectedType == nil { - return - } - - if value.Kind == ast.NullValue && value.ExpectedType.NonNull { - addError( - Message(`Expected value of type "%s", found %s.`, value.ExpectedType.String(), value.String()), - At(value.Position), - ) - } - - if value.Definition.Kind == ast.Scalar { - // Skip custom validating scalars - if !value.Definition.OneOf("Int", "Float", "String", "Boolean", "ID") { - return - } - } - - var possibleEnums []string - if value.Definition.Kind == ast.Enum { - for _, val := range value.Definition.EnumValues { - possibleEnums = append(possibleEnums, val.Name) - } - } - - rawVal, err := value.Value(nil) - if err != nil { - unexpectedTypeMessage(addError, value) - } - - switch value.Kind { - case ast.NullValue: - return - case ast.ListValue: - if value.ExpectedType.Elem == nil { - unexpectedTypeMessage(addError, value) - return - } - - case ast.IntValue: - if !value.Definition.OneOf("Int", "Float", "ID") { - 
unexpectedTypeMessage(addError, value) - } - - case ast.FloatValue: - if !value.Definition.OneOf("Float") { - unexpectedTypeMessage(addError, value) - } - - case ast.StringValue, ast.BlockValue: - if value.Definition.Kind == ast.Enum { - rawValStr := fmt.Sprint(rawVal) - addError( - Message(`Enum "%s" cannot represent non-enum value: %s.`, value.ExpectedType.String(), value.String()), - SuggestListQuoted("Did you mean the enum value", rawValStr, possibleEnums), - At(value.Position), - ) - } else if !value.Definition.OneOf("String", "ID") { - unexpectedTypeMessage(addError, value) - } - - case ast.EnumValue: - if value.Definition.Kind != ast.Enum { - rawValStr := fmt.Sprint(rawVal) - addError( - unexpectedTypeMessageOnly(value), - SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums), - At(value.Position), - ) - } else if value.Definition.EnumValues.ForName(value.Raw) == nil { - rawValStr := fmt.Sprint(rawVal) - addError( - Message(`Value "%s" does not exist in "%s" enum.`, value.String(), value.ExpectedType.String()), - SuggestListQuoted("Did you mean the enum value", rawValStr, possibleEnums), - At(value.Position), - ) - } - - case ast.BooleanValue: - if !value.Definition.OneOf("Boolean") { - unexpectedTypeMessage(addError, value) - } - - case ast.ObjectValue: - - for _, field := range value.Definition.Fields { - if field.Type.NonNull { - fieldValue := value.Children.ForName(field.Name) - if fieldValue == nil && field.DefaultValue == nil { - addError( - Message(`Field "%s.%s" of required type "%s" was not provided.`, value.Definition.Name, field.Name, field.Type.String()), - At(value.Position), - ) - continue - } - } - } - - for _, fieldValue := range value.Children { - if value.Definition.Fields.ForName(fieldValue.Name) == nil { - var suggestions []string - for _, fieldValue := range value.Definition.Fields { - suggestions = append(suggestions, fieldValue.Name) - } - - addError( - Message(`Field "%s" is not defined by type "%s".`, 
fieldValue.Name, value.Definition.Name), - SuggestListQuoted("Did you mean", fieldValue.Name, suggestions), - At(fieldValue.Position), - ) - } - } - - case ast.Variable: - return - - default: - panic(fmt.Errorf("unhandled %T", value)) - } - }) - }) -} - -func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) { - addError( - unexpectedTypeMessageOnly(v), - At(v.Position), - ) -} - -func unexpectedTypeMessageOnly(v *ast.Value) ErrorOption { - switch v.ExpectedType.String() { - case "Int", "Int!": - if _, err := strconv.ParseInt(v.Raw, 10, 32); err != nil && errors.Is(err, strconv.ErrRange) { - return Message(`Int cannot represent non 32-bit signed integer value: %s`, v.String()) - } - return Message(`Int cannot represent non-integer value: %s`, v.String()) - case "String", "String!", "[String]": - return Message(`String cannot represent a non string value: %s`, v.String()) - case "Boolean", "Boolean!": - return Message(`Boolean cannot represent a non boolean value: %s`, v.String()) - case "Float", "Float!": - return Message(`Float cannot represent non numeric value: %s`, v.String()) - case "ID", "ID!": - return Message(`ID cannot represent a non-string and non-integer value: %s`, v.String()) - default: - if v.Definition.Kind == ast.Enum { - return Message(`Enum "%s" cannot represent non-enum value: %s.`, v.ExpectedType.String(), v.String()) - } - return Message(`Expected value of type "%s", found %s.`, v.ExpectedType.String(), v.String()) - } -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_are_input_types.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_are_input_types.go deleted file mode 100644 index ea4dfcc5a..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_are_input_types.go +++ /dev/null @@ -1,30 +0,0 @@ -package validator - -import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - 
//nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" -) - -func init() { - AddRule("VariablesAreInputTypes", func(observers *Events, addError AddErrFunc) { - observers.OnOperation(func(_ *Walker, operation *ast.OperationDefinition) { - for _, def := range operation.VariableDefinitions { - if def.Definition == nil { - continue - } - if !def.Definition.IsInputType() { - addError( - Message( - `Variable "$%s" cannot be non-input type "%s".`, - def.Variable, - def.Type.String(), - ), - At(def.Position), - ) - } - } - }) - }) -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/validator.go b/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/validator.go deleted file mode 100644 index 05f5b9166..000000000 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/validator.go +++ /dev/null @@ -1,45 +0,0 @@ -package validator - -import ( - //nolint:revive - . "github.com/open-policy-agent/opa/internal/gqlparser/ast" - "github.com/open-policy-agent/opa/internal/gqlparser/gqlerror" -) - -type AddErrFunc func(options ...ErrorOption) - -type ruleFunc func(observers *Events, addError AddErrFunc) - -type rule struct { - name string - rule ruleFunc -} - -var rules []rule - -// addRule to rule set. -// f is called once each time `Validate` is executed. 
-func AddRule(name string, f ruleFunc) { - rules = append(rules, rule{name: name, rule: f}) -} - -func Validate(schema *Schema, doc *QueryDocument) gqlerror.List { - var errs gqlerror.List - - observers := &Events{} - for i := range rules { - rule := rules[i] - rule.rule(observers, func(options ...ErrorOption) { - err := &gqlerror.Error{ - Rule: rule.name, - } - for _, o := range options { - o(err) - } - errs = append(errs, err) - }) - } - - Walk(schema, doc, observers) - return errs -} diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/key_type.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/key_type.go index 98f0cc42e..61d23844a 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/key_type.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/key_type.go @@ -21,7 +21,7 @@ const ( // Accept is used when conversion from values given by // outside sources (such as JSON payloads) is required -func (keyType *KeyType) Accept(value interface{}) error { +func (keyType *KeyType) Accept(value any) error { var tmp KeyType switch x := value.(type) { case string: diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/signature.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/signature.go index 45e400176..c601c46ea 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/signature.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwa/signature.go @@ -32,7 +32,7 @@ const ( // Accept is used when conversion from values given by // outside sources (such as JSON payloads) is required -func (signature *SignatureAlgorithm) Accept(value interface{}) error { +func (signature *SignatureAlgorithm) Accept(value any) error { var tmp SignatureAlgorithm switch x := value.(type) { case string: diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/ecdsa.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/ecdsa.go index b46689f03..0677f4dc3 100644 --- 
a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/ecdsa.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/ecdsa.go @@ -39,12 +39,12 @@ func newECDSAPrivateKey(key *ecdsa.PrivateKey) (*ECDSAPrivateKey, error) { } // Materialize returns the EC-DSA public key represented by this JWK -func (k ECDSAPublicKey) Materialize() (interface{}, error) { +func (k ECDSAPublicKey) Materialize() (any, error) { return k.key, nil } // Materialize returns the EC-DSA private key represented by this JWK -func (k ECDSAPrivateKey) Materialize() (interface{}, error) { +func (k ECDSAPrivateKey) Materialize() (any, error) { return k.key, nil } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/headers.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/headers.go index b0fd51e90..b1a6763dd 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/headers.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/headers.go @@ -18,15 +18,15 @@ const ( // Headers provides a common interface to all future possible headers type Headers interface { - Get(string) (interface{}, bool) - Set(string, interface{}) error - Walk(func(string, interface{}) error) error + Get(string) (any, bool) + Set(string, any) error + Walk(func(string, any) error) error GetAlgorithm() jwa.SignatureAlgorithm GetKeyID() string GetKeyOps() KeyOperationList GetKeyType() jwa.KeyType GetKeyUsage() string - GetPrivateParams() map[string]interface{} + GetPrivateParams() map[string]any } // StandardHeaders stores the common JWK parameters @@ -36,7 +36,7 @@ type StandardHeaders struct { KeyOps KeyOperationList `json:"key_ops,omitempty"` // https://tools.ietf.org/html/rfc7517#section-4.3 KeyType jwa.KeyType `json:"kty,omitempty"` // https://tools.ietf.org/html/rfc7517#section-4.1 KeyUsage string `json:"use,omitempty"` // https://tools.ietf.org/html/rfc7517#section-4.2 - PrivateParams map[string]interface{} `json:"privateParams,omitempty"` // 
https://tools.ietf.org/html/rfc7515#section-4.1.4 + PrivateParams map[string]any `json:"privateParams,omitempty"` // https://tools.ietf.org/html/rfc7515#section-4.1.4 } // GetAlgorithm is a convenience function to retrieve the corresponding value stored in the StandardHeaders @@ -68,12 +68,12 @@ func (h *StandardHeaders) GetKeyUsage() string { } // GetPrivateParams is a convenience function to retrieve the corresponding value stored in the StandardHeaders -func (h *StandardHeaders) GetPrivateParams() map[string]interface{} { +func (h *StandardHeaders) GetPrivateParams() map[string]any { return h.PrivateParams } // Get is a general getter function for JWK StandardHeaders structure -func (h *StandardHeaders) Get(name string) (interface{}, bool) { +func (h *StandardHeaders) Get(name string) (any, bool) { switch name { case AlgorithmKey: alg := h.GetAlgorithm() @@ -117,7 +117,7 @@ func (h *StandardHeaders) Get(name string) (interface{}, bool) { } // Set is a general getter function for JWK StandardHeaders structure -func (h *StandardHeaders) Set(name string, value interface{}) error { +func (h *StandardHeaders) Set(name string, value any) error { switch name { case AlgorithmKey: var acceptor jwa.SignatureAlgorithm @@ -149,7 +149,7 @@ func (h *StandardHeaders) Set(name string, value interface{}) error { } return fmt.Errorf("invalid value for %s key: %T", KeyUsageKey, value) case PrivateParamsKey: - if v, ok := value.(map[string]interface{}); ok { + if v, ok := value.(map[string]any); ok { h.PrivateParams = v return nil } @@ -160,7 +160,7 @@ func (h *StandardHeaders) Set(name string, value interface{}) error { } // Walk iterates over all JWK standard headers fields while applying a function to its value. 
-func (h StandardHeaders) Walk(f func(string, interface{}) error) error { +func (h StandardHeaders) Walk(f func(string, any) error) error { for _, key := range []string{AlgorithmKey, KeyIDKey, KeyOpsKey, KeyTypeKey, KeyUsageKey, PrivateParamsKey} { if v, ok := h.Get(key); ok { if err := f(key, v); err != nil { diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/interface.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/interface.go index 7a7d03ef1..9c7846269 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/interface.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/interface.go @@ -24,7 +24,7 @@ type Key interface { // RSA types would create *rsa.PublicKey or *rsa.PrivateKey, // EC types would create *ecdsa.PublicKey or *ecdsa.PrivateKey, // and OctetSeq types create a []byte key. - Materialize() (interface{}, error) + Materialize() (any, error) GenerateKey(*RawKeyJSON) error } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go index 7de27d4e4..b13245d17 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/jwk.go @@ -15,7 +15,7 @@ import ( // For rsa key types *rsa.PublicKey is returned; for ecdsa key types *ecdsa.PublicKey; // for byte slice (raw) keys, the key itself is returned. 
If the corresponding // public key cannot be deduced, an error is returned -func GetPublicKey(key interface{}) (interface{}, error) { +func GetPublicKey(key any) (any, error) { if key == nil { return nil, errors.New("jwk.New requires a non-nil key") } @@ -23,7 +23,7 @@ func GetPublicKey(key interface{}) (interface{}, error) { switch v := key.(type) { // Mental note: although Public() is defined in both types, // you can not coalesce the clauses for rsa.PrivateKey and - // ecdsa.PrivateKey, as then `v` becomes interface{} + // ecdsa.PrivateKey, as then `v` becomes any // b/c the compiler cannot deduce the exact type. case *rsa.PrivateKey: return v.Public(), nil @@ -37,7 +37,7 @@ func GetPublicKey(key interface{}) (interface{}, error) { } // GetKeyTypeFromKey creates a jwk.Key from the given key. -func GetKeyTypeFromKey(key interface{}) jwa.KeyType { +func GetKeyTypeFromKey(key any) jwa.KeyType { switch key.(type) { case *rsa.PrivateKey, *rsa.PublicKey: @@ -52,7 +52,7 @@ func GetKeyTypeFromKey(key interface{}) jwa.KeyType { } // New creates a jwk.Key from the given key. 
-func New(key interface{}) (Key, error) { +func New(key any) (Key, error) { if key == nil { return nil, errors.New("jwk.New requires a non-nil key") } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/key_ops.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/key_ops.go index c02b0b999..628caae4a 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/key_ops.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/key_ops.go @@ -39,7 +39,7 @@ const ( ) // Accept determines if Key Operation is valid -func (keyOperationList *KeyOperationList) Accept(v interface{}) error { +func (keyOperationList *KeyOperationList) Accept(v any) error { switch x := v.(type) { case KeyOperationList: *keyOperationList = x diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/rsa.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/rsa.go index 11b8e3b56..d7b508941 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/rsa.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/rsa.go @@ -65,7 +65,7 @@ func newRSAPrivateKey(key *rsa.PrivateKey) (*RSAPrivateKey, error) { } // Materialize returns the standard RSA Public Key representation stored in the internal representation -func (k *RSAPublicKey) Materialize() (interface{}, error) { +func (k *RSAPublicKey) Materialize() (any, error) { if k.key == nil { return nil, errors.New("key has no rsa.PublicKey associated with it") } @@ -73,7 +73,7 @@ func (k *RSAPublicKey) Materialize() (interface{}, error) { } // Materialize returns the standard RSA Private Key representation stored in the internal representation -func (k *RSAPrivateKey) Materialize() (interface{}, error) { +func (k *RSAPrivateKey) Materialize() (any, error) { if k.key == nil { return nil, errors.New("key has no rsa.PrivateKey associated with it") } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/symmetric.go 
b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/symmetric.go index e0cc0751e..e76189f52 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/symmetric.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jwk/symmetric.go @@ -21,7 +21,7 @@ func newSymmetricKey(key []byte) (*SymmetricKey, error) { // Materialize returns the octets for this symmetric key. // Since this is a symmetric key, this just calls Octets -func (s SymmetricKey) Materialize() (interface{}, error) { +func (s SymmetricKey) Materialize() (any, error) { return s.Octets(), nil } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/headers.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/headers.go index 0c8b35508..dcadea43e 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/headers.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/headers.go @@ -20,8 +20,8 @@ const ( // Headers provides a common interface for common header parameters type Headers interface { - Get(string) (interface{}, bool) - Set(string, interface{}) error + Get(string) (any, bool) + Set(string, any) error GetAlgorithm() jwa.SignatureAlgorithm } @@ -33,7 +33,7 @@ type StandardHeaders struct { JWK string `json:"jwk,omitempty"` // https://tools.ietf.org/html/rfc7515#section-4.1.3 JWKSetURL string `json:"jku,omitempty"` // https://tools.ietf.org/html/rfc7515#section-4.1.2 KeyID string `json:"kid,omitempty"` // https://tools.ietf.org/html/rfc7515#section-4.1.4 - PrivateParams map[string]interface{} `json:"privateParams,omitempty"` // https://tools.ietf.org/html/rfc7515#section-4.1.9 + PrivateParams map[string]any `json:"privateParams,omitempty"` // https://tools.ietf.org/html/rfc7515#section-4.1.9 Type string `json:"typ,omitempty"` // https://tools.ietf.org/html/rfc7515#section-4.1.9 } @@ -43,7 +43,7 @@ func (h *StandardHeaders) GetAlgorithm() jwa.SignatureAlgorithm { } // Get is a general getter function for StandardHeaders structure 
-func (h *StandardHeaders) Get(name string) (interface{}, bool) { +func (h *StandardHeaders) Get(name string) (any, bool) { switch name { case AlgorithmKey: v := h.Algorithm @@ -99,7 +99,7 @@ func (h *StandardHeaders) Get(name string) (interface{}, bool) { } // Set is a general setter function for StandardHeaders structure -func (h *StandardHeaders) Set(name string, value interface{}) error { +func (h *StandardHeaders) Set(name string, value any) error { switch name { case AlgorithmKey: if err := h.Algorithm.Accept(value); err != nil { @@ -137,7 +137,7 @@ func (h *StandardHeaders) Set(name string, value interface{}) error { } return fmt.Errorf("invalid value for %s key: %T", KeyIDKey, value) case PrivateParamsKey: - if v, ok := value.(map[string]interface{}); ok { + if v, ok := value.(map[string]any); ok { h.PrivateParams = v return nil } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go index 20fb957d3..b2b224830 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/jws.go @@ -38,7 +38,7 @@ import ( // SignLiteral generates a Signature for the given Payload and Headers, and serializes // it in compact serialization format. In this format you may NOT use // multiple signers. -func SignLiteral(payload []byte, alg jwa.SignatureAlgorithm, key interface{}, hdrBuf []byte, rnd io.Reader) ([]byte, error) { +func SignLiteral(payload []byte, alg jwa.SignatureAlgorithm, key any, hdrBuf []byte, rnd io.Reader) ([]byte, error) { encodedHdr := base64.RawURLEncoding.EncodeToString(hdrBuf) encodedPayload := base64.RawURLEncoding.EncodeToString(payload) signingInput := strings.Join( @@ -77,7 +77,7 @@ func SignLiteral(payload []byte, alg jwa.SignatureAlgorithm, key interface{}, hd // multiple signers. // // If you would like to pass custom Headers, use the WithHeaders option. 
-func SignWithOption(payload []byte, alg jwa.SignatureAlgorithm, key interface{}) ([]byte, error) { +func SignWithOption(payload []byte, alg jwa.SignatureAlgorithm, key any) ([]byte, error) { var headers Headers = &StandardHeaders{} err := headers.Set(AlgorithmKey, alg) @@ -99,7 +99,7 @@ func SignWithOption(payload []byte, alg jwa.SignatureAlgorithm, key interface{}) // Payload that was signed is returned. If you need more fine-grained // control of the verification process, manually call `Parse`, generate a // verifier, and call `Verify` on the parsed JWS message object. -func Verify(buf []byte, alg jwa.SignatureAlgorithm, key interface{}) (ret []byte, err error) { +func Verify(buf []byte, alg jwa.SignatureAlgorithm, key any) (ret []byte, err error) { verifier, err := verify.New(alg) if err != nil { diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/ecdsa.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/ecdsa.go index db1aadec6..5f3e8acca 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/ecdsa.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/ecdsa.go @@ -72,7 +72,7 @@ func (s ECDSASigner) Algorithm() jwa.SignatureAlgorithm { // SignWithRand signs payload with a ECDSA private key and a provided randomness // source (such as `rand.Reader`). 
-func (s ECDSASigner) SignWithRand(payload []byte, key interface{}, r io.Reader) ([]byte, error) { +func (s ECDSASigner) SignWithRand(payload []byte, key any, r io.Reader) ([]byte, error) { if key == nil { return nil, errors.New("missing private key while signing payload") } @@ -85,6 +85,6 @@ func (s ECDSASigner) SignWithRand(payload []byte, key interface{}, r io.Reader) } // Sign signs payload with a ECDSA private key -func (s ECDSASigner) Sign(payload []byte, key interface{}) ([]byte, error) { +func (s ECDSASigner) Sign(payload []byte, key any) ([]byte, error) { return s.SignWithRand(payload, key, rand.Reader) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/hmac.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/hmac.go index a4fad4208..de541755e 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/hmac.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/hmac.go @@ -52,7 +52,7 @@ func (s HMACSigner) Algorithm() jwa.SignatureAlgorithm { } // Sign signs payload with a Symmetric key -func (s HMACSigner) Sign(payload []byte, key interface{}) ([]byte, error) { +func (s HMACSigner) Sign(payload []byte, key any) ([]byte, error) { hmackey, ok := key.([]byte) if !ok { return nil, fmt.Errorf(`invalid key type %T. []byte is required`, key) diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/interface.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/interface.go index 2ef2bee48..25b592ed4 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/interface.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/interface.go @@ -16,7 +16,7 @@ type Signer interface { // for `jwa.RSXXX` and `jwa.PSXXX` types, you need to pass the // `*"crypto/rsa".PrivateKey` type. 
// Check the documentation for each signer for details - Sign(payload []byte, key interface{}) ([]byte, error) + Sign(payload []byte, key any) ([]byte, error) Algorithm() jwa.SignatureAlgorithm } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/rsa.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/rsa.go index 1e02993eb..a671b7318 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/rsa.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/rsa.go @@ -84,7 +84,7 @@ func (s RSASigner) Algorithm() jwa.SignatureAlgorithm { // Sign creates a signature using crypto/rsa. key must be a non-nil instance of // `*"crypto/rsa".PrivateKey`. -func (s RSASigner) Sign(payload []byte, key interface{}) ([]byte, error) { +func (s RSASigner) Sign(payload []byte, key any) ([]byte, error) { if key == nil { return nil, errors.New(`missing private key while signing payload`) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/sign.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/sign.go index fd123eb75..c1432236f 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/sign.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/sign/sign.go @@ -26,7 +26,7 @@ func New(alg jwa.SignatureAlgorithm) (Signer, error) { // GetSigningKey returns a *rsa.PrivateKey or *ecdsa.PrivateKey typically encoded in PEM blocks of type "RSA PRIVATE KEY" // or "EC PRIVATE KEY" for RSA and ECDSA family of algorithms. 
// For HMAC family, it return a []byte value -func GetSigningKey(key string, alg jwa.SignatureAlgorithm) (interface{}, error) { +func GetSigningKey(key string, alg jwa.SignatureAlgorithm) (any, error) { switch alg { case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512: block, _ := pem.Decode([]byte(key)) diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/ecdsa.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/ecdsa.go index 0d4971dc1..ba32078ac 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/ecdsa.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/ecdsa.go @@ -54,7 +54,7 @@ func newECDSA(alg jwa.SignatureAlgorithm) (*ECDSAVerifier, error) { } // Verify checks whether the signature for a given input and key is correct -func (v ECDSAVerifier) Verify(payload []byte, signature []byte, key interface{}) error { +func (v ECDSAVerifier) Verify(payload []byte, signature []byte, key any) error { if key == nil { return errors.New(`missing public key while verifying payload`) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/hmac.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/hmac.go index d8498f50f..25651a0f8 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/hmac.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/hmac.go @@ -19,7 +19,7 @@ func newHMAC(alg jwa.SignatureAlgorithm) (*HMACVerifier, error) { } // Verify checks whether the signature for a given input and key is correct -func (v HMACVerifier) Verify(signingInput, signature []byte, key interface{}) (err error) { +func (v HMACVerifier) Verify(signingInput, signature []byte, key any) (err error) { expected, err := v.signer.Sign(signingInput, key) if err != nil { diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/interface.go 
b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/interface.go index f5beb6974..e72c3ed7f 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/interface.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/interface.go @@ -16,7 +16,7 @@ type Verifier interface { // for `jwa.RSXXX` and `jwa.PSXXX` types, you need to pass the // `*"crypto/rsa".PublicKey` type. // Check the documentation for each verifier for details - Verify(payload []byte, signature []byte, key interface{}) error + Verify(payload []byte, signature []byte, key any) error } type rsaVerifyFunc func([]byte, []byte, *rsa.PublicKey) error diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/rsa.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/rsa.go index edc560dfa..163ff84bc 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/rsa.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/rsa.go @@ -75,7 +75,7 @@ func newRSA(alg jwa.SignatureAlgorithm) (*RSAVerifier, error) { } // Verify checks if a JWS is valid. -func (v RSAVerifier) Verify(payload, signature []byte, key interface{}) error { +func (v RSAVerifier) Verify(payload, signature []byte, key any) error { if key == nil { return errors.New(`missing public key while verifying payload`) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/verify.go b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/verify.go index 04ee9141e..7370b4a2f 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/verify.go +++ b/vendor/github.com/open-policy-agent/opa/internal/jwx/jws/verify/verify.go @@ -29,7 +29,7 @@ func New(alg jwa.SignatureAlgorithm) (Verifier, error) { // GetSigningKey returns a *rsa.PublicKey or *ecdsa.PublicKey typically encoded in PEM blocks of type "PUBLIC KEY", // for RSA and ECDSA family of algorithms. 
// For HMAC family, it return a []byte value -func GetSigningKey(key string, alg jwa.SignatureAlgorithm) (interface{}, error) { +func GetSigningKey(key string, alg jwa.SignatureAlgorithm) (any, error) { switch alg { case jwa.RS256, jwa.RS384, jwa.RS512, jwa.PS256, jwa.PS384, jwa.PS512, jwa.ES256, jwa.ES384, jwa.ES512: block, _ := pem.Decode([]byte(key)) diff --git a/vendor/github.com/open-policy-agent/opa/internal/merge/merge.go b/vendor/github.com/open-policy-agent/opa/internal/merge/merge.go index 16f39350b..ba1a09c32 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/merge/merge.go +++ b/vendor/github.com/open-policy-agent/opa/internal/merge/merge.go @@ -8,7 +8,7 @@ package merge // InterfaceMaps returns the result of merging a and b. If a and b cannot be // merged because of conflicting key-value pairs, ok is false. -func InterfaceMaps(a map[string]interface{}, b map[string]interface{}) (map[string]interface{}, bool) { +func InterfaceMaps(a map[string]any, b map[string]any) (map[string]any, bool) { if a == nil { return b, true @@ -21,7 +21,7 @@ func InterfaceMaps(a map[string]interface{}, b map[string]interface{}) (map[stri return merge(a, b), true } -func merge(a, b map[string]interface{}) map[string]interface{} { +func merge(a, b map[string]any) map[string]any { for k := range b { @@ -32,8 +32,8 @@ func merge(a, b map[string]interface{}) map[string]interface{} { continue } - existObj := exist.(map[string]interface{}) - addObj := add.(map[string]interface{}) + existObj := exist.(map[string]any) + addObj := add.(map[string]any) a[k] = merge(existObj, addObj) } @@ -41,7 +41,7 @@ func merge(a, b map[string]interface{}) map[string]interface{} { return a } -func hasConflicts(a, b map[string]interface{}) bool { +func hasConflicts(a, b map[string]any) bool { for k := range b { add := b[k] @@ -50,8 +50,8 @@ func hasConflicts(a, b map[string]interface{}) bool { continue } - existObj, existOk := exist.(map[string]interface{}) - addObj, addOk := 
add.(map[string]interface{}) + existObj, existOk := exist.(map[string]any) + addObj, addOk := add.(map[string]any) if !existOk || !addOk { return true } diff --git a/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go b/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go index 661703e08..8d5915871 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go +++ b/vendor/github.com/open-policy-agent/opa/internal/planner/planner.go @@ -51,10 +51,10 @@ type Planner struct { // debugf prepends the planner location. We're passing callstack depth 2 because // it should still log the file location of p.debugf. -func (p *Planner) debugf(format string, args ...interface{}) { +func (p *Planner) debugf(format string, args ...any) { var msg string if p.loc != nil { - msg = fmt.Sprintf("%s: "+format, append([]interface{}{p.loc}, args...)...) + msg = fmt.Sprintf("%s: "+format, append([]any{p.loc}, args...)...) } else { msg = fmt.Sprintf(format, args...) } @@ -211,13 +211,15 @@ func (p *Planner) planRules(rules []*ast.Rule) (string, error) { // Set the location to the rule head. p.loc = rules[0].Head.Loc() + pcount := p.funcs.argVars() + params := make([]ir.Local, 0, pcount+len(rules[0].Head.Args)) + for range pcount { + params = append(params, p.newLocal()) + } // Create function definition for rules. 
fn := &ir.Func{ - Name: fmt.Sprintf("g%d.%s", p.funcs.gen(), path), - Params: []ir.Local{ - p.newLocal(), // input document - p.newLocal(), // data document - }, + Name: fmt.Sprintf("g%d.%s", p.funcs.gen(), path), + Params: params, Return: p.newLocal(), Path: append([]string{fmt.Sprintf("g%d", p.funcs.gen())}, pathPieces...), } @@ -227,7 +229,10 @@ func (p *Planner) planRules(rules []*ast.Rule) (string, error) { fn.Params = append(fn.Params, p.newLocal()) } - params := fn.Params[2:] + // only those added as formal parameters: + // f(x, y) is planned as f(data, input, x, y) + // pcount > 2 means there are vars passed along through with replacements by variables + params = fn.Params[pcount:] // Initialize return value for partial set/object rules. Complete document // rules assign directly to `fn.Return`. @@ -301,10 +306,11 @@ func (p *Planner) planRules(rules []*ast.Rule) (string, error) { // Setup planner for block. p.lnext = lnext - p.vars = newVarstack(map[ast.Var]ir.Local{ - ast.InputRootDocument.Value.(ast.Var): fn.Params[0], - ast.DefaultRootDocument.Value.(ast.Var): fn.Params[1], - }) + vs := make(map[ast.Var]ir.Local, p.funcs.argVars()) + for i, v := range p.funcs.vars() { + vs[v] = fn.Params[i] + } + p.vars = newVarstack(vs) curr := &ir.Block{} *blocks = append(*blocks, curr) @@ -672,13 +678,17 @@ func (p *Planner) planWith(e *ast.Expr, iter planiter) error { values := make([]*ast.Term, 0, len(e.With)) // NOTE(sr): we could be overallocating if there are builtin replacements targets := make([]ast.Ref, 0, len(e.With)) + vars := []ast.Var{} mocks := frame{} for _, w := range e.With { v := w.Target.Value.(ast.Ref) switch { - case p.isFunction(v): // nothing to do + case p.isFunctionOrBuiltin(v): // track var values + if wvar, ok := w.Value.Value.(ast.Var); ok { + vars = append(vars, wvar) + } case ast.DefaultRootDocument.Equal(v[0]) || ast.InputRootDocument.Equal(v[0]): @@ -735,7 +745,7 @@ func (p *Planner) planWith(e *ast.Expr, iter planiter) error { // 
planning of this expression (transitively). shadowing := p.dataRefsShadowRuletrie(dataRefs) || len(mocks) > 0 if shadowing { - p.funcs.Push(map[string]string{}) + p.funcs.Push(map[string]string{}, vars) for _, ref := range dataRefs { p.rules.Push(ref) } @@ -756,7 +766,7 @@ func (p *Planner) planWith(e *ast.Expr, iter planiter) error { p.mocks.PushFrame(mocks) if shadowing { - p.funcs.Push(map[string]string{}) + p.funcs.Push(map[string]string{}, vars) for _, ref := range dataRefs { p.rules.Push(ref) } @@ -990,6 +1000,15 @@ func (p *Planner) planExprCall(e *ast.Expr, iter planiter) error { op := e.Operator() if replacement := p.mocks.Lookup(operator); replacement != nil { + if _, ok := replacement.Value.(ast.Var); ok { + var arity int + if node := p.rules.Lookup(op); node != nil { + arity = node.Arity() // NB(sr): We don't need to plan what isn't called, only lookup arity + } else if bi, ok := p.decls[operator]; ok { + arity = bi.Decl.Arity() + } + return p.planExprCallValue(replacement, arity, operands, iter) + } if r, ok := replacement.Value.(ast.Ref); ok { if !r.HasPrefix(ast.DefaultRootRef) && !r.HasPrefix(ast.InputRootRef) { // replacement is builtin @@ -1018,7 +1037,7 @@ func (p *Planner) planExprCall(e *ast.Expr, iter planiter) error { // replacement is a value, or ref if bi, ok := p.decls[operator]; ok { - return p.planExprCallValue(replacement, len(bi.Decl.FuncArgs().Args), operands, iter) + return p.planExprCallValue(replacement, bi.Decl.Arity(), operands, iter) } if node := p.rules.Lookup(op); node != nil { return p.planExprCallValue(replacement, node.Arity(), operands, iter) @@ -1562,9 +1581,7 @@ func (p *Planner) planString(str ast.String, iter planiter) error { } func (p *Planner) planVar(v ast.Var, iter planiter) error { - p.ltarget = op(p.vars.GetOrElse(v, func() ir.Local { - return p.newLocal() - })) + p.ltarget = op(p.vars.GetOrElse(v, p.newLocal)) return iter() } @@ -1922,12 +1939,15 @@ func (p *Planner) planRefData(virtual *ruletrie, base 
*baseptr, ref ast.Ref, ind if err != nil { return err } - - p.appendStmt(&ir.CallStmt{ + call := ir.CallStmt{ Func: funcName, - Args: p.defaultOperands(), + Args: make([]ir.Operand, 0, p.funcs.argVars()), Result: p.ltarget.Value.(ir.Local), - }) + } + for _, v := range p.funcs.vars() { + call.Args = append(call.Args, p.vars.GetOpOrEmpty(v)) + } + p.appendStmt(&call) return p.planRefRec(ref, index+1, iter) } @@ -2551,17 +2571,20 @@ func (p *Planner) unseenVars(t *ast.Term) bool { } func (p *Planner) defaultOperands() []ir.Operand { - return []ir.Operand{ - p.vars.GetOpOrEmpty(ast.InputRootDocument.Value.(ast.Var)), - p.vars.GetOpOrEmpty(ast.DefaultRootDocument.Value.(ast.Var)), + pcount := p.funcs.argVars() + operands := make([]ir.Operand, pcount) + for i, v := range p.funcs.vars() { + operands[i] = p.vars.GetOpOrEmpty(v) } + return operands } -func (p *Planner) isFunction(r ast.Ref) bool { +func (p *Planner) isFunctionOrBuiltin(r ast.Ref) bool { if node := p.rules.Lookup(r); node != nil { return node.Arity() > 0 } - return false + _, ok := p.decls[r.String()] + return ok } func op(v ir.Val) ir.Operand { diff --git a/vendor/github.com/open-policy-agent/opa/internal/planner/rules.go b/vendor/github.com/open-policy-agent/opa/internal/planner/rules.go index cc7f12bd2..9f3d11529 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/planner/rules.go +++ b/vendor/github.com/open-policy-agent/opa/internal/planner/rules.go @@ -20,20 +20,44 @@ type funcstack struct { } type taggedPairs struct { - pairs map[string]string - gen int + pairs map[string]string + vars []ast.Var + vcount int + gen int } func newFuncstack() *funcstack { return &funcstack{ - stack: []taggedPairs{{pairs: map[string]string{}, gen: 0}}, - next: 1} + stack: []taggedPairs{ + { + pairs: map[string]string{}, + gen: 0, + vars: []ast.Var{ + ast.InputRootDocument.Value.(ast.Var), + ast.DefaultRootDocument.Value.(ast.Var), + }, + vcount: 2, + }, + }, + next: 1} } func (p funcstack) last() taggedPairs { 
return p.stack[len(p.stack)-1] } +func (p funcstack) argVars() int { + return p.last().vcount +} + +func (p funcstack) vars() []ast.Var { + ret := make([]ast.Var, 0, p.last().vcount) + for i := range p.stack { + ret = append(ret, p.stack[i].vars...) + } + return ret +} + func (p funcstack) Add(key, value string) { p.last().pairs[key] = value } @@ -43,8 +67,13 @@ func (p funcstack) Get(key string) (string, bool) { return value, ok } -func (p *funcstack) Push(funcs map[string]string) { - p.stack = append(p.stack, taggedPairs{pairs: funcs, gen: p.next}) +func (p *funcstack) Push(funcs map[string]string, vars []ast.Var) { + p.stack = append(p.stack, taggedPairs{ + pairs: funcs, + gen: p.next, + vars: vars, + vcount: p.last().vcount + len(vars), + }) p.next++ } diff --git a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/compare.go b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/compare.go index 103dc7766..e2514423b 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/compare.go +++ b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/crypto/compare.go @@ -16,7 +16,7 @@ func ConstantTimeByteCompare(x, y []byte) (int, error) { xLarger, yLarger := 0, 0 - for i := 0; i < len(x); i++ { + for i := range x { xByte, yByte := int(x[i]), int(y[i]) x := ((yByte - xByte) >> 8) & 1 diff --git a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/util.go b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/util.go index 9ce9af90d..d43339c96 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/providers/aws/util.go +++ b/vendor/github.com/open-policy-agent/opa/internal/providers/aws/util.go @@ -18,7 +18,7 @@ func DoRequestWithClient(req *http.Request, client *http.Client, desc string, lo } defer resp.Body.Close() - logger.WithFields(map[string]interface{}{ + logger.WithFields(map[string]any{ "url": req.URL.String(), "status": resp.Status, "headers": resp.Header, 
diff --git a/vendor/github.com/open-policy-agent/opa/internal/ref/ref.go b/vendor/github.com/open-policy-agent/opa/internal/ref/ref.go index 173b5a3c1..653794b0a 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/ref/ref.go +++ b/vendor/github.com/open-policy-agent/opa/internal/ref/ref.go @@ -16,10 +16,7 @@ import ( // ParseDataPath returns a ref from the slash separated path s rooted at data. // All path segments are treated as identifier strings. func ParseDataPath(s string) (ast.Ref, error) { - - s = "/" + strings.TrimPrefix(s, "/") - - path, ok := storage.ParsePath(s) + path, ok := storage.ParsePath("/" + strings.TrimPrefix(s, "/")) if !ok { return nil, errors.New("invalid path") } @@ -29,7 +26,7 @@ func ParseDataPath(s string) (ast.Ref, error) { // ArrayPath will take an ast.Array and build an ast.Ref using the ast.Terms in the Array func ArrayPath(a *ast.Array) ast.Ref { - var ref ast.Ref + ref := make(ast.Ref, 0, a.Len()) a.Foreach(func(term *ast.Term) { ref = append(ref, term) diff --git a/vendor/github.com/open-policy-agent/opa/internal/rego/opa/engine.go b/vendor/github.com/open-policy-agent/opa/internal/rego/opa/engine.go index 36ee84450..7defdf788 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/rego/opa/engine.go +++ b/vendor/github.com/open-policy-agent/opa/internal/rego/opa/engine.go @@ -36,10 +36,10 @@ type EvalEngine interface { Init() (EvalEngine, error) Entrypoints(context.Context) (map[string]int32, error) WithPolicyBytes([]byte) EvalEngine - WithDataJSON(interface{}) EvalEngine + WithDataJSON(any) EvalEngine Eval(context.Context, EvalOpts) (*Result, error) - SetData(context.Context, interface{}) error - SetDataPath(context.Context, []string, interface{}) error + SetData(context.Context, any) error + SetDataPath(context.Context, []string, any) error RemoveDataPath(context.Context, []string) error Close() } diff --git a/vendor/github.com/open-policy-agent/opa/internal/rego/opa/options.go 
b/vendor/github.com/open-policy-agent/opa/internal/rego/opa/options.go index 072e37667..97aa41bf0 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/rego/opa/options.go +++ b/vendor/github.com/open-policy-agent/opa/internal/rego/opa/options.go @@ -18,7 +18,7 @@ type Result struct { // EvalOpts define options for performing an evaluation. type EvalOpts struct { - Input *interface{} + Input *any Metrics metrics.Metrics Entrypoint int32 Time time.Time diff --git a/vendor/github.com/open-policy-agent/opa/internal/report/report.go b/vendor/github.com/open-policy-agent/opa/internal/report/report.go index 55f4cfe21..b517864ed 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/report/report.go +++ b/vendor/github.com/open-policy-agent/opa/internal/report/report.go @@ -81,9 +81,9 @@ func New(id string, opts Options) (*Reporter, error) { url = ExternalServiceURL } - restConfig := []byte(fmt.Sprintf(`{ + restConfig := fmt.Appendf(nil, `{ "url": %q, - }`, url)) + }`, url) client, err := rest.New(restConfig, map[string]*keys.Config{}, rest.Logger(opts.Logger)) if err != nil { diff --git a/vendor/github.com/open-policy-agent/opa/internal/strvals/parser.go b/vendor/github.com/open-policy-agent/opa/internal/strvals/parser.go index 3b12d9526..6d867262f 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/strvals/parser.go +++ b/vendor/github.com/open-policy-agent/opa/internal/strvals/parser.go @@ -46,8 +46,8 @@ func ToYAML(s string) (string, error) { // Parse parses a set line. // // A set line is of the form name1=value1,name2=value2 -func Parse(s string) (map[string]interface{}, error) { - vals := map[string]interface{}{} +func Parse(s string) (map[string]any, error) { + vals := map[string]any{} scanner := bytes.NewBufferString(s) t := newParser(scanner, vals, false) err := t.parse() @@ -57,8 +57,8 @@ func Parse(s string) (map[string]interface{}, error) { // ParseString parses a set line and forces a string value. 
// // A set line is of the form name1=value1,name2=value2 -func ParseString(s string) (map[string]interface{}, error) { - vals := map[string]interface{}{} +func ParseString(s string) (map[string]any, error) { + vals := map[string]any{} scanner := bytes.NewBufferString(s) t := newParser(scanner, vals, true) err := t.parse() @@ -69,7 +69,7 @@ func ParseString(s string) (map[string]interface{}, error) { // // If the strval string has a key that exists in dest, it overwrites the // dest version. -func ParseInto(s string, dest map[string]interface{}) error { +func ParseInto(s string, dest map[string]any) error { scanner := bytes.NewBufferString(s) t := newParser(scanner, dest, false) return t.parse() @@ -78,7 +78,7 @@ func ParseInto(s string, dest map[string]interface{}) error { // ParseIntoFile parses a filevals line and merges the result into dest. // // This method always returns a string as the value. -func ParseIntoFile(s string, dest map[string]interface{}, runesToVal runesToVal) error { +func ParseIntoFile(s string, dest map[string]any, runesToVal runesToVal) error { scanner := bytes.NewBufferString(s) t := newFileParser(scanner, dest, runesToVal) return t.parse() @@ -87,7 +87,7 @@ func ParseIntoFile(s string, dest map[string]interface{}, runesToVal runesToVal) // ParseIntoString parses a strvals line and merges the result into dest. // // This method always returns a string as the value. 
-func ParseIntoString(s string, dest map[string]interface{}) error { +func ParseIntoString(s string, dest map[string]any) error { scanner := bytes.NewBufferString(s) t := newParser(scanner, dest, true) return t.parse() @@ -101,20 +101,20 @@ func ParseIntoString(s string, dest map[string]interface{}) error { // where st is a boolean to figure out if we're forcing it to parse values as string type parser struct { sc *bytes.Buffer - data map[string]interface{} + data map[string]any runesToVal runesToVal } -type runesToVal func([]rune) (interface{}, error) +type runesToVal func([]rune) (any, error) -func newParser(sc *bytes.Buffer, data map[string]interface{}, stringBool bool) *parser { - rs2v := func(rs []rune) (interface{}, error) { +func newParser(sc *bytes.Buffer, data map[string]any, stringBool bool) *parser { + rs2v := func(rs []rune) (any, error) { return typedVal(rs, stringBool), nil } return &parser{sc: sc, data: data, runesToVal: rs2v} } -func newFileParser(sc *bytes.Buffer, data map[string]interface{}, runesToVal runesToVal) *parser { +func newFileParser(sc *bytes.Buffer, data map[string]any, runesToVal runesToVal) *parser { return &parser{sc: sc, data: data, runesToVal: runesToVal} } @@ -139,7 +139,7 @@ func runeSet(r []rune) map[rune]bool { return s } -func (t *parser) key(data map[string]interface{}) error { +func (t *parser) key(data map[string]any) error { stop := runeSet([]rune{'=', '[', ',', '.'}) for { switch k, last, err := runesUntil(t.sc, stop); { @@ -156,9 +156,9 @@ func (t *parser) key(data map[string]interface{}) error { } kk := string(k) // Find or create target list - list := []interface{}{} + list := []any{} if _, ok := data[kk]; ok { - list = data[kk].([]interface{}) + list = data[kk].([]any) } // Now we need to get the value after the ]. 
@@ -194,9 +194,9 @@ func (t *parser) key(data map[string]interface{}) error { return fmt.Errorf("key %q has no value (cannot end with ,)", string(k)) case last == '.': // First, create or find the target map. - inner := map[string]interface{}{} + inner := map[string]any{} if _, ok := data[string(k)]; ok { - inner = data[string(k)].(map[string]interface{}) + inner = data[string(k)].(map[string]any) } // Recurse @@ -210,7 +210,7 @@ func (t *parser) key(data map[string]interface{}) error { } } -func set(data map[string]interface{}, key string, val interface{}) { +func set(data map[string]any, key string, val any) { // If key is empty, don't set it. if len(key) == 0 { return @@ -218,7 +218,7 @@ func set(data map[string]interface{}, key string, val interface{}) { data[key] = val } -func setIndex(list []interface{}, index int, val interface{}) (l2 []interface{}, err error) { +func setIndex(list []any, index int, val any) (l2 []any, err error) { // There are possible index values that are out of range on a target system // causing a panic. This will catch the panic and return an error instead. // The value of the index that causes a panic varies from system to system. 
@@ -235,7 +235,7 @@ func setIndex(list []interface{}, index int, val interface{}) (l2 []interface{}, return list, fmt.Errorf("index of %d is greater than maximum supported index of %d", index, MaxIndex) } if len(list) <= index { - newlist := make([]interface{}, index+1) + newlist := make([]any, index+1) copy(newlist, list) list = newlist } @@ -254,7 +254,7 @@ func (t *parser) keyIndex() (int, error) { return strconv.Atoi(string(v)) } -func (t *parser) listItem(list []interface{}, i int) ([]interface{}, error) { +func (t *parser) listItem(list []any, i int) ([]any, error) { if i < 0 { return list, fmt.Errorf("negative %d index not allowed", i) } @@ -298,14 +298,14 @@ func (t *parser) listItem(list []interface{}, i int) ([]interface{}, error) { return setIndex(list, i, list2) case last == '.': // We have a nested object. Send to t.key - inner := map[string]interface{}{} + inner := map[string]any{} if len(list) > i { var ok bool - inner, ok = list[i].(map[string]interface{}) + inner, ok = list[i].(map[string]any) if !ok { // We have indices out of order. Initialize empty value. 
- list[i] = map[string]interface{}{} - inner = list[i].(map[string]interface{}) + list[i] = map[string]any{} + inner = list[i].(map[string]any) } } @@ -326,21 +326,21 @@ func (t *parser) val() ([]rune, error) { return v, err } -func (t *parser) valList() ([]interface{}, error) { +func (t *parser) valList() ([]any, error) { r, _, e := t.sc.ReadRune() if e != nil { - return []interface{}{}, e + return []any{}, e } if r != '{' { e = t.sc.UnreadRune() if e != nil { - return []interface{}{}, e + return []any{}, e } - return []interface{}{}, ErrNotList + return []any{}, ErrNotList } - list := []interface{}{} + list := []any{} stop := runeSet([]rune{',', '}'}) for { switch rs, last, err := runesUntil(t.sc, stop); { @@ -354,7 +354,7 @@ func (t *parser) valList() ([]interface{}, error) { if r, _, e := t.sc.ReadRune(); e == nil && r != ',' { e = t.sc.UnreadRune() if e != nil { - return []interface{}{}, e + return []any{}, e } } v, e := t.runesToVal(rs) @@ -395,7 +395,7 @@ func inMap(k rune, m map[rune]bool) bool { return ok } -func typedVal(v []rune, st bool) interface{} { +func typedVal(v []rune, st bool) any { val := string(v) if st { diff --git a/vendor/github.com/open-policy-agent/opa/internal/uuid/uuid.go b/vendor/github.com/open-policy-agent/opa/internal/uuid/uuid.go index 5d925e68d..a18f024a2 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/uuid/uuid.go +++ b/vendor/github.com/open-policy-agent/opa/internal/uuid/uuid.go @@ -32,12 +32,12 @@ func New(r io.Reader) (string, error) { // if parsing fails, it will return an empty map. 
It will fill the map // with some decoded values with fillMap // ref: https://datatracker.ietf.org/doc/html/rfc4122 -func Parse(s string) (map[string]interface{}, error) { +func Parse(s string) (map[string]any, error) { uuid, err := uuid.Parse(s) if err != nil { return nil, err } - out := make(map[string]interface{}, getVersionLen(int(uuid.Version()))) + out := make(map[string]any, getVersionLen(int(uuid.Version()))) fillMap(out, uuid) return out, nil } @@ -46,7 +46,7 @@ func Parse(s string) (map[string]interface{}, error) { // Version 1-2 has decodable values that could be of use, version 4 is random, // and version 3,5 is not feasible to extract data. Generated with either MD5 or SHA1 hash // ref: https://datatracker.ietf.org/doc/html/rfc4122 about creation of UUIDs -func fillMap(m map[string]interface{}, u uuid.UUID) { +func fillMap(m map[string]any, u uuid.UUID) { m["version"] = int(u.Version()) m["variant"] = u.Variant().String() switch version := m["version"]; version { diff --git a/vendor/github.com/open-policy-agent/opa/internal/version/version.go b/vendor/github.com/open-policy-agent/opa/internal/version/version.go index dc52733fc..1264278e4 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/version/version.go +++ b/vendor/github.com/open-policy-agent/opa/internal/version/version.go @@ -24,7 +24,7 @@ func Write(ctx context.Context, store storage.Store, txn storage.Transaction) er return err } - return store.Write(ctx, txn, storage.AddOp, versionPath, map[string]interface{}{ + return store.Write(ctx, txn, storage.AddOp, versionPath, map[string]any{ "version": version.Version, "build_commit": version.Vcs, "build_timestamp": version.Timestamp, diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/control.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/control.go index 38f030982..0b2805247 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/control.go +++ 
b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/control.go @@ -112,8 +112,8 @@ func (Br) Op() opcode.Opcode { } // ImmediateArgs returns the block index to break to. -func (i Br) ImmediateArgs() []interface{} { - return []interface{}{i.Index} +func (i Br) ImmediateArgs() []any { + return []any{i.Index} } // BrIf represents a WASM br_if instruction. @@ -127,8 +127,8 @@ func (BrIf) Op() opcode.Opcode { } // ImmediateArgs returns the block index to break to. -func (i BrIf) ImmediateArgs() []interface{} { - return []interface{}{i.Index} +func (i BrIf) ImmediateArgs() []any { + return []any{i.Index} } // Call represents a WASM call instruction. @@ -142,8 +142,8 @@ func (Call) Op() opcode.Opcode { } // ImmediateArgs returns the function index. -func (i Call) ImmediateArgs() []interface{} { - return []interface{}{i.Index} +func (i Call) ImmediateArgs() []any { + return []any{i.Index} } // CallIndirect represents a WASM call_indirect instruction. @@ -158,8 +158,8 @@ func (CallIndirect) Op() opcode.Opcode { } // ImmediateArgs returns the function index. -func (i CallIndirect) ImmediateArgs() []interface{} { - return []interface{}{i.Index, i.Reserved} +func (i CallIndirect) ImmediateArgs() []any { + return []any{i.Index, i.Reserved} } // Return represents a WASM return instruction. diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/instruction.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/instruction.go index 066be77c4..a0ab5953b 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/instruction.go +++ b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/instruction.go @@ -15,14 +15,14 @@ type NoImmediateArgs struct { } // ImmediateArgs returns the immedate arguments of an instruction. -func (NoImmediateArgs) ImmediateArgs() []interface{} { +func (NoImmediateArgs) ImmediateArgs() []any { return nil } // Instruction represents a single WASM instruction. 
type Instruction interface { Op() opcode.Opcode - ImmediateArgs() []interface{} + ImmediateArgs() []any } // StructuredInstruction represents a structured control instruction like br_if. diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/memory.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/memory.go index c449cb1b6..5a052bb76 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/memory.go +++ b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/memory.go @@ -18,8 +18,8 @@ func (I32Load) Op() opcode.Opcode { } // ImmediateArgs returns the static offset and alignment operands. -func (i I32Load) ImmediateArgs() []interface{} { - return []interface{}{i.Align, i.Offset} +func (i I32Load) ImmediateArgs() []any { + return []any{i.Align, i.Offset} } // I32Store represents the WASM i32.store instruction. @@ -34,6 +34,6 @@ func (I32Store) Op() opcode.Opcode { } // ImmediateArgs returns the static offset and alignment operands. -func (i I32Store) ImmediateArgs() []interface{} { - return []interface{}{i.Align, i.Offset} +func (i I32Store) ImmediateArgs() []any { + return []any{i.Align, i.Offset} } diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/numeric.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/numeric.go index 03f33752a..bbba1f0bc 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/numeric.go +++ b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/numeric.go @@ -19,8 +19,8 @@ func (I32Const) Op() opcode.Opcode { } // ImmediateArgs returns the i32 value to push onto the stack. -func (i I32Const) ImmediateArgs() []interface{} { - return []interface{}{i.Value} +func (i I32Const) ImmediateArgs() []any { + return []any{i.Value} } // I64Const represents the WASM i64.const instruction. 
@@ -34,8 +34,8 @@ func (I64Const) Op() opcode.Opcode { } // ImmediateArgs returns the i64 value to push onto the stack. -func (i I64Const) ImmediateArgs() []interface{} { - return []interface{}{i.Value} +func (i I64Const) ImmediateArgs() []any { + return []any{i.Value} } // F32Const represents the WASM f32.const instruction. @@ -49,8 +49,8 @@ func (F32Const) Op() opcode.Opcode { } // ImmediateArgs returns the f32 value to push onto the stack. -func (i F32Const) ImmediateArgs() []interface{} { - return []interface{}{i.Value} +func (i F32Const) ImmediateArgs() []any { + return []any{i.Value} } // F64Const represents the WASM f64.const instruction. @@ -64,8 +64,8 @@ func (F64Const) Op() opcode.Opcode { } // ImmediateArgs returns the f64 value to push onto the stack. -func (i F64Const) ImmediateArgs() []interface{} { - return []interface{}{i.Value} +func (i F64Const) ImmediateArgs() []any { + return []any{i.Value} } // I32Eqz represents the WASM i32.eqz instruction. diff --git a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/variable.go b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/variable.go index 063ffdb96..68be486af 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/variable.go +++ b/vendor/github.com/open-policy-agent/opa/internal/wasm/instruction/variable.go @@ -17,8 +17,8 @@ func (GetLocal) Op() opcode.Opcode { } // ImmediateArgs returns the index of the local variable to push onto the stack. -func (i GetLocal) ImmediateArgs() []interface{} { - return []interface{}{i.Index} +func (i GetLocal) ImmediateArgs() []any { + return []any{i.Index} } // SetLocal represents the WASM set_local instruction. @@ -33,8 +33,8 @@ func (SetLocal) Op() opcode.Opcode { // ImmediateArgs returns the index of the local variable to set with the top of // the stack. 
-func (i SetLocal) ImmediateArgs() []interface{} { - return []interface{}{i.Index} +func (i SetLocal) ImmediateArgs() []any { + return []any{i.Index} } // TeeLocal represents the WASM tee_local instruction. @@ -49,6 +49,6 @@ func (TeeLocal) Op() opcode.Opcode { // ImmediateArgs returns the index of the local variable to "tee" with the top of // the stack (like set, but retaining the top of the stack). -func (i TeeLocal) ImmediateArgs() []interface{} { - return []interface{}{i.Index} +func (i TeeLocal) ImmediateArgs() []any { + return []any{i.Index} } diff --git a/vendor/github.com/open-policy-agent/opa/rego/rego.go b/vendor/github.com/open-policy-agent/opa/rego/rego.go index e6af30c39..bdcf6c291 100644 --- a/vendor/github.com/open-policy-agent/opa/rego/rego.go +++ b/vendor/github.com/open-policy-agent/opa/rego/rego.go @@ -43,7 +43,7 @@ type EvalContext = v1.EvalContext type EvalOption = v1.EvalOption // EvalInput configures the input for a Prepared Query's evaluation -func EvalInput(input interface{}) EvalOption { +func EvalInput(input any) EvalOption { return v1.EvalInput(input) } @@ -155,7 +155,7 @@ func EvalSortSets(yes bool) EvalOption { return v1.EvalSortSets(yes) } -// EvalCopyMaps causes the evaluator to copy `map[string]interface{}`s before returning them. +// EvalCopyMaps causes the evaluator to copy `map[string]any`s before returning them. func EvalCopyMaps(yes bool) EvalOption { return v1.EvalCopyMaps(yes) } @@ -312,7 +312,7 @@ func ParsedImports(imp []*ast.Import) func(r *Rego) { // Input returns an argument that sets the Rego input document. Input should be // a native Go value representing the input document. -func Input(x interface{}) func(r *Rego) { +func Input(x any) func(r *Rego) { return v1.Input(x) } @@ -545,7 +545,7 @@ func Target(t string) func(r *Rego) { } // GenerateJSON sets the AST to JSON converter for the results. 
-func GenerateJSON(f func(*ast.Term, *EvalContext) (interface{}, error)) func(r *Rego) { +func GenerateJSON(f func(*ast.Term, *EvalContext) (any, error)) func(r *Rego) { return v1.GenerateJSON(f) } diff --git a/vendor/github.com/open-policy-agent/opa/storage/storage.go b/vendor/github.com/open-policy-agent/opa/storage/storage.go index c02773d98..d1abc1046 100644 --- a/vendor/github.com/open-policy-agent/opa/storage/storage.go +++ b/vendor/github.com/open-policy-agent/opa/storage/storage.go @@ -20,14 +20,14 @@ func NewTransactionOrDie(ctx context.Context, store Store, params ...Transaction // ReadOne is a convenience function to read a single value from the provided Store. It // will create a new Transaction to perform the read with, and clean up after itself // should an error occur. -func ReadOne(ctx context.Context, store Store, path Path) (interface{}, error) { +func ReadOne(ctx context.Context, store Store, path Path) (any, error) { return v1.ReadOne(ctx, store, path) } // WriteOne is a convenience function to write a single value to the provided Store. It // will create a new Transaction to perform the write with, and clean up after itself // should an error occur. -func WriteOne(ctx context.Context, store Store, op PatchOp, path Path, value interface{}) error { +func WriteOne(ctx context.Context, store Store, op PatchOp, path Path, value any) error { return v1.WriteOne(ctx, store, op, path, value) } diff --git a/vendor/github.com/open-policy-agent/opa/types/types.go b/vendor/github.com/open-policy-agent/opa/types/types.go index b888b27b6..0dd428de7 100644 --- a/vendor/github.com/open-policy-agent/opa/types/types.go +++ b/vendor/github.com/open-policy-agent/opa/types/types.go @@ -90,7 +90,7 @@ func NewSet(of Type) *Set { type StaticProperty = v1.StaticProperty // NewStaticProperty returns a new StaticProperty object. 
-func NewStaticProperty(key interface{}, value Type) *StaticProperty { +func NewStaticProperty(key any, value Type) *StaticProperty { return v1.NewStaticProperty(key, value) } @@ -173,7 +173,7 @@ func Or(a, b Type) Type { } // Select returns a property or item of a. -func Select(a Type, x interface{}) Type { +func Select(a Type, x any) Type { return v1.Select(a, x) } @@ -195,6 +195,6 @@ func Nil(a Type) bool { } // TypeOf returns the type of the Golang native value. -func TypeOf(x interface{}) Type { +func TypeOf(x any) Type { return v1.TypeOf(x) } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/annotations.go b/vendor/github.com/open-policy-agent/opa/v1/ast/annotations.go index def7604ed..a83cdf9c0 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/annotations.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/annotations.go @@ -23,27 +23,6 @@ const ( annotationScopeSubpackages = "subpackages" ) -var ( - scopeTerm = StringTerm("scope") - titleTerm = StringTerm("title") - entrypointTerm = StringTerm("entrypoint") - descriptionTerm = StringTerm("description") - organizationsTerm = StringTerm("organizations") - authorsTerm = StringTerm("authors") - relatedResourcesTerm = StringTerm("related_resources") - schemasTerm = StringTerm("schemas") - customTerm = StringTerm("custom") - refTerm = StringTerm("ref") - nameTerm = StringTerm("name") - emailTerm = StringTerm("email") - schemaTerm = StringTerm("schema") - definitionTerm = StringTerm("definition") - documentTerm = StringTerm(annotationScopeDocument) - packageTerm = StringTerm(annotationScopePackage) - ruleTerm = StringTerm(annotationScopeRule) - subpackagesTerm = StringTerm(annotationScopeSubpackages) -) - type ( // Annotations represents metadata attached to other AST nodes such as rules. 
Annotations struct { @@ -55,7 +34,7 @@ type ( RelatedResources []*RelatedResourceAnnotation `json:"related_resources,omitempty"` Authors []*AuthorAnnotation `json:"authors,omitempty"` Schemas []*SchemaAnnotation `json:"schemas,omitempty"` - Custom map[string]interface{} `json:"custom,omitempty"` + Custom map[string]any `json:"custom,omitempty"` Location *Location `json:"location,omitempty"` comments []*Comment @@ -64,9 +43,9 @@ type ( // SchemaAnnotation contains a schema declaration for the document identified by the path. SchemaAnnotation struct { - Path Ref `json:"path"` - Schema Ref `json:"schema,omitempty"` - Definition *interface{} `json:"definition,omitempty"` + Path Ref `json:"path"` + Schema Ref `json:"schema,omitempty"` + Definition *any `json:"definition,omitempty"` } AuthorAnnotation struct { @@ -203,7 +182,7 @@ func (a *Annotations) MarshalJSON() ([]byte, error) { return []byte(`{"scope":""}`), nil } - data := map[string]interface{}{ + data := map[string]any{ "scope": a.Scope, } @@ -283,7 +262,7 @@ func (ar *AnnotationsRef) GetRule() *Rule { } func (ar *AnnotationsRef) MarshalJSON() ([]byte, error) { - data := map[string]interface{}{ + data := map[string]any{ "path": ar.Path, } @@ -369,10 +348,7 @@ func compareRelatedResources(a, b []*RelatedResourceAnnotation) int { } func compareSchemas(a, b []*SchemaAnnotation) int { - maxLen := len(a) - if len(b) < maxLen { - maxLen = len(b) - } + maxLen := min(len(b), len(a)) for i := range maxLen { if cmp := a[i].Compare(b[i]); cmp != 0 { @@ -447,28 +423,28 @@ func (a *Annotations) toObject() (*Object, *Error) { if len(a.Scope) > 0 { switch a.Scope { case annotationScopeDocument: - obj.Insert(scopeTerm, documentTerm) + obj.Insert(InternedStringTerm("scope"), InternedStringTerm("document")) case annotationScopePackage: - obj.Insert(scopeTerm, packageTerm) + obj.Insert(InternedStringTerm("scope"), InternedStringTerm("package")) case annotationScopeRule: - obj.Insert(scopeTerm, ruleTerm) + 
obj.Insert(InternedStringTerm("scope"), InternedStringTerm("rule")) case annotationScopeSubpackages: - obj.Insert(scopeTerm, subpackagesTerm) + obj.Insert(InternedStringTerm("scope"), InternedStringTerm("subpackages")) default: - obj.Insert(scopeTerm, StringTerm(a.Scope)) + obj.Insert(InternedStringTerm("scope"), StringTerm(a.Scope)) } } if len(a.Title) > 0 { - obj.Insert(titleTerm, StringTerm(a.Title)) + obj.Insert(InternedStringTerm("title"), StringTerm(a.Title)) } if a.Entrypoint { - obj.Insert(entrypointTerm, InternedBooleanTerm(true)) + obj.Insert(InternedStringTerm("entrypoint"), InternedBooleanTerm(true)) } if len(a.Description) > 0 { - obj.Insert(descriptionTerm, StringTerm(a.Description)) + obj.Insert(InternedStringTerm("description"), StringTerm(a.Description)) } if len(a.Organizations) > 0 { @@ -476,19 +452,19 @@ func (a *Annotations) toObject() (*Object, *Error) { for _, org := range a.Organizations { orgs = append(orgs, StringTerm(org)) } - obj.Insert(organizationsTerm, ArrayTerm(orgs...)) + obj.Insert(InternedStringTerm("organizations"), ArrayTerm(orgs...)) } if len(a.RelatedResources) > 0 { rrs := make([]*Term, 0, len(a.RelatedResources)) for _, rr := range a.RelatedResources { - rrObj := NewObject(Item(refTerm, StringTerm(rr.Ref.String()))) + rrObj := NewObject(Item(InternedStringTerm("ref"), StringTerm(rr.Ref.String()))) if len(rr.Description) > 0 { - rrObj.Insert(descriptionTerm, StringTerm(rr.Description)) + rrObj.Insert(InternedStringTerm("description"), StringTerm(rr.Description)) } rrs = append(rrs, NewTerm(rrObj)) } - obj.Insert(relatedResourcesTerm, ArrayTerm(rrs...)) + obj.Insert(InternedStringTerm("related_resources"), ArrayTerm(rrs...)) } if len(a.Authors) > 0 { @@ -496,14 +472,14 @@ func (a *Annotations) toObject() (*Object, *Error) { for _, author := range a.Authors { aObj := NewObject() if len(author.Name) > 0 { - aObj.Insert(nameTerm, StringTerm(author.Name)) + aObj.Insert(InternedStringTerm("name"), StringTerm(author.Name)) } if 
len(author.Email) > 0 { - aObj.Insert(emailTerm, StringTerm(author.Email)) + aObj.Insert(InternedStringTerm("email"), StringTerm(author.Email)) } as = append(as, NewTerm(aObj)) } - obj.Insert(authorsTerm, ArrayTerm(as...)) + obj.Insert(InternedStringTerm("authors"), ArrayTerm(as...)) } if len(a.Schemas) > 0 { @@ -511,21 +487,21 @@ func (a *Annotations) toObject() (*Object, *Error) { for _, s := range a.Schemas { sObj := NewObject() if len(s.Path) > 0 { - sObj.Insert(pathTerm, NewTerm(s.Path.toArray())) + sObj.Insert(InternedStringTerm("path"), NewTerm(s.Path.toArray())) } if len(s.Schema) > 0 { - sObj.Insert(schemaTerm, NewTerm(s.Schema.toArray())) + sObj.Insert(InternedStringTerm("schema"), NewTerm(s.Schema.toArray())) } if s.Definition != nil { def, err := InterfaceToValue(s.Definition) if err != nil { return nil, NewError(CompileErr, a.Location, "invalid definition in schema annotation: %s", err.Error()) } - sObj.Insert(definitionTerm, NewTerm(def)) + sObj.Insert(InternedStringTerm("definition"), NewTerm(def)) } ss = append(ss, NewTerm(sObj)) } - obj.Insert(schemasTerm, ArrayTerm(ss...)) + obj.Insert(InternedStringTerm("schemas"), ArrayTerm(ss...)) } if len(a.Custom) > 0 { @@ -533,7 +509,7 @@ func (a *Annotations) toObject() (*Object, *Error) { if err != nil { return nil, NewError(CompileErr, a.Location, "invalid custom annotation %s", err.Error()) } - obj.Insert(customTerm, NewTerm(c)) + obj.Insert(InternedStringTerm("custom"), NewTerm(c)) } return &obj, nil @@ -562,7 +538,7 @@ func attachRuleAnnotations(mod *Module) { } if found && j < len(cpy) { - cpy = append(cpy[:j], cpy[j+1:]...) 
+ cpy = slices.Delete(cpy, j, j+1) } } } @@ -696,7 +672,7 @@ func (rr *RelatedResourceAnnotation) String() string { } func (rr *RelatedResourceAnnotation) MarshalJSON() ([]byte, error) { - d := map[string]interface{}{ + d := map[string]any{ "ref": rr.Ref.String(), } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/builtins.go b/vendor/github.com/open-policy-agent/opa/v1/ast/builtins.go index 32ab2d153..6b094f69e 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/builtins.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/builtins.go @@ -20,6 +20,13 @@ func RegisterBuiltin(b *Builtin) { BuiltinMap[b.Name] = b if len(b.Infix) > 0 { BuiltinMap[b.Infix] = b + + InternStringTerm(b.Infix) + } + + InternStringTerm(b.Name) + if strings.Contains(b.Name, ".") { + InternStringTerm(strings.Split(b.Name, ".")...) } } @@ -3388,7 +3395,7 @@ func (b *Builtin) Ref() Ref { ref := make(Ref, len(parts)) ref[0] = VarTerm(parts[0]) for i := 1; i < len(parts); i++ { - ref[i] = StringTerm(parts[i]) + ref[i] = InternedStringTerm(parts[i]) } return ref } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/check.go b/vendor/github.com/open-policy-agent/opa/v1/ast/check.go index ecfb32064..ffc3d8b26 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/check.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/check.go @@ -6,6 +6,7 @@ package ast import ( "fmt" + "slices" "sort" "strings" @@ -179,7 +180,7 @@ func (tc *typeChecker) CheckTypes(env *TypeEnv, sorted []util.T, as *AnnotationS func (tc *typeChecker) checkClosures(env *TypeEnv, expr *Expr) Errors { var result Errors - WalkClosures(expr, func(x interface{}) bool { + WalkClosures(expr, func(x any) bool { switch x := x.(type) { case *ArrayComprehension: _, errs := tc.copy().CheckBody(env, x.Body) @@ -702,7 +703,7 @@ func newRefChecker(env *TypeEnv, f varRewriter) *refChecker { } } -func (rc *refChecker) Visit(x interface{}) bool { +func (rc *refChecker) Visit(x any) bool { switch x := x.(type) 
{ case *ArrayComprehension, *ObjectComprehension, *SetComprehension: return true @@ -1006,12 +1007,7 @@ func (d *ArgErrDetail) Lines() []string { } func (d *ArgErrDetail) nilType() bool { - for i := range d.Have { - if types.Nil(d.Have[i]) { - return true - } - } - return false + return slices.ContainsFunc(d.Have, types.Nil) } // UnificationErrDetail describes a type mismatch error when two values are @@ -1247,8 +1243,8 @@ func override(ref Ref, t types.Type, o types.Type, rule *Rule) (types.Type, *Err return types.NewObject(newStaticProps, obj.DynamicProperties()), nil } -func getKeys(ref Ref, rule *Rule) ([]interface{}, *Error) { - keys := []interface{}{} +func getKeys(ref Ref, rule *Rule) ([]any, *Error) { + keys := []any{} for _, refElem := range ref { key, err := JSON(refElem.Value) if err != nil { @@ -1259,7 +1255,7 @@ func getKeys(ref Ref, rule *Rule) ([]interface{}, *Error) { return keys, nil } -func getObjectTypeRec(keys []interface{}, o types.Type, d *types.DynamicProperty) *types.Object { +func getObjectTypeRec(keys []any, o types.Type, d *types.DynamicProperty) *types.Object { if len(keys) == 1 { staticProps := []*types.StaticProperty{types.NewStaticProperty(keys[0], o)} return types.NewObject(staticProps, d) @@ -1300,7 +1296,7 @@ func getRuleAnnotation(as *AnnotationSet, rule *Rule) (result []*SchemaAnnotatio func processAnnotation(ss *SchemaSet, annot *SchemaAnnotation, rule *Rule, allowNet []string) (types.Type, *Error) { - var schema interface{} + var schema any if annot.Schema != nil { if ss == nil { diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/compare.go b/vendor/github.com/open-policy-agent/opa/v1/ast/compare.go index 452c6365a..c4754341d 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/compare.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/compare.go @@ -36,7 +36,7 @@ import ( // Sets are considered equal if and only if the symmetric difference of a and b // is empty. 
// Other comparisons are consistent but not defined. -func Compare(a, b interface{}) int { +func Compare(a, b any) int { if t, ok := a.(*Term); ok { if t == nil { @@ -239,7 +239,7 @@ func (s termSlice) Less(i, j int) bool { return Compare(s[i].Value, s[j].Value) func (s termSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } func (s termSlice) Len() int { return len(s) } -func sortOrder(x interface{}) int { +func sortOrder(x any) int { switch x.(type) { case Null: return 0 @@ -296,10 +296,7 @@ func sortOrder(x interface{}) int { } func importsCompare(a, b []*Import) int { - minLen := len(a) - if len(b) < minLen { - minLen = len(b) - } + minLen := min(len(b), len(a)) for i := range minLen { if cmp := a[i].Compare(b[i]); cmp != 0 { return cmp @@ -315,10 +312,7 @@ func importsCompare(a, b []*Import) int { } func annotationsCompare(a, b []*Annotations) int { - minLen := len(a) - if len(b) < minLen { - minLen = len(b) - } + minLen := min(len(b), len(a)) for i := range minLen { if cmp := a[i].Compare(b[i]); cmp != 0 { return cmp @@ -334,10 +328,7 @@ func annotationsCompare(a, b []*Annotations) int { } func rulesCompare(a, b []*Rule) int { - minLen := len(a) - if len(b) < minLen { - minLen = len(b) - } + minLen := min(len(b), len(a)) for i := range minLen { if cmp := a[i].Compare(b[i]); cmp != 0 { return cmp @@ -353,10 +344,7 @@ func rulesCompare(a, b []*Rule) int { } func termSliceCompare(a, b []*Term) int { - minLen := len(a) - if len(b) < minLen { - minLen = len(b) - } + minLen := min(len(b), len(a)) for i := range minLen { if cmp := Compare(a[i], b[i]); cmp != 0 { return cmp @@ -371,10 +359,7 @@ func termSliceCompare(a, b []*Term) int { } func withSliceCompare(a, b []*With) int { - minLen := len(a) - if len(b) < minLen { - minLen = len(b) - } + minLen := min(len(b), len(a)) for i := range minLen { if cmp := Compare(a[i], b[i]); cmp != 0 { return cmp diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/compile.go 
b/vendor/github.com/open-policy-agent/opa/v1/ast/compile.go index 13855692c..2b800e2c9 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/compile.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/compile.go @@ -874,9 +874,7 @@ func (c *Compiler) PassesTypeCheckRules(rules []*Rule) Errors { c.builtins[bi.Name] = bi } - for name, bi := range c.customBuiltins { - c.builtins[name] = bi - } + maps.Copy(c.builtins, c.customBuiltins) c.TypeEnv = checker.Env(c.builtins) } @@ -966,12 +964,7 @@ func (c *Compiler) buildComprehensionIndices() { } } -var ( - keywordsTerm = StringTerm("keywords") - pathTerm = StringTerm("path") - annotationsTerm = StringTerm("annotations") - futureKeywordsPrefix = Ref{FutureRootDocument, keywordsTerm} -) +var futureKeywordsPrefix = Ref{FutureRootDocument, InternedStringTerm("keywords")} // buildRequiredCapabilities updates the required capabilities on the compiler // to include any keyword and feature dependencies present in the modules. The @@ -1085,7 +1078,7 @@ func (c *Compiler) checkSelfPath(loc *Location, eq func(a, b util.T) bool, a, b } } -func astNodeToString(x interface{}) string { +func astNodeToString(x any) string { return x.(*Rule).Ref().String() } @@ -1232,7 +1225,7 @@ func (c *Compiler) checkUndefinedFuncs() { } } -func checkUndefinedFuncs(env *TypeEnv, x interface{}, arity func(Ref) int, rwVars map[Var]Var) Errors { +func checkUndefinedFuncs(env *TypeEnv, x any, arity func(Ref) int, rwVars map[Var]Var) Errors { var errs Errors @@ -1337,7 +1330,7 @@ func (c *Compiler) checkSafetyRuleHeads() { } } -func compileSchema(goSchema interface{}, allowNet []string) (*gojsonschema.Schema, error) { +func compileSchema(goSchema any, allowNet []string) (*gojsonschema.Schema, error) { gojsonschema.SetAllowNet(allowNet) var refLoader gojsonschema.JSONLoader @@ -1410,11 +1403,11 @@ func newSchemaParser() *schemaParser { } } -func (parser *schemaParser) parseSchema(schema interface{}) (types.Type, error) { +func (parser 
*schemaParser) parseSchema(schema any) (types.Type, error) { return parser.parseSchemaWithPropertyKey(schema, "") } -func (parser *schemaParser) parseSchemaWithPropertyKey(schema interface{}, propertyKey string) (types.Type, error) { +func (parser *schemaParser) parseSchemaWithPropertyKey(schema any, propertyKey string) (types.Type, error) { subSchema, ok := schema.(*gojsonschema.SubSchema) if !ok { return nil, fmt.Errorf("unexpected schema type %v", subSchema) @@ -1701,9 +1694,7 @@ func (c *Compiler) init() { } } - for name, bi := range c.customBuiltins { - c.builtins[name] = bi - } + maps.Copy(c.builtins, c.customBuiltins) // Load the global input schema if one was provided. if c.schemaSet != nil { @@ -2061,7 +2052,7 @@ func (c *Compiler) rewritePrintCalls() { // checkVoidCalls returns errors for any expressions that treat void function // calls as values. The only void functions in Rego are specific built-ins like // print(). -func checkVoidCalls(env *TypeEnv, x interface{}) Errors { +func checkVoidCalls(env *TypeEnv, x any) Errors { var errs Errors WalkTerms(x, func(x *Term) bool { if call, ok := x.Value.(Call); ok { @@ -2097,7 +2088,7 @@ func rewritePrintCalls(gen *localVarGenerator, getArity func(Ref) int, globals V if ContainsClosures(body[i]) { safe := outputVarsForBody(body[:i], getArity, globals) safe.Update(globals) - WalkClosures(body[i], func(x interface{}) bool { + WalkClosures(body[i], func(x any) bool { var modrec bool var errsrec Errors switch x := x.(type) { @@ -2166,9 +2157,9 @@ func rewritePrintCalls(gen *localVarGenerator, getArity func(Ref) int, globals V return modified, nil } -func erasePrintCalls(node interface{}) bool { +func erasePrintCalls(node any) bool { var modified bool - NewGenericVisitor(func(x interface{}) bool { + NewGenericVisitor(func(x any) bool { var modrec bool switch x := x.(type) { case *Rule: @@ -2217,7 +2208,7 @@ func erasePrintCallsInBody(x Body) (bool, Body) { return true, cpy } -func containsPrintCall(x interface{}) 
bool { +func containsPrintCall(x any) bool { var found bool WalkExprs(x, func(expr *Expr) bool { if !found { @@ -2478,7 +2469,7 @@ func getPrimaryRuleAnnotations(as *AnnotationSet, rule *Rule) *Annotations { func rewriteRegoMetadataCalls(metadataChainVar *Var, metadataRuleVar *Var, body Body, rewrittenVars *map[Var]Var) Errors { var errs Errors - WalkClosures(body, func(x interface{}) bool { + WalkClosures(body, func(x any) bool { switch x := x.(type) { case *ArrayComprehension: errs = rewriteRegoMetadataCalls(metadataChainVar, metadataRuleVar, x.Body, rewrittenVars) @@ -2538,17 +2529,15 @@ func createMetadataChain(chain []*AnnotationsRef) (*Term, *Error) { metaArray := NewArray() for _, link := range chain { - p := link.Path.toArray(). - Slice(1, -1) // Dropping leading 'data' element of path - obj := NewObject( - Item(pathTerm, NewTerm(p)), - ) + // Dropping leading 'data' element of path + p := link.Path[1:].toArray() + obj := NewObject(Item(InternedStringTerm("path"), NewTerm(p))) if link.Annotations != nil { annotObj, err := link.Annotations.toObject() if err != nil { return nil, err } - obj.Insert(annotationsTerm, NewTerm(*annotObj)) + obj.Insert(InternedStringTerm("annotations"), NewTerm(*annotObj)) } metaArray = metaArray.Append(NewTerm(obj)) } @@ -2694,7 +2683,7 @@ type rewriteNestedHeadVarLocalTransform struct { strict bool } -func (xform *rewriteNestedHeadVarLocalTransform) Visit(x interface{}) bool { +func (xform *rewriteNestedHeadVarLocalTransform) Visit(x any) bool { if term, ok := x.(*Term); ok { @@ -2743,7 +2732,7 @@ type rewriteHeadVarLocalTransform struct { declared map[Var]Var } -func (xform rewriteHeadVarLocalTransform) Transform(x interface{}) (interface{}, error) { +func (xform rewriteHeadVarLocalTransform) Transform(x any) (any, error) { if v, ok := x.(Var); ok { if gv, ok := xform.declared[v]; ok { return gv, nil @@ -2774,7 +2763,7 @@ type ruleArgLocalRewriter struct { errs []*Error } -func (vis *ruleArgLocalRewriter) Visit(x interface{}) 
Visitor { +func (vis *ruleArgLocalRewriter) Visit(x any) Visitor { t, ok := x.(*Term) if !ok { @@ -2821,7 +2810,7 @@ func (c *Compiler) rewriteWithModifiers() { f := newEqualityFactory(c.localvargen) for _, name := range c.sorted { mod := c.Modules[name] - t := NewGenericTransformer(func(x interface{}) (interface{}, error) { + t := NewGenericTransformer(func(x any) (any, error) { body, ok := x.(Body) if !ok { return x, nil @@ -3174,7 +3163,7 @@ func (ci *ComprehensionIndex) String() string { return fmt.Sprintf("", NewArray(ci.Keys...)) } -func buildComprehensionIndices(dbg debug.Debug, arity func(Ref) int, candidates VarSet, rwVars map[Var]Var, node interface{}, result map[*Term]*ComprehensionIndex) uint64 { +func buildComprehensionIndices(dbg debug.Debug, arity func(Ref) int, candidates VarSet, rwVars map[Var]Var, node any, result map[*Term]*ComprehensionIndex) uint64 { var n uint64 cpy := candidates.Copy() WalkBodies(node, func(b Body) bool { @@ -3327,11 +3316,11 @@ func newComprehensionIndexRegressionCheckVisitor(candidates VarSet) *comprehensi } } -func (vis *comprehensionIndexRegressionCheckVisitor) Walk(x interface{}) { +func (vis *comprehensionIndexRegressionCheckVisitor) Walk(x any) { NewGenericVisitor(vis.visit).Walk(x) } -func (vis *comprehensionIndexRegressionCheckVisitor) visit(x interface{}) bool { +func (vis *comprehensionIndexRegressionCheckVisitor) visit(x any) bool { if !vis.worse { switch x := x.(type) { case *Expr: @@ -3371,11 +3360,11 @@ func newComprehensionIndexNestedCandidateVisitor(candidates VarSet) *comprehensi } } -func (vis *comprehensionIndexNestedCandidateVisitor) Walk(x interface{}) { +func (vis *comprehensionIndexNestedCandidateVisitor) Walk(x any) { NewGenericVisitor(vis.visit).Walk(x) } -func (vis *comprehensionIndexNestedCandidateVisitor) visit(x interface{}) bool { +func (vis *comprehensionIndexNestedCandidateVisitor) visit(x any) bool { if vis.found { return true @@ -3676,7 +3665,7 @@ func NewGraph(modules map[string]*Module, 
list func(Ref) []*Rule) *Graph { // each dependency. vis := func(a *Rule) *GenericVisitor { stop := false - return NewGenericVisitor(func(x interface{}) bool { + return NewGenericVisitor(func(x any) bool { switch x := x.(type) { case Ref: for _, b := range list(x) { @@ -4001,7 +3990,7 @@ type bodySafetyTransformer struct { unsafe unsafeVars } -func (xform *bodySafetyTransformer) Visit(x interface{}) bool { +func (xform *bodySafetyTransformer) Visit(x any) bool { switch term := x.(type) { case *Term: switch x := term.Value.(type) { @@ -4078,7 +4067,7 @@ func (xform *bodySafetyTransformer) reorderSetComprehensionSafety(sc *SetCompreh // this expression. func unsafeVarsInClosures(e *Expr) VarSet { vs := VarSet{} - WalkClosures(e, func(x interface{}) bool { + WalkClosures(e, func(x any) bool { vis := &VarVisitor{vars: vs} if ev, ok := x.(*Every); ok { vis.Walk(ev.Body) @@ -4198,7 +4187,7 @@ func outputVarsForExprCall(expr *Expr, arity int, safe VarSet, terms []*Term) Va return output } -func outputVarsForTerms(expr interface{}, safe VarSet) VarSet { +func outputVarsForTerms(expr any, safe VarSet) VarSet { output := VarSet{} WalkTerms(expr, func(x *Term) bool { switch r := x.Value.(type) { @@ -4250,7 +4239,7 @@ func newLocalVarGeneratorForModuleSet(sorted []string, modules map[string]*Modul return &localVarGenerator{exclude: exclude, next: 0} } -func newLocalVarGenerator(suffix string, node interface{}) *localVarGenerator { +func newLocalVarGenerator(suffix string, node any) *localVarGenerator { exclude := NewVarSet() vis := &VarVisitor{vars: exclude} vis.Walk(node) @@ -4340,7 +4329,7 @@ func resolveRefsInRule(globals map[Var]*usedRef, rule *Rule) error { // Walk args to collect vars and transform body so that callers can shadow // root documents. 
- vis = NewGenericVisitor(func(x interface{}) bool { + vis = NewGenericVisitor(func(x any) bool { if err != nil { return true } @@ -4421,7 +4410,10 @@ func resolveRefsInExpr(globals map[Var]*usedRef, ignore *declaredVarStack, expr cpy.Terms = buf case *SomeDecl: if val, ok := ts.Symbols[0].Value.(Call); ok { - cpy.Terms = &SomeDecl{Symbols: []*Term{CallTerm(resolveRefsInTermSlice(globals, ignore, val)...)}} + cpy.Terms = &SomeDecl{ + Symbols: []*Term{CallTerm(resolveRefsInTermSlice(globals, ignore, val)...)}, + Location: ts.Location, + } } case *Every: locals := NewVarSet() @@ -4558,9 +4550,9 @@ func (s *declaredVarStack) Pop() { *s = curr[:len(curr)-1] } -func declaredVars(x interface{}) VarSet { +func declaredVars(x any) VarSet { vars := NewVarSet() - vis := NewGenericVisitor(func(x interface{}) bool { + vis := NewGenericVisitor(func(x any) bool { switch x := x.(type) { case *Expr: if x.IsAssignment() && validEqAssignArgCount(x) { @@ -4609,8 +4601,8 @@ func declaredVars(x interface{}) VarSet { // The comprehension would be rewritten as: // // [__local0__ | x = y[_]; y = [1,2,3]; __local0__ = x[0]] -func rewriteComprehensionTerms(f *equalityFactory, node interface{}) (interface{}, error) { - return TransformComprehensions(node, func(x interface{}) (Value, error) { +func rewriteComprehensionTerms(f *equalityFactory, node any) (any, error) { + return TransformComprehensions(node, func(x any) (Value, error) { switch x := x.(type) { case *ArrayComprehension: if requiresEval(x.Term) { @@ -4643,6 +4635,8 @@ func rewriteComprehensionTerms(f *equalityFactory, node interface{}) (interface{ }) } +var doubleEq = Equal.Ref() + // rewriteEquals will rewrite exprs under x as unification calls instead of == // calls. For example: // @@ -4656,10 +4650,9 @@ func rewriteComprehensionTerms(f *equalityFactory, node interface{}) (interface{ // result back whereas with = the result is only ever true/undefined. 
For // partial evaluation cases we do want to rewrite == to = to simplify the // result. -func rewriteEquals(x interface{}) (modified bool) { - doubleEq := Equal.Ref() +func rewriteEquals(x any) (modified bool) { unifyOp := Equality.Ref() - t := NewGenericTransformer(func(x interface{}) (interface{}, error) { + t := NewGenericTransformer(func(x any) (any, error) { if x, ok := x.(*Expr); ok && x.IsCall() { operator := x.Operator() if operator.Equal(doubleEq) && len(x.Operands()) == 2 { @@ -5041,7 +5034,7 @@ func expandExprTermArray(gen *localVarGenerator, arr *Array) (support []*Expr) { } func expandExprTermSlice(gen *localVarGenerator, v []*Term) (support []*Expr) { - for i := 0; i < len(v); i++ { + for i := range v { var extras []*Expr extras, v[i] = expandExprTerm(gen, v[i]) support = append(support, extras...) @@ -5426,7 +5419,7 @@ func rewriteSomeDeclStatement(g *localVarGenerator, stack *localDeclaredVars, ex } func rewriteDeclaredVarsInExpr(g *localVarGenerator, stack *localDeclaredVars, expr *Expr, errs Errors, strict bool) (*Expr, Errors) { - vis := NewGenericVisitor(func(x interface{}) bool { + vis := NewGenericVisitor(func(x any) bool { var stop bool switch x := x.(type) { case *Term: @@ -5903,7 +5896,7 @@ func safetyErrorSlice(unsafe unsafeVars, rewritten map[Var]Var) (result Errors) return } -func checkUnsafeBuiltins(unsafeBuiltinsMap map[string]struct{}, node interface{}) Errors { +func checkUnsafeBuiltins(unsafeBuiltinsMap map[string]struct{}, node any) Errors { errs := make(Errors, 0) WalkExprs(node, func(x *Expr) bool { if x.IsCall() { diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/env.go b/vendor/github.com/open-policy-agent/opa/v1/ast/env.go index 9bffd03e0..12d4be891 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/env.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/env.go @@ -30,7 +30,7 @@ func newTypeEnv(f func() *typeChecker) *TypeEnv { // Get returns the type of x. 
// Deprecated: Use GetByValue or GetByRef instead, as they are more efficient. -func (env *TypeEnv) Get(x interface{}) types.Type { +func (env *TypeEnv) Get(x any) types.Type { if term, ok := x.(*Term); ok { x = term.Value } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/errors.go b/vendor/github.com/open-policy-agent/opa/v1/ast/errors.go index c7aab7114..75160afc6 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/errors.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/errors.go @@ -115,7 +115,7 @@ func (e *Error) Error() string { } // NewError returns a new Error object. -func NewError(code string, loc *Location, f string, a ...interface{}) *Error { +func NewError(code string, loc *Location, f string, a ...any) *Error { return &Error{ Code: code, Location: loc, diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/index.go b/vendor/github.com/open-policy-agent/opa/v1/ast/index.go index 722b70e57..9abababc2 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/index.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/index.go @@ -253,7 +253,7 @@ type ruleWalker struct { result *trieTraversalResult } -func (r *ruleWalker) Do(x interface{}) trieWalker { +func (r *ruleWalker) Do(x any) trieWalker { tn := x.(*trieNode) r.result.Add(tn) return r @@ -454,7 +454,7 @@ func (i *refindices) index(rule *Rule, ref Ref) *refindex { } type trieWalker interface { - Do(x interface{}) trieWalker + Do(x any) trieWalker } type trieTraversalResult struct { @@ -850,7 +850,7 @@ func indexValue(b *Term) (Value, bool) { case *Array: stop := false first := true - vis := NewGenericVisitor(func(x interface{}) bool { + vis := NewGenericVisitor(func(x any) bool { if first { first = false return false @@ -932,7 +932,7 @@ func globPatternToArray(pattern *Term, delim string) *Term { } } - return NewTerm(NewArray(arr...)) + return ArrayTerm(arr...) 
} // splits s on characters in delim except if delim characters have been escaped diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/interning.go b/vendor/github.com/open-policy-agent/opa/v1/ast/interning.go index 012cffb9a..7ef32d7bb 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/interning.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/interning.go @@ -4,10 +4,14 @@ package ast -import "strconv" +import ( + "strconv" +) // NOTE! Great care must be taken **not** to modify the terms returned // from these functions, as they are shared across all callers. +// This package is currently considered experimental, and may change +// at any time without notice. var ( booleanTrueTerm = &Term{Value: Boolean(true)} @@ -20,8 +24,26 @@ var ( InternedEmptyString = StringTerm("") InternedEmptyObject = ObjectTerm() + InternedEmptyArray = ArrayTerm() + InternedEmptySet = SetTerm() + + InternedEmptyArrayValue = NewArray() ) +// InternStringTerm interns the given strings as terms. Note that Interning is +// considered experimental and should not be relied upon by external code. +// WARNING: This must **only** be called at initialization time, as the +// interned terms are shared globally, and the underlying map is not thread-safe. +func InternStringTerm(str ...string) { + for _, s := range str { + if _, ok := internedStringTerms[s]; ok { + continue + } + + internedStringTerms[s] = StringTerm(s) + } +} + // InternedBooleanTerm returns an interned term with the given boolean value. func InternedBooleanTerm(b bool) *Term { if b { @@ -63,6 +85,9 @@ func HasInternedIntNumberTerm(i int) bool { return i >= -1 && i < len(intNumberTerms) } +// InternedStringTerm returns an interned term with the given string value. If the +// provided string is not interned, a new term is created for that value. It does *not* +// modify the global interned terms map. 
func InternedStringTerm(s string) *Term { if term, ok := internedStringTerms[s]; ok { return term } return StringTerm(s) } +// Returns an interned string term representing the integer value i, if +// interned. If not, creates a new StringTerm for the integer value. +func InternedIntegerString(i int) *Term { + // Cheapest option - we don't need to call strconv.Itoa + if HasInternedIntNumberTerm(i) { + if interned, ok := internedStringTerms[IntNumberTerm(i).String()]; ok { + return interned + } + } + + // Next cheapest option — the string could still be interned if the store + has been extended with more terms than we currently intern. + s := strconv.Itoa(i) + if interned, ok := internedStringTerms[s]; ok { + return interned + } + + // Nope, create a new term + return StringTerm(s) +} + var internedStringTerms = map[string]*Term{ - "": InternedEmptyString, - "0": StringTerm("0"), - "1": StringTerm("1"), - "2": StringTerm("2"), - "3": StringTerm("3"), - "4": StringTerm("4"), - "5": StringTerm("5"), - "6": StringTerm("6"), - "7": StringTerm("7"), - "8": StringTerm("8"), - "9": StringTerm("9"), - "10": StringTerm("10"), + "": InternedEmptyString, + "0": StringTerm("0"), + "1": StringTerm("1"), + "2": StringTerm("2"), + "3": StringTerm("3"), + "4": StringTerm("4"), + "5": StringTerm("5"), + "6": StringTerm("6"), + "7": StringTerm("7"), + "8": StringTerm("8"), + "9": StringTerm("9"), + "10": StringTerm("10"), + "11": StringTerm("11"), + "12": StringTerm("12"), + "13": StringTerm("13"), + "14": StringTerm("14"), + "15": StringTerm("15"), + "16": StringTerm("16"), + "17": StringTerm("17"), + "18": StringTerm("18"), + "19": StringTerm("19"), + "20": StringTerm("20"), + "21": StringTerm("21"), + "22": StringTerm("22"), + "23": StringTerm("23"), + "24": StringTerm("24"), + "25": StringTerm("25"), + "26": StringTerm("26"), + "27": StringTerm("27"), + "28": StringTerm("28"), + "29": StringTerm("29"), + "30": 
StringTerm("30"), + "31": StringTerm("31"), + "32": StringTerm("32"), + "33": StringTerm("33"), + "34": StringTerm("34"), + "35": StringTerm("35"), + "36": StringTerm("36"), + "37": StringTerm("37"), + "38": StringTerm("38"), + "39": StringTerm("39"), + "40": StringTerm("40"), + "41": StringTerm("41"), + "42": StringTerm("42"), + "43": StringTerm("43"), + "44": StringTerm("44"), + "45": StringTerm("45"), + "46": StringTerm("46"), + "47": StringTerm("47"), + "48": StringTerm("48"), + "49": StringTerm("49"), + "50": StringTerm("50"), + "51": StringTerm("51"), + "52": StringTerm("52"), + "53": StringTerm("53"), + "54": StringTerm("54"), + "55": StringTerm("55"), + "56": StringTerm("56"), + "57": StringTerm("57"), + "58": StringTerm("58"), + "59": StringTerm("59"), + "60": StringTerm("60"), + "61": StringTerm("61"), + "62": StringTerm("62"), + "63": StringTerm("63"), + "64": StringTerm("64"), + "65": StringTerm("65"), + "66": StringTerm("66"), + "67": StringTerm("67"), + "68": StringTerm("68"), + "69": StringTerm("69"), + "70": StringTerm("70"), + "71": StringTerm("71"), + "72": StringTerm("72"), + "73": StringTerm("73"), + "74": StringTerm("74"), + "75": StringTerm("75"), + "76": StringTerm("76"), + "77": StringTerm("77"), + "78": StringTerm("78"), + "79": StringTerm("79"), + "80": StringTerm("80"), + "81": StringTerm("81"), + "82": StringTerm("82"), + "83": StringTerm("83"), + "84": StringTerm("84"), + "85": StringTerm("85"), + "86": StringTerm("86"), + "87": StringTerm("87"), + "88": StringTerm("88"), + "89": StringTerm("89"), + "90": StringTerm("90"), + "91": StringTerm("91"), + "92": StringTerm("92"), + "93": StringTerm("93"), + "94": StringTerm("94"), + "95": StringTerm("95"), + "96": StringTerm("96"), + "97": StringTerm("97"), + "98": StringTerm("98"), + "99": StringTerm("99"), + "100": StringTerm("100"), + + // Types + "null": StringTerm("null"), + "boolean": StringTerm("boolean"), + "number": StringTerm("number"), + "string": StringTerm("string"), + "array": 
StringTerm("array"), + "object": StringTerm("object"), + "set": StringTerm("set"), + + // Runtime + "config": StringTerm("config"), + "env": StringTerm("env"), + "version": StringTerm("version"), + "commit": StringTerm("commit"), + "authorization_enabled": StringTerm("authorization_enabled"), + "skip_known_schema_check": StringTerm("skip_known_schema_check"), + + // Annotations + "annotations": StringTerm("annotations"), + "scope": StringTerm("scope"), + "title": StringTerm("title"), + "entrypoint": StringTerm("entrypoint"), + "description": StringTerm("description"), + "organizations": StringTerm("organizations"), + "authors": StringTerm("authors"), + "related_resources": StringTerm("related_resources"), + "schemas": StringTerm("schemas"), + "custom": StringTerm("custom"), + "ref": StringTerm("ref"), + "name": StringTerm("name"), + "email": StringTerm("email"), + "schema": StringTerm("schema"), + "definition": StringTerm("definition"), + "document": StringTerm("document"), + "package": StringTerm("package"), + "rule": StringTerm("rule"), + "subpackages": StringTerm("subpackages"), + + // Debug + "text": StringTerm("text"), + "value": StringTerm("value"), + "bindings": StringTerm("bindings"), + "expressions": StringTerm("expressions"), + + // Various + "data": StringTerm("data"), + "input": StringTerm("input"), + "result": StringTerm("result"), + "keywords": StringTerm("keywords"), + "path": StringTerm("path"), + "v1": StringTerm("v1"), + "error": StringTerm("error"), + "partial": StringTerm("partial"), + + // HTTP + "code": StringTerm("code"), + "message": StringTerm("message"), + "status_code": StringTerm("status_code"), + "method": StringTerm("method"), + "url": StringTerm("url"), + + // JWT + "enc": StringTerm("enc"), + "cty": StringTerm("cty"), + "iss": StringTerm("iss"), + "exp": StringTerm("exp"), + "nbf": StringTerm("nbf"), + "aud": StringTerm("aud"), + "secret": StringTerm("secret"), + "cert": StringTerm("cert"), + + // Decisions + "revision": 
StringTerm("revision"), + "labels": StringTerm("labels"), + "decision_id": StringTerm("decision_id"), + "bundles": StringTerm("bundles"), + "query": StringTerm("query"), + "mapped_result": StringTerm("mapped_result"), + "nd_builtin_cache": StringTerm("nd_builtin_cache"), + "erased": StringTerm("erased"), + "masked": StringTerm("masked"), + "requested_by": StringTerm("requested_by"), + "timestamp": StringTerm("timestamp"), + "metrics": StringTerm("metrics"), + "req_id": StringTerm("req_id"), } var stringToIntNumberTermMap = map[string]*Term{ diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/location/location.go b/vendor/github.com/open-policy-agent/opa/v1/ast/location/location.go index 716aad693..6d1b16cdf 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/location/location.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/location/location.go @@ -36,18 +36,18 @@ func (loc *Location) Equal(other *Location) bool { // Errorf returns a new error value with a message formatted to include the location // info (e.g., line, column, filename, etc.) -func (loc *Location) Errorf(f string, a ...interface{}) error { +func (loc *Location) Errorf(f string, a ...any) error { return errors.New(loc.Format(f, a...)) } // Wrapf returns a new error value that wraps an existing error with a message formatted // to include the location info (e.g., line, column, filename, etc.) -func (loc *Location) Wrapf(err error, f string, a ...interface{}) error { +func (loc *Location) Wrapf(err error, f string, a ...any) error { return fmt.Errorf(loc.Format(f, a...)+": %w", err) } // Format returns a formatted string prefixed with the location information. 
-func (loc *Location) Format(f string, a ...interface{}) string { +func (loc *Location) Format(f string, a ...any) string { if len(loc.File) > 0 { f = fmt.Sprintf("%v:%v: %v", loc.File, loc.Row, f) } else { diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/map.go b/vendor/github.com/open-policy-agent/opa/v1/ast/map.go index d0aa43755..31cad4d61 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/map.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/map.go @@ -26,9 +26,9 @@ func NewValueMap() *ValueMap { // MarshalJSON provides a custom marshaller for the ValueMap which // will include the key, value, and value type. func (vs *ValueMap) MarshalJSON() ([]byte, error) { - var tmp []map[string]interface{} + var tmp []map[string]any vs.Iter(func(k Value, v Value) bool { - tmp = append(tmp, map[string]interface{}{ + tmp = append(tmp, map[string]any{ "name": k.String(), "type": ValueName(v), "value": v, diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/parser.go b/vendor/github.com/open-policy-agent/opa/v1/ast/parser.go index 66779b8d7..695c2333f 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/parser.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/parser.go @@ -10,9 +10,11 @@ import ( "errors" "fmt" "io" + "maps" "math/big" "net/url" "regexp" + "slices" "sort" "strconv" "strings" @@ -26,7 +28,15 @@ import ( "github.com/open-policy-agent/opa/v1/ast/location" ) -var RegoV1CompatibleRef = Ref{VarTerm("rego"), StringTerm("v1")} +// DefaultMaxParsingRecursionDepth is the default maximum recursion +// depth for the parser +const DefaultMaxParsingRecursionDepth = 100000 + +// ErrMaxParsingRecursionDepthExceeded is returned when the parser +// recursion exceeds the maximum allowed depth +var ErrMaxParsingRecursionDepthExceeded = errors.New("max parsing recursion depth exceeded") + +var RegoV1CompatibleRef = Ref{VarTerm("rego"), InternedStringTerm("v1")} // RegoVersion defines the Rego syntax requirements for a module. 
type RegoVersion int @@ -112,10 +122,12 @@ func (s *state) Text(offset, end int) []byte { // Parser is used to parse Rego statements. type Parser struct { - r io.Reader - s *state - po ParserOptions - cache parsedTermCache + r io.Reader + s *state + po ParserOptions + cache parsedTermCache + recursionDepth int + maxRecursionDepth int } type parsedTermCacheItem struct { @@ -167,12 +179,19 @@ func (po *ParserOptions) EffectiveRegoVersion() RegoVersion { // NewParser creates and initializes a Parser. func NewParser() *Parser { p := &Parser{ - s: &state{}, - po: ParserOptions{}, + s: &state{}, + po: ParserOptions{}, + maxRecursionDepth: DefaultMaxParsingRecursionDepth, } return p } +// WithMaxRecursionDepth sets the maximum recursion depth for the parser. +func (p *Parser) WithMaxRecursionDepth(depth int) *Parser { + p.maxRecursionDepth = depth + return p +} + // WithFilename provides the filename for Location details // on parsed statements. func (p *Parser) WithFilename(filename string) *Parser { @@ -330,9 +349,7 @@ func (p *Parser) Parse() ([]Statement, []*Comment, Errors) { } // rego-v1 includes all v0 future keywords in the default language definition - for k, v := range futureKeywordsV0 { - allowedFutureKeywords[k] = v - } + maps.Copy(allowedFutureKeywords, futureKeywordsV0) for _, kw := range p.po.Capabilities.FutureKeywords { if tok, ok := futureKeywords[kw]; ok { @@ -380,9 +397,7 @@ func (p *Parser) Parse() ([]Statement, []*Comment, Errors) { if p.po.Capabilities.ContainsFeature(FeatureRegoV1) { // rego-v1 includes all v0 future keywords in the default language definition - for k, v := range futureKeywordsV0 { - allowedFutureKeywords[k] = v - } + maps.Copy(allowedFutureKeywords, futureKeywordsV0) } } @@ -400,9 +415,7 @@ func (p *Parser) Parse() ([]Statement, []*Comment, Errors) { selected := map[string]tokens.Token{} if p.po.AllFutureKeywords || p.po.EffectiveRegoVersion() == RegoV1 { - for kw, tok := range allowedFutureKeywords { - selected[kw] = tok - } + 
maps.Copy(selected, allowedFutureKeywords) } else { for _, kw := range p.po.FutureKeywords { tok, ok := allowedFutureKeywords[kw] @@ -979,7 +992,7 @@ func (p *Parser) parseHead(defaultRule bool) (*Head, bool) { ref = y } head = RefHead(ref) - head.Args = append([]*Term{}, args...) + head.Args = slices.Clone[[]*Term](args) default: return nil, false @@ -1035,6 +1048,10 @@ func (p *Parser) parseHead(defaultRule bool) (*Head, bool) { } func (p *Parser) parseBody(end tokens.Token) Body { + if !p.enter() { + return nil + } + defer p.leave() return p.parseQuery(false, end) } @@ -1360,10 +1377,20 @@ func (p *Parser) parseExpr() *Expr { // other binary operators (|, &, arithmetics), it constitutes the binding // precedence. func (p *Parser) parseTermInfixCall() *Term { + if !p.enter() { + return nil + } + defer p.leave() + return p.parseTermIn(nil, true, p.s.loc.Offset) } func (p *Parser) parseTermInfixCallInList() *Term { + if !p.enter() { + return nil + } + defer p.leave() + return p.parseTermIn(nil, false, p.s.loc.Offset) } @@ -1373,6 +1400,11 @@ var memberWithKeyRef = MemberWithKey.Ref() var memberRef = Member.Ref() func (p *Parser) parseTermIn(lhs *Term, keyVal bool, offset int) *Term { + if !p.enter() { + return nil + } + defer p.leave() + // NOTE(sr): `in` is a bit special: besides `lhs in rhs`, it also // supports `key, val in rhs`, so it can have an optional second lhs. // `keyVal` triggers if we attempt to parse a second lhs argument (`mhs`). 
@@ -1415,6 +1447,11 @@ func (p *Parser) parseTermIn(lhs *Term, keyVal bool, offset int) *Term { } func (p *Parser) parseTermRelation(lhs *Term, offset int) *Term { + if !p.enter() { + return nil + } + defer p.leave() + if lhs == nil { lhs = p.parseTermOr(nil, offset) } @@ -1435,6 +1472,11 @@ func (p *Parser) parseTermRelation(lhs *Term, offset int) *Term { } func (p *Parser) parseTermOr(lhs *Term, offset int) *Term { + if !p.enter() { + return nil + } + defer p.leave() + if lhs == nil { lhs = p.parseTermAnd(nil, offset) } @@ -1456,6 +1498,11 @@ func (p *Parser) parseTermOr(lhs *Term, offset int) *Term { } func (p *Parser) parseTermAnd(lhs *Term, offset int) *Term { + if !p.enter() { + return nil + } + defer p.leave() + if lhs == nil { lhs = p.parseTermArith(nil, offset) } @@ -1477,6 +1524,11 @@ func (p *Parser) parseTermAnd(lhs *Term, offset int) *Term { } func (p *Parser) parseTermArith(lhs *Term, offset int) *Term { + if !p.enter() { + return nil + } + defer p.leave() + if lhs == nil { lhs = p.parseTermFactor(nil, offset) } @@ -1497,6 +1549,11 @@ func (p *Parser) parseTermArith(lhs *Term, offset int) *Term { } func (p *Parser) parseTermFactor(lhs *Term, offset int) *Term { + if !p.enter() { + return nil + } + defer p.leave() + if lhs == nil { lhs = p.parseTerm() } @@ -1517,6 +1574,11 @@ func (p *Parser) parseTermFactor(lhs *Term, offset int) *Term { } func (p *Parser) parseTerm() *Term { + if !p.enter() { + return nil + } + defer p.leave() + if term, s := p.parsedTermCacheLookup(); s != nil { p.restore(s) return term @@ -1669,6 +1731,10 @@ func (p *Parser) parseRawString() *Term { var setConstructor = RefTerm(VarTerm("set")) func (p *Parser) parseCall(operator *Term, offset int) (term *Term) { + if !p.enter() { + return nil + } + defer p.leave() loc := operator.Location var end int @@ -1698,6 +1764,10 @@ func (p *Parser) parseCall(operator *Term, offset int) (term *Term) { } func (p *Parser) parseRef(head *Term, offset int) (term *Term) { + if !p.enter() { + 
return nil + } + defer p.leave() loc := head.Location var end int @@ -1763,6 +1833,10 @@ func (p *Parser) parseRef(head *Term, offset int) (term *Term) { } func (p *Parser) parseArray() (term *Term) { + if !p.enter() { + return nil + } + defer p.leave() loc := p.s.Loc() offset := p.s.loc.Offset @@ -1808,7 +1882,7 @@ func (p *Parser) parseArray() (term *Term) { case tokens.Comma: p.scan() if terms := p.parseTermList(tokens.RBrack, []*Term{head}); terms != nil { - return NewTerm(NewArray(terms...)) + return ArrayTerm(terms...) } return nil case tokens.Or: @@ -1828,12 +1902,17 @@ func (p *Parser) parseArray() (term *Term) { p.restore(s) if terms := p.parseTermList(tokens.RBrack, nil); terms != nil { - return NewTerm(NewArray(terms...)) + return ArrayTerm(terms...) } return nil } func (p *Parser) parseSetOrObject() (term *Term) { + if !p.enter() { + return nil + } + defer p.leave() + loc := p.s.Loc() offset := p.s.loc.Offset @@ -1900,6 +1979,11 @@ func (p *Parser) parseSetOrObject() (term *Term) { } func (p *Parser) parseSet(s *state, head *Term, potentialComprehension bool) *Term { + if !p.enter() { + return nil + } + defer p.leave() + switch p.s.tok { case tokens.RBrace: return SetTerm(head) @@ -1929,6 +2013,11 @@ func (p *Parser) parseSet(s *state, head *Term, potentialComprehension bool) *Te } func (p *Parser) parseObject(k *Term, potentialComprehension bool) *Term { + if !p.enter() { + return nil + } + defer p.leave() + // NOTE(tsandall): Assumption: this function is called after parsing the key // of the head element and then receiving a colon token from the scanner. // Advance beyond the colon and attempt to parse an object. 
@@ -1982,6 +2071,11 @@ func (p *Parser) parseObject(k *Term, potentialComprehension bool) *Term { } func (p *Parser) parseObjectFinish(key, val *Term, potentialComprehension bool) *Term { + if !p.enter() { + return nil + } + defer p.leave() + switch p.s.tok { case tokens.RBrace: return ObjectTerm([2]*Term{key, val}) @@ -2065,28 +2159,24 @@ func (p *Parser) parseTermPairList(end tokens.Token, r [][2]*Term) [][2]*Term { } func (p *Parser) parseTermOp(values ...tokens.Token) *Term { - for i := range values { - if p.s.tok == values[i] { - r := RefTerm(VarTerm(p.s.tok.String()).SetLocation(p.s.Loc())).SetLocation(p.s.Loc()) - p.scan() - return r - } + if slices.Contains(values, p.s.tok) { + r := RefTerm(VarTerm(p.s.tok.String()).SetLocation(p.s.Loc())).SetLocation(p.s.Loc()) + p.scan() + return r } return nil } func (p *Parser) parseTermOpName(ref Ref, values ...tokens.Token) *Term { - for i := range values { - if p.s.tok == values[i] { - cp := ref.Copy() - for _, r := range cp { - r.SetLocation(p.s.Loc()) - } - t := RefTerm(cp...) - t.SetLocation(p.s.Loc()) - p.scan() - return t + if slices.Contains(values, p.s.tok) { + cp := ref.Copy() + for _, r := range cp { + r.SetLocation(p.s.Loc()) } + t := RefTerm(cp...) 
+ t.SetLocation(p.s.Loc()) + p.scan() + return t } return nil } @@ -2115,7 +2205,7 @@ func (p *Parser) error(loc *location.Location, reason string) { p.errorf(loc, reason) //nolint:govet } -func (p *Parser) errorf(loc *location.Location, f string, a ...interface{}) { +func (p *Parser) errorf(loc *location.Location, f string, a ...any) { msg := strings.Builder{} msg.WriteString(fmt.Sprintf(f, a...)) @@ -2145,11 +2235,11 @@ func (p *Parser) errorf(loc *location.Location, f string, a ...interface{}) { p.s.hints = nil } -func (p *Parser) hint(f string, a ...interface{}) { +func (p *Parser) hint(f string, a ...any) { p.s.hints = append(p.s.hints, fmt.Sprintf(f, a...)) } -func (p *Parser) illegal(note string, a ...interface{}) { +func (p *Parser) illegal(note string, a ...any) { tok := p.s.tok.String() if p.s.tok == tokens.Illegal { @@ -2251,8 +2341,8 @@ func (p *Parser) restore(s *state) { p.s = s } -func setLocRecursive(x interface{}, loc *location.Location) { - NewGenericVisitor(func(x interface{}) bool { +func setLocRecursive(x any, loc *location.Location) { + NewGenericVisitor(func(x any) bool { if node, ok := x.(Node); ok { node.SetLoc(loc) } @@ -2276,7 +2366,7 @@ func (p *Parser) validateDefaultRuleValue(rule *Rule) bool { } valid := true - vis := NewGenericVisitor(func(x interface{}) bool { + vis := NewGenericVisitor(func(x any) bool { switch x.(type) { case *ArrayComprehension, *ObjectComprehension, *SetComprehension: // skip closures return true @@ -2297,7 +2387,7 @@ func (p *Parser) validateDefaultRuleArgs(rule *Rule) bool { valid := true vars := NewVarSet() - vis := NewGenericVisitor(func(x interface{}) bool { + vis := NewGenericVisitor(func(x any) bool { switch x := x.(type) { case Var: if vars.Contains(x) { @@ -2327,15 +2417,15 @@ func (p *Parser) validateDefaultRuleArgs(rule *Rule) bool { // We explicitly use yaml unmarshalling, to accommodate for the '_' in 'related_resources', // which isn't handled properly by json for some reason. 
type rawAnnotation struct { - Scope string `yaml:"scope"` - Title string `yaml:"title"` - Entrypoint bool `yaml:"entrypoint"` - Description string `yaml:"description"` - Organizations []string `yaml:"organizations"` - RelatedResources []interface{} `yaml:"related_resources"` - Authors []interface{} `yaml:"authors"` - Schemas []map[string]any `yaml:"schemas"` - Custom map[string]interface{} `yaml:"custom"` + Scope string `yaml:"scope"` + Title string `yaml:"title"` + Entrypoint bool `yaml:"entrypoint"` + Description string `yaml:"description"` + Organizations []string `yaml:"organizations"` + RelatedResources []any `yaml:"related_resources"` + Authors []any `yaml:"authors"` + Schemas []map[string]any `yaml:"schemas"` + Custom map[string]any `yaml:"custom"` } type metadataParser struct { @@ -2440,7 +2530,7 @@ func (b *metadataParser) Parse() (*Annotations, error) { result.Authors = append(result.Authors, author) } - result.Custom = make(map[string]interface{}) + result.Custom = make(map[string]any) for k, v := range raw.Custom { val, err := convertYAMLMapKeyTypes(v, nil) if err != nil { @@ -2503,7 +2593,7 @@ func augmentYamlError(err error, comments []*Comment) error { return err } -func unwrapPair(pair map[string]interface{}) (string, interface{}) { +func unwrapPair(pair map[string]any) (string, any) { for k, v := range pair { return k, v } @@ -2534,7 +2624,7 @@ func parseSchemaRef(s string) (Ref, error) { return nil, errInvalidSchemaRef } -func parseRelatedResource(rr interface{}) (*RelatedResourceAnnotation, error) { +func parseRelatedResource(rr any) (*RelatedResourceAnnotation, error) { rr, err := convertYAMLMapKeyTypes(rr, nil) if err != nil { return nil, err @@ -2550,7 +2640,7 @@ func parseRelatedResource(rr interface{}) (*RelatedResourceAnnotation, error) { return &RelatedResourceAnnotation{Ref: *u}, nil } return nil, errors.New("ref URL may not be empty string") - case map[string]interface{}: + case map[string]any: description := 
strings.TrimSpace(getSafeString(rr, "description")) ref := strings.TrimSpace(getSafeString(rr, "ref")) if len(ref) > 0 { @@ -2566,7 +2656,7 @@ func parseRelatedResource(rr interface{}) (*RelatedResourceAnnotation, error) { return nil, errors.New("invalid value type, must be string or map") } -func parseAuthor(a interface{}) (*AuthorAnnotation, error) { +func parseAuthor(a any) (*AuthorAnnotation, error) { a, err := convertYAMLMapKeyTypes(a, nil) if err != nil { return nil, err @@ -2575,7 +2665,7 @@ func parseAuthor(a interface{}) (*AuthorAnnotation, error) { switch a := a.(type) { case string: return parseAuthorString(a) - case map[string]interface{}: + case map[string]any: name := strings.TrimSpace(getSafeString(a, "name")) email := strings.TrimSpace(getSafeString(a, "email")) if len(name) > 0 || len(email) > 0 { @@ -2587,7 +2677,7 @@ func parseAuthor(a interface{}) (*AuthorAnnotation, error) { return nil, errors.New("invalid value type, must be string or map") } -func getSafeString(m map[string]interface{}, k string) string { +func getSafeString(m map[string]any, k string) string { if v, found := m[k]; found { if s, ok := v.(string); ok { return s @@ -2688,7 +2778,7 @@ func IsFutureKeywordForRegoVersion(s string, v RegoVersion) bool { func (p *Parser) futureImport(imp *Import, allowedFutureKeywords map[string]tokens.Token) { path := imp.Path.Value.(Ref) - if len(path) == 1 || !path[1].Equal(keywordsTerm) { + if len(path) == 1 || !path[1].Equal(InternedStringTerm("keywords")) { p.errorf(imp.Path.Location, "invalid import, must be `future.keywords`") return } @@ -2764,10 +2854,24 @@ func (p *Parser) regoV1Import(imp *Import) { func init() { allFutureKeywords = map[string]tokens.Token{} - for k, v := range futureKeywords { - allFutureKeywords[k] = v - } - for k, v := range futureKeywordsV0 { - allFutureKeywords[k] = v - } + maps.Copy(allFutureKeywords, futureKeywords) + maps.Copy(allFutureKeywords, futureKeywordsV0) +} + +// enter increments the recursion depth 
counter and checks if it exceeds the maximum. +// Returns false if the maximum is exceeded, true otherwise. +// If p.maxRecursionDepth is 0 or negative, the check is effectively disabled. +func (p *Parser) enter() bool { + p.recursionDepth++ + if p.maxRecursionDepth > 0 && p.recursionDepth > p.maxRecursionDepth { + p.error(p.s.Loc(), ErrMaxParsingRecursionDepthExceeded.Error()) + p.recursionDepth-- + return false + } + return true +} + +// leave decrements the recursion depth counter. +func (p *Parser) leave() { + p.recursionDepth-- } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/parser_ext.go b/vendor/github.com/open-policy-agent/opa/v1/ast/parser_ext.go index dec06f196..42b050369 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/parser_ext.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/parser_ext.go @@ -14,6 +14,7 @@ import ( "bytes" "errors" "fmt" + "slices" "strings" "unicode" @@ -731,12 +732,7 @@ func parseModule(filename string, stmts []Statement, comments []*Comment, regoCo } func ruleDeclarationHasKeyword(rule *Rule, keyword tokens.Token) bool { - for _, kw := range rule.Head.keywords { - if kw == keyword { - return true - } - } - return false + return slices.Contains(rule.Head.keywords, keyword) } func newScopeAttachmentErr(a *Annotations, want string) *Error { @@ -809,10 +805,7 @@ func newParserErrorDetail(bs []byte, offset int) *ParserErrorDetail { func (d ParserErrorDetail) Lines() []string { line := strings.TrimLeft(d.Line, "\t") // remove leading tabs tabCount := len(d.Line) - len(line) - indent := d.Idx - tabCount - if indent < 0 { - indent = 0 - } + indent := max(d.Idx-tabCount, 0) return []string{line, strings.Repeat(" ", indent) + "^"} } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/policy.go b/vendor/github.com/open-policy-agent/opa/v1/ast/policy.go index 978de9441..fd669f1e7 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/policy.go +++ 
b/vendor/github.com/open-policy-agent/opa/v1/ast/policy.go @@ -143,12 +143,7 @@ func IsKeyword(s string) bool { } func IsInKeywords(s string, keywords []string) bool { - for _, x := range keywords { - if x == s { - return true - } - } - return false + return slices.Contains(keywords, s) } // IsKeywordInRegoVersion returns true if s is a language keyword. @@ -265,12 +260,12 @@ type ( // Expr represents a single expression contained inside the body of a rule. Expr struct { - With []*With `json:"with,omitempty"` - Terms interface{} `json:"terms"` - Index int `json:"index"` - Generated bool `json:"generated,omitempty"` - Negated bool `json:"negated,omitempty"` - Location *Location `json:"location,omitempty"` + With []*With `json:"with,omitempty"` + Terms any `json:"terms"` + Index int `json:"index"` + Generated bool `json:"generated,omitempty"` + Negated bool `json:"negated,omitempty"` + Location *Location `json:"location,omitempty"` generatedFrom *Expr generates []*Expr @@ -537,7 +532,7 @@ func (pkg *Package) String() string { } func (pkg *Package) MarshalJSON() ([]byte, error) { - data := map[string]interface{}{ + data := map[string]any{ "path": pkg.Path, } @@ -644,7 +639,7 @@ func (imp *Import) String() string { } func (imp *Import) MarshalJSON() ([]byte, error) { - data := map[string]interface{}{ + data := map[string]any{ "path": imp.Path, } @@ -792,7 +787,7 @@ func (rule *Rule) isFunction() bool { } func (rule *Rule) MarshalJSON() ([]byte, error) { - data := map[string]interface{}{ + data := map[string]any{ "head": rule.Head, "body": rule.Body, } @@ -1168,10 +1163,7 @@ func (body Body) Set(expr *Expr, pos int) { // // If body is a subset of other, it is considered less than (and vice versa). 
func (body Body) Compare(other Body) int { - minLen := len(body) - if len(other) < minLen { - minLen = len(other) - } + minLen := min(len(other), len(body)) for i := range minLen { if cmp := body[i].Compare(other[i]); cmp != 0 { return cmp @@ -1256,7 +1248,7 @@ func (body Body) Vars(params VarVisitorParams) VarSet { } // NewExpr returns a new Expr object. -func NewExpr(terms interface{}) *Expr { +func NewExpr(terms any) *Expr { switch terms.(type) { case *SomeDecl, *Every, *Term, []*Term: // ok default: @@ -1578,7 +1570,7 @@ func (expr *Expr) String() string { } func (expr *Expr) MarshalJSON() ([]byte, error) { - data := map[string]interface{}{ + data := map[string]any{ "terms": expr.Terms, "index": expr.Index, } @@ -1606,7 +1598,7 @@ func (expr *Expr) MarshalJSON() ([]byte, error) { // UnmarshalJSON parses the byte array and stores the result in expr. func (expr *Expr) UnmarshalJSON(bs []byte) error { - v := map[string]interface{}{} + v := map[string]any{} if err := util.UnmarshalJSON(bs, &v); err != nil { return err } @@ -1710,7 +1702,7 @@ func (d *SomeDecl) Hash() int { } func (d *SomeDecl) MarshalJSON() ([]byte, error) { - data := map[string]interface{}{ + data := map[string]any{ "symbols": d.Symbols, } @@ -1780,7 +1772,7 @@ func (q *Every) KeyValueVars() VarSet { } func (q *Every) MarshalJSON() ([]byte, error) { - data := map[string]interface{}{ + data := map[string]any{ "key": q.Key, "value": q.Value, "domain": q.Domain, @@ -1855,7 +1847,7 @@ func (w *With) SetLoc(loc *Location) { } func (w *With) MarshalJSON() ([]byte, error) { - data := map[string]interface{}{ + data := map[string]any{ "target": w.Target, "value": w.Value, } @@ -1870,7 +1862,7 @@ func (w *With) MarshalJSON() ([]byte, error) { } // Copy returns a deep copy of the AST node x. If x is not an AST node, x is returned unmodified. 
-func Copy(x interface{}) interface{} { +func Copy(x any) any { switch x := x.(type) { case *Module: return x.Copy() diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/pretty.go b/vendor/github.com/open-policy-agent/opa/v1/ast/pretty.go index b4f05ad50..aa34f3747 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/pretty.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/pretty.go @@ -13,7 +13,7 @@ import ( // Pretty writes a pretty representation of the AST rooted at x to w. // // This is function is intended for debug purposes when inspecting ASTs. -func Pretty(w io.Writer, x interface{}) { +func Pretty(w io.Writer, x any) { pp := &prettyPrinter{ depth: -1, w: w, @@ -26,7 +26,7 @@ type prettyPrinter struct { w io.Writer } -func (pp *prettyPrinter) Before(x interface{}) bool { +func (pp *prettyPrinter) Before(x any) bool { switch x.(type) { case *Term: default: @@ -56,7 +56,7 @@ func (pp *prettyPrinter) Before(x interface{}) bool { return false } -func (pp *prettyPrinter) After(x interface{}) { +func (pp *prettyPrinter) After(x any) { switch x.(type) { case *Term: default: @@ -64,19 +64,19 @@ func (pp *prettyPrinter) After(x interface{}) { } } -func (pp *prettyPrinter) writeValue(x interface{}) { +func (pp *prettyPrinter) writeValue(x any) { pp.writeIndent(fmt.Sprint(x)) } -func (pp *prettyPrinter) writeType(x interface{}) { +func (pp *prettyPrinter) writeType(x any) { pp.writeIndent(TypeName(x)) } -func (pp *prettyPrinter) writeIndent(f string, a ...interface{}) { +func (pp *prettyPrinter) writeIndent(f string, a ...any) { pad := strings.Repeat(" ", pp.depth) pp.write(pad+f, a...) } -func (pp *prettyPrinter) write(f string, a ...interface{}) { +func (pp *prettyPrinter) write(f string, a ...any) { fmt.Fprintf(pp.w, f+"\n", a...) 
} diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/rego_v1.go b/vendor/github.com/open-policy-agent/opa/v1/ast/rego_v1.go index 8b757ecc3..883e026e1 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/rego_v1.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/rego_v1.go @@ -23,7 +23,7 @@ func checkDuplicateImports(modules []*Module) (errors Errors) { return } -func checkRootDocumentOverrides(node interface{}) Errors { +func checkRootDocumentOverrides(node any) Errors { errors := Errors{} WalkRules(node, func(rule *Rule) bool { @@ -64,8 +64,8 @@ func checkRootDocumentOverrides(node interface{}) Errors { return errors } -func walkCalls(node interface{}, f func(interface{}) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func walkCalls(node any, f func(any) bool) { + vis := &GenericVisitor{func(x any) bool { switch x := x.(type) { case Call: return f(x) @@ -82,10 +82,10 @@ func walkCalls(node interface{}, f func(interface{}) bool) { vis.Walk(node) } -func checkDeprecatedBuiltins(deprecatedBuiltinsMap map[string]struct{}, node interface{}) Errors { +func checkDeprecatedBuiltins(deprecatedBuiltinsMap map[string]struct{}, node any) Errors { errs := make(Errors, 0) - walkCalls(node, func(x interface{}) bool { + walkCalls(node, func(x any) bool { var operator string var loc *Location @@ -113,7 +113,7 @@ func checkDeprecatedBuiltins(deprecatedBuiltinsMap map[string]struct{}, node int return errs } -func checkDeprecatedBuiltinsForCurrentVersion(node interface{}) Errors { +func checkDeprecatedBuiltinsForCurrentVersion(node any) Errors { deprecatedBuiltins := make(map[string]struct{}) capabilities := CapabilitiesForThisVersion() for _, bi := range capabilities.Builtins { @@ -150,11 +150,11 @@ func NewRegoCheckOptions() RegoCheckOptions { // CheckRegoV1 checks the given module or rule for errors that are specific to Rego v1. // Passing something other than an *ast.Rule or *ast.Module is considered a programming error, and will cause a panic. 
-func CheckRegoV1(x interface{}) Errors { +func CheckRegoV1(x any) Errors { return CheckRegoV1WithOptions(x, NewRegoCheckOptions()) } -func CheckRegoV1WithOptions(x interface{}, opts RegoCheckOptions) Errors { +func CheckRegoV1WithOptions(x any, opts RegoCheckOptions) Errors { switch x := x.(type) { case *Module: return checkRegoV1Module(x, opts) diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/strings.go b/vendor/github.com/open-policy-agent/opa/v1/ast/strings.go index 40d66753f..844752241 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/strings.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/strings.go @@ -10,7 +10,7 @@ import ( ) // TypeName returns a human readable name for the AST element type. -func TypeName(x interface{}) string { +func TypeName(x any) string { if _, ok := x.(*lazyObj); ok { return "object" } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/term.go b/vendor/github.com/open-policy-agent/opa/v1/ast/term.go index e0fda51e8..0013a6b13 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/term.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/term.go @@ -19,9 +19,9 @@ import ( "strconv" "strings" "sync" + "unicode" "github.com/cespare/xxhash/v2" - astJSON "github.com/open-policy-agent/opa/v1/ast/json" "github.com/open-policy-agent/opa/v1/ast/location" "github.com/open-policy-agent/opa/v1/util" @@ -54,7 +54,7 @@ type Value interface { } // InterfaceToValue converts a native Go value x to a Value. -func InterfaceToValue(x interface{}) (Value, error) { +func InterfaceToValue(x any) (Value, error) { switch x := x.(type) { case Value: return x, nil @@ -127,7 +127,7 @@ func InterfaceToValue(x interface{}) (Value, error) { // ValueFromReader returns an AST value from a JSON serialized value in the reader. 
func ValueFromReader(r io.Reader) (Value, error) { - var x interface{} + var x any if err := util.NewJSONDecoder(r).Decode(&x); err != nil { return nil, err } @@ -135,13 +135,13 @@ func ValueFromReader(r io.Reader) (Value, error) { } // As converts v into a Go native type referred to by x. -func As(v Value, x interface{}) error { +func As(v Value, x any) error { return util.NewJSONDecoder(strings.NewReader(v.String())).Decode(x) } // Resolver defines the interface for resolving references to native Go values. type Resolver interface { - Resolve(Ref) (interface{}, error) + Resolve(Ref) (any, error) } // ValueResolver defines the interface for resolving references to AST values. @@ -165,18 +165,18 @@ func IsUnknownValueErr(err error) bool { type illegalResolver struct{} -func (illegalResolver) Resolve(ref Ref) (interface{}, error) { +func (illegalResolver) Resolve(ref Ref) (any, error) { return nil, fmt.Errorf("illegal value: %v", ref) } // ValueToInterface returns the Go representation of an AST value. The AST // value should not contain any values that require evaluation (e.g., vars, // comprehensions, etc.) 
-func ValueToInterface(v Value, resolver Resolver) (interface{}, error) { +func ValueToInterface(v Value, resolver Resolver) (any, error) { return valueToInterface(v, resolver, JSONOpt{}) } -func valueToInterface(v Value, resolver Resolver, opt JSONOpt) (interface{}, error) { +func valueToInterface(v Value, resolver Resolver, opt JSONOpt) (any, error) { switch v := v.(type) { case Null: return nil, nil @@ -187,7 +187,7 @@ func valueToInterface(v Value, resolver Resolver, opt JSONOpt) (interface{}, err case String: return string(v), nil case *Array: - buf := []interface{}{} + buf := []any{} for i := range v.Len() { x1, err := valueToInterface(v.Elem(i).Value, resolver, opt) if err != nil { @@ -197,7 +197,7 @@ func valueToInterface(v Value, resolver Resolver, opt JSONOpt) (interface{}, err } return buf, nil case *object: - buf := make(map[string]interface{}, v.Len()) + buf := make(map[string]any, v.Len()) err := v.Iter(func(k, v *Term) error { ki, err := valueToInterface(k.Value, resolver, opt) if err != nil { @@ -229,7 +229,7 @@ func valueToInterface(v Value, resolver Resolver, opt JSONOpt) (interface{}, err } return v.native, nil case Set: - buf := []interface{}{} + buf := []any{} iter := func(x *Term) error { x1, err := valueToInterface(x.Value, resolver, opt) if err != nil { @@ -257,19 +257,19 @@ func valueToInterface(v Value, resolver Resolver, opt JSONOpt) (interface{}, err // JSON returns the JSON representation of v. The value must not contain any // refs or terms that require evaluation (e.g., vars, comprehensions, etc.) -func JSON(v Value) (interface{}, error) { +func JSON(v Value) (any, error) { return JSONWithOpt(v, JSONOpt{}) } // JSONOpt defines parameters for AST to JSON conversion. 
type JSONOpt struct { SortSets bool // sort sets before serializing (this makes conversion more expensive) - CopyMaps bool // enforces copying of map[string]interface{} read from the store + CopyMaps bool // enforces copying of map[string]any read from the store } // JSONWithOpt returns the JSON representation of v. The value must not contain any // refs or terms that require evaluation (e.g., vars, comprehensions, etc.) -func JSONWithOpt(v Value, opt JSONOpt) (interface{}, error) { +func JSONWithOpt(v Value, opt JSONOpt) (any, error) { return valueToInterface(v, illegalResolver{}, opt) } @@ -277,7 +277,7 @@ func JSONWithOpt(v Value, opt JSONOpt) (interface{}, error) { // refs or terms that require evaluation (e.g., vars, comprehensions, etc.) If // the conversion fails, this function will panic. This function is mostly for // test purposes. -func MustJSON(v Value) interface{} { +func MustJSON(v Value) any { r, err := JSON(v) if err != nil { panic(err) @@ -288,7 +288,7 @@ func MustJSON(v Value) interface{} { // MustInterfaceToValue converts a native Go value x to a Value. If the // conversion fails, this function will panic. This function is mostly for test // purposes. -func MustInterfaceToValue(x interface{}) Value { +func MustInterfaceToValue(x any) Value { v, err := InterfaceToValue(x) if err != nil { panic(err) @@ -410,7 +410,7 @@ func (term *Term) IsGround() bool { // // Specialized marshalling logic is required to include a type hint for Value. func (term *Term) MarshalJSON() ([]byte, error) { - d := map[string]interface{}{ + d := map[string]any{ "type": ValueName(term.Value), "value": term.Value, } @@ -430,7 +430,7 @@ func (term *Term) String() string { // UnmarshalJSON parses the byte array and stores the result in term. // Specialized unmarshalling is required to handle Value and Location. 
func (term *Term) UnmarshalJSON(bs []byte) error { - v := map[string]interface{}{} + v := map[string]any{} if err := util.UnmarshalJSON(bs, &v); err != nil { return err } @@ -440,7 +440,7 @@ func (term *Term) UnmarshalJSON(bs []byte) error { } term.Value = val - if loc, ok := v["location"].(map[string]interface{}); ok { + if loc, ok := v["location"].(map[string]any); ok { term.Location = &Location{} err := unmarshalLocation(term.Location, loc) if err != nil { @@ -461,7 +461,7 @@ func (term *Term) Vars() VarSet { func IsConstant(v Value) bool { found := false vis := GenericVisitor{ - func(x interface{}) bool { + func(x any) bool { switch x.(type) { case Var, Ref, *ArrayComprehension, *ObjectComprehension, *SetComprehension, Call: found = true @@ -484,7 +484,7 @@ func IsComprehension(x Value) bool { } // ContainsRefs returns true if the Value v contains refs. -func ContainsRefs(v interface{}) bool { +func ContainsRefs(v any) bool { found := false WalkRefs(v, func(Ref) bool { found = true @@ -494,9 +494,9 @@ func ContainsRefs(v interface{}) bool { } // ContainsComprehensions returns true if the Value v contains comprehensions. -func ContainsComprehensions(v interface{}) bool { +func ContainsComprehensions(v any) bool { found := false - WalkClosures(v, func(x interface{}) bool { + WalkClosures(v, func(x any) bool { switch x.(type) { case *ArrayComprehension, *ObjectComprehension, *SetComprehension: found = true @@ -508,9 +508,9 @@ func ContainsComprehensions(v interface{}) bool { } // ContainsClosures returns true if the Value v contains closures. 
-func ContainsClosures(v interface{}) bool { +func ContainsClosures(v any) bool { found := false - WalkClosures(v, func(x interface{}) bool { + WalkClosures(v, func(x any) bool { switch x.(type) { case *ArrayComprehension, *ObjectComprehension, *SetComprehension, *Every: found = true @@ -804,7 +804,7 @@ func (str String) Equal(other Value) bool { func (str String) Compare(other Value) int { // Optimize for the common case of one string being compared to another by // using a direct comparison of values. This avoids the allocation performed - // when calling Compare and its interface{} argument conversion. + // when calling Compare and its any argument conversion. if otherStr, ok := other.(String); ok { if str == otherStr { return 0 @@ -928,7 +928,7 @@ func PtrRef(head *Term, s string) (Ref, error) { } ref := make(Ref, uint(len(parts))+1) ref[0] = head - for i := 0; i < len(parts); i++ { + for i := range parts { var err error parts[i], err = url.PathUnescape(parts[i]) if err != nil { @@ -1180,11 +1180,17 @@ func (ref Ref) String() string { return "" } + if len(ref) == 1 { + switch p := ref[0].Value.(type) { + case Var: + return p.String() + } + } + sb := sbPool.Get() defer sbPool.Put(sb) sb.Grow(10 * len(ref)) - sb.WriteString(ref[0].Value.String()) for _, p := range ref[1:] { @@ -1195,9 +1201,17 @@ func (ref Ref) String() string { sb.WriteByte('.') sb.WriteString(str) } else { - sb.WriteString(`["`) - sb.WriteString(str) - sb.WriteString(`"]`) + sb.WriteByte('[') + // Determine whether we need the full JSON-escaped form + if strings.ContainsFunc(str, isControlOrBackslash) { + // only now pay the cost of expensive JSON-escaped form + sb.WriteString(p.String()) + } else { + sb.WriteByte('"') + sb.WriteString(str) + sb.WriteByte('"') + } + sb.WriteByte(']') } default: sb.WriteByte('[') @@ -1218,15 +1232,15 @@ func (ref Ref) OutputVars() VarSet { } func (ref Ref) toArray() *Array { - a := NewArray() + terms := make([]*Term, 0, len(ref)) for _, term := range ref { if _, 
ok := term.Value.(String); ok { - a = a.Append(term) + terms = append(terms, term) } else { - a = a.Append(StringTerm(term.Value.String())) + terms = append(terms, InternedStringTerm(term.Value.String())) } } - return a + return NewArray(terms...) } // QueryIterator defines the interface for querying AST documents with references. @@ -1470,12 +1484,7 @@ func (arr *Array) Iter(f func(*Term) error) error { // Until calls f on each element in arr. If f returns true, iteration stops. func (arr *Array) Until(f func(*Term) bool) bool { - for _, term := range arr.elems { - if f(term) { - return true - } - } - return false + return slices.ContainsFunc(arr.elems, f) } // Foreach calls f on each element in arr. @@ -1635,6 +1644,10 @@ func (s *set) Find(path Ref) (Value, error) { // Diff returns elements in s that are not in other. func (s *set) Diff(other Set) Set { + if s.Compare(other) == 0 { + return NewSet() + } + terms := make([]*Term, 0, len(s.keys)) for _, term := range s.sortedKeys() { if !other.Contains(term) { @@ -1693,12 +1706,7 @@ func (s *set) Iter(f func(*Term) error) error { // Until calls f on each element in s. If f returns true, iteration stops. func (s *set) Until(f func(*Term) bool) bool { - for _, term := range s.sortedKeys() { - if f(term) { - return true - } - } - return false + return slices.ContainsFunc(s.sortedKeys(), f) } // Foreach calls f on each element in s. 
@@ -2000,14 +2008,14 @@ func ObjectTerm(o ...[2]*Term) *Term { return &Term{Value: NewObject(o...)} } -func LazyObject(blob map[string]interface{}) Object { +func LazyObject(blob map[string]any) Object { return &lazyObj{native: blob, cache: map[string]Value{}} } type lazyObj struct { strict Object cache map[string]Value - native map[string]interface{} + native map[string]any } func (l *lazyObj) force() Object { @@ -2104,7 +2112,7 @@ func (l *lazyObj) Get(k *Term) *Term { if val, ok := l.native[string(s)]; ok { var converted Value switch val := val.(type) { - case map[string]interface{}: + case map[string]any: converted = LazyObject(val) default: converted = MustInterfaceToValue(val) @@ -2173,7 +2181,7 @@ func (l *lazyObj) Find(path Ref) (Value, error) { if v, ok := l.native[string(p0)]; ok { var converted Value switch v := v.(type) { - case map[string]interface{}: + case map[string]any: converted = LazyObject(v) default: converted = MustInterfaceToValue(v) @@ -2763,7 +2771,7 @@ func filterObject(o Value, filter Value) (Value, error) { case *Array: values := NewArray() for i := range v.Len() { - subFilter := filteredObj.Get(StringTerm(strconv.Itoa(i))) + subFilter := filteredObj.Get(InternedIntegerString(i)) if subFilter != nil { filteredValue, err := filterObject(v.Elem(i).Value, subFilter.Value) if err != nil { @@ -3101,6 +3109,11 @@ func termSliceIsGround(a []*Term) bool { return true } +// Detect when String() need to use expensive JSON‐escaped form +func isControlOrBackslash(r rune) bool { + return r == '\\' || unicode.IsControl(r) +} + // NOTE(tsandall): The unmarshalling errors in these functions are not // helpful for callers because they do not identify the source of the // unmarshalling error. Because OPA doesn't accept JSON describing ASTs @@ -3109,10 +3122,10 @@ func termSliceIsGround(a []*Term) bool { // on the happy path and treats all errors the same. If better error // reporting is needed, the error paths will need to be fleshed out. 
-func unmarshalBody(b []interface{}) (Body, error) { +func unmarshalBody(b []any) (Body, error) { buf := Body{} for _, e := range b { - if m, ok := e.(map[string]interface{}); ok { + if m, ok := e.(map[string]any); ok { expr := &Expr{} if err := unmarshalExpr(expr, m); err == nil { buf = append(buf, expr) @@ -3126,7 +3139,7 @@ unmarshal_error: return nil, errors.New("ast: unable to unmarshal body") } -func unmarshalExpr(expr *Expr, v map[string]interface{}) error { +func unmarshalExpr(expr *Expr, v map[string]any) error { if x, ok := v["negated"]; ok { if b, ok := x.(bool); ok { expr.Negated = b @@ -3146,13 +3159,13 @@ func unmarshalExpr(expr *Expr, v map[string]interface{}) error { return err } switch ts := v["terms"].(type) { - case map[string]interface{}: + case map[string]any: t, err := unmarshalTerm(ts) if err != nil { return err } expr.Terms = t - case []interface{}: + case []any: terms, err := unmarshalTermSlice(ts) if err != nil { return err @@ -3162,7 +3175,7 @@ func unmarshalExpr(expr *Expr, v map[string]interface{}) error { return fmt.Errorf(`ast: unable to unmarshal terms field with type: %T (expected {"value": ..., "type": ...} or [{"value": ..., "type": ...}, ...])`, v["terms"]) } if x, ok := v["with"]; ok { - if sl, ok := x.([]interface{}); ok { + if sl, ok := x.([]any); ok { ws := make([]*With, len(sl)) for i := range sl { var err error @@ -3174,7 +3187,7 @@ func unmarshalExpr(expr *Expr, v map[string]interface{}) error { expr.With = ws } } - if loc, ok := v["location"].(map[string]interface{}); ok { + if loc, ok := v["location"].(map[string]any); ok { expr.Location = &Location{} if err := unmarshalLocation(expr.Location, loc); err != nil { return err @@ -3183,7 +3196,7 @@ func unmarshalExpr(expr *Expr, v map[string]interface{}) error { return nil } -func unmarshalLocation(loc *Location, v map[string]interface{}) error { +func unmarshalLocation(loc *Location, v map[string]any) error { if x, ok := v["file"]; ok { if s, ok := x.(string); ok { loc.File 
= s @@ -3217,7 +3230,7 @@ func unmarshalLocation(loc *Location, v map[string]interface{}) error { return nil } -func unmarshalExprIndex(expr *Expr, v map[string]interface{}) error { +func unmarshalExprIndex(expr *Expr, v map[string]any) error { if x, ok := v["index"]; ok { if n, ok := x.(json.Number); ok { i, err := n.Int64() @@ -3230,7 +3243,7 @@ func unmarshalExprIndex(expr *Expr, v map[string]interface{}) error { return fmt.Errorf("ast: unable to unmarshal index field with type: %T (expected integer)", v["index"]) } -func unmarshalTerm(m map[string]interface{}) (*Term, error) { +func unmarshalTerm(m map[string]any) (*Term, error) { var term Term v, err := unmarshalValue(m) @@ -3239,7 +3252,7 @@ func unmarshalTerm(m map[string]interface{}) (*Term, error) { } term.Value = v - if loc, ok := m["location"].(map[string]interface{}); ok { + if loc, ok := m["location"].(map[string]any); ok { term.Location = &Location{} if err := unmarshalLocation(term.Location, loc); err != nil { return nil, err @@ -3249,10 +3262,10 @@ func unmarshalTerm(m map[string]interface{}) (*Term, error) { return &term, nil } -func unmarshalTermSlice(s []interface{}) ([]*Term, error) { +func unmarshalTermSlice(s []any) ([]*Term, error) { buf := []*Term{} for _, x := range s { - if m, ok := x.(map[string]interface{}); ok { + if m, ok := x.(map[string]any); ok { t, err := unmarshalTerm(m) if err == nil { buf = append(buf, t) @@ -3265,19 +3278,19 @@ func unmarshalTermSlice(s []interface{}) ([]*Term, error) { return buf, nil } -func unmarshalTermSliceValue(d map[string]interface{}) ([]*Term, error) { - if s, ok := d["value"].([]interface{}); ok { +func unmarshalTermSliceValue(d map[string]any) ([]*Term, error) { + if s, ok := d["value"].([]any); ok { return unmarshalTermSlice(s) } return nil, errors.New(`ast: unable to unmarshal term (expected {"value": [...], "type": ...} where type is one of: ref, array, or set)`) } -func unmarshalWith(i interface{}) (*With, error) { - if m, ok := 
i.(map[string]interface{}); ok { - tgt, _ := m["target"].(map[string]interface{}) +func unmarshalWith(i any) (*With, error) { + if m, ok := i.(map[string]any); ok { + tgt, _ := m["target"].(map[string]any) target, err := unmarshalTerm(tgt) if err == nil { - val, _ := m["value"].(map[string]interface{}) + val, _ := m["value"].(map[string]any) value, err := unmarshalTerm(val) if err == nil { return &With{ @@ -3292,7 +3305,7 @@ func unmarshalWith(i interface{}) (*With, error) { return nil, errors.New(`ast: unable to unmarshal with modifier (expected {"target": {...}, "value": {...}})`) } -func unmarshalValue(d map[string]interface{}) (Value, error) { +func unmarshalValue(d map[string]any) (Value, error) { v := d["value"] switch d["type"] { case "null": @@ -3326,10 +3339,10 @@ func unmarshalValue(d map[string]interface{}) (Value, error) { return NewSet(s...), nil } case "object": - if s, ok := v.([]interface{}); ok { + if s, ok := v.([]any); ok { buf := NewObject() for _, x := range s { - if i, ok := x.([]interface{}); ok && len(i) == 2 { + if i, ok := x.([]any); ok && len(i) == 2 { p, err := unmarshalTermSlice(i) if err == nil { buf.Insert(p[0], p[1]) @@ -3341,8 +3354,8 @@ func unmarshalValue(d map[string]interface{}) (Value, error) { return buf, nil } case "arraycomprehension", "setcomprehension": - if m, ok := v.(map[string]interface{}); ok { - t, ok := m["term"].(map[string]interface{}) + if m, ok := v.(map[string]any); ok { + t, ok := m["term"].(map[string]any) if !ok { goto unmarshal_error } @@ -3352,7 +3365,7 @@ func unmarshalValue(d map[string]interface{}) (Value, error) { goto unmarshal_error } - b, ok := m["body"].([]interface{}) + b, ok := m["body"].([]any) if !ok { goto unmarshal_error } @@ -3368,8 +3381,8 @@ func unmarshalValue(d map[string]interface{}) (Value, error) { return &SetComprehension{Term: term, Body: body}, nil } case "objectcomprehension": - if m, ok := v.(map[string]interface{}); ok { - k, ok := m["key"].(map[string]interface{}) + if m, ok := 
v.(map[string]any); ok { + k, ok := m["key"].(map[string]any) if !ok { goto unmarshal_error } @@ -3379,7 +3392,7 @@ func unmarshalValue(d map[string]interface{}) (Value, error) { goto unmarshal_error } - v, ok := m["value"].(map[string]interface{}) + v, ok := m["value"].(map[string]any) if !ok { goto unmarshal_error } @@ -3389,7 +3402,7 @@ func unmarshalValue(d map[string]interface{}) (Value, error) { goto unmarshal_error } - b, ok := m["body"].([]interface{}) + b, ok := m["body"].([]any) if !ok { goto unmarshal_error } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/transform.go b/vendor/github.com/open-policy-agent/opa/v1/ast/transform.go index e8c9ddcab..197ab6457 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/transform.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/transform.go @@ -13,12 +13,12 @@ import ( // be set to nil and no transformations will be applied to children of the // element. type Transformer interface { - Transform(interface{}) (interface{}, error) + Transform(any) (any, error) } // Transform iterates the AST and calls the Transform function on the // Transformer t for x before recursing. -func Transform(t Transformer, x interface{}) (interface{}, error) { +func Transform(t Transformer, x any) (any, error) { if term, ok := x.(*Term); ok { return Transform(t, term.Value) @@ -290,8 +290,8 @@ func Transform(t Transformer, x interface{}) (interface{}, error) { } // TransformRefs calls the function f on all references under x. -func TransformRefs(x interface{}, f func(Ref) (Value, error)) (interface{}, error) { - t := &GenericTransformer{func(x interface{}) (interface{}, error) { +func TransformRefs(x any, f func(Ref) (Value, error)) (any, error) { + t := &GenericTransformer{func(x any) (any, error) { if r, ok := x.(Ref); ok { return f(r) } @@ -301,8 +301,8 @@ func TransformRefs(x interface{}, f func(Ref) (Value, error)) (interface{}, erro } // TransformVars calls the function f on all vars under x. 
-func TransformVars(x interface{}, f func(Var) (Value, error)) (interface{}, error) { - t := &GenericTransformer{func(x interface{}) (interface{}, error) { +func TransformVars(x any, f func(Var) (Value, error)) (any, error) { + t := &GenericTransformer{func(x any) (any, error) { if v, ok := x.(Var); ok { return f(v) } @@ -312,8 +312,8 @@ func TransformVars(x interface{}, f func(Var) (Value, error)) (interface{}, erro } // TransformComprehensions calls the functio nf on all comprehensions under x. -func TransformComprehensions(x interface{}, f func(interface{}) (Value, error)) (interface{}, error) { - t := &GenericTransformer{func(x interface{}) (interface{}, error) { +func TransformComprehensions(x any, f func(any) (Value, error)) (any, error) { + t := &GenericTransformer{func(x any) (any, error) { switch x := x.(type) { case *ArrayComprehension: return f(x) @@ -330,19 +330,19 @@ func TransformComprehensions(x interface{}, f func(interface{}) (Value, error)) // GenericTransformer implements the Transformer interface to provide a utility // to transform AST nodes using a closure. type GenericTransformer struct { - f func(interface{}) (interface{}, error) + f func(any) (any, error) } // NewGenericTransformer returns a new GenericTransformer that will transform // AST nodes using the function f. -func NewGenericTransformer(f func(x interface{}) (interface{}, error)) *GenericTransformer { +func NewGenericTransformer(f func(x any) (any, error)) *GenericTransformer { return &GenericTransformer{ f: f, } } // Transform calls the function f on the GenericTransformer. 
-func (t *GenericTransformer) Transform(x interface{}) (interface{}, error) { +func (t *GenericTransformer) Transform(x any) (any, error) { return t.f(x) } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ast/visit.go b/vendor/github.com/open-policy-agent/opa/v1/ast/visit.go index 0115c4f45..16567014f 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ast/visit.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ast/visit.go @@ -10,7 +10,7 @@ package ast // visited. // Deprecated: use GenericVisitor or another visitor implementation type Visitor interface { - Visit(v interface{}) (w Visitor) + Visit(v any) (w Visitor) } // BeforeAndAfterVisitor wraps Visitor to provide hooks for being called before @@ -18,14 +18,14 @@ type Visitor interface { // Deprecated: use GenericVisitor or another visitor implementation type BeforeAndAfterVisitor interface { Visitor - Before(x interface{}) - After(x interface{}) + Before(x any) + After(x any) } // Walk iterates the AST by calling the Visit function on the Visitor // v for x before recursing. // Deprecated: use GenericVisitor.Walk -func Walk(v Visitor, x interface{}) { +func Walk(v Visitor, x any) { if bav, ok := v.(BeforeAndAfterVisitor); !ok { walk(v, x) } else { @@ -38,11 +38,11 @@ func Walk(v Visitor, x interface{}) { // WalkBeforeAndAfter iterates the AST by calling the Visit function on the // Visitor v for x before recursing. // Deprecated: use GenericVisitor.Walk -func WalkBeforeAndAfter(v BeforeAndAfterVisitor, x interface{}) { +func WalkBeforeAndAfter(v BeforeAndAfterVisitor, x any) { Walk(v, x) } -func walk(v Visitor, x interface{}) { +func walk(v Visitor, x any) { w := v.Visit(x) if w == nil { return @@ -154,8 +154,8 @@ func walk(v Visitor, x interface{}) { // WalkVars calls the function f on all vars under x. If the function f // returns true, AST nodes under the last node will not be visited. 
-func WalkVars(x interface{}, f func(Var) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func WalkVars(x any, f func(Var) bool) { + vis := &GenericVisitor{func(x any) bool { if v, ok := x.(Var); ok { return f(v) } @@ -166,8 +166,8 @@ func WalkVars(x interface{}, f func(Var) bool) { // WalkClosures calls the function f on all closures under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkClosures(x interface{}, f func(interface{}) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func WalkClosures(x any, f func(any) bool) { + vis := &GenericVisitor{func(x any) bool { switch x := x.(type) { case *ArrayComprehension, *ObjectComprehension, *SetComprehension, *Every: return f(x) @@ -179,8 +179,8 @@ func WalkClosures(x interface{}, f func(interface{}) bool) { // WalkRefs calls the function f on all references under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkRefs(x interface{}, f func(Ref) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func WalkRefs(x any, f func(Ref) bool) { + vis := &GenericVisitor{func(x any) bool { if r, ok := x.(Ref); ok { return f(r) } @@ -191,8 +191,8 @@ func WalkRefs(x interface{}, f func(Ref) bool) { // WalkTerms calls the function f on all terms under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkTerms(x interface{}, f func(*Term) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func WalkTerms(x any, f func(*Term) bool) { + vis := &GenericVisitor{func(x any) bool { if term, ok := x.(*Term); ok { return f(term) } @@ -203,8 +203,8 @@ func WalkTerms(x interface{}, f func(*Term) bool) { // WalkWiths calls the function f on all with modifiers under x. If the function f // returns true, AST nodes under the last node will not be visited. 
-func WalkWiths(x interface{}, f func(*With) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func WalkWiths(x any, f func(*With) bool) { + vis := &GenericVisitor{func(x any) bool { if w, ok := x.(*With); ok { return f(w) } @@ -215,8 +215,8 @@ func WalkWiths(x interface{}, f func(*With) bool) { // WalkExprs calls the function f on all expressions under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkExprs(x interface{}, f func(*Expr) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func WalkExprs(x any, f func(*Expr) bool) { + vis := &GenericVisitor{func(x any) bool { if r, ok := x.(*Expr); ok { return f(r) } @@ -227,8 +227,8 @@ func WalkExprs(x interface{}, f func(*Expr) bool) { // WalkBodies calls the function f on all bodies under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkBodies(x interface{}, f func(Body) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func WalkBodies(x any, f func(Body) bool) { + vis := &GenericVisitor{func(x any) bool { if b, ok := x.(Body); ok { return f(b) } @@ -239,8 +239,8 @@ func WalkBodies(x interface{}, f func(Body) bool) { // WalkRules calls the function f on all rules under x. If the function f // returns true, AST nodes under the last node will not be visited. -func WalkRules(x interface{}, f func(*Rule) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func WalkRules(x any, f func(*Rule) bool) { + vis := &GenericVisitor{func(x any) bool { if r, ok := x.(*Rule); ok { stop := f(r) // NOTE(tsandall): since rules cannot be embedded inside of queries @@ -256,8 +256,8 @@ func WalkRules(x interface{}, f func(*Rule) bool) { // WalkNodes calls the function f on all nodes under x. If the function f // returns true, AST nodes under the last node will not be visited. 
-func WalkNodes(x interface{}, f func(Node) bool) { - vis := &GenericVisitor{func(x interface{}) bool { +func WalkNodes(x any, f func(Node) bool) { + vis := &GenericVisitor{func(x any) bool { if n, ok := x.(Node); ok { return f(n) } @@ -270,19 +270,19 @@ func WalkNodes(x interface{}, f func(Node) bool) { // closure. If the closure returns true, the visitor will not walk // over AST nodes under x. type GenericVisitor struct { - f func(x interface{}) bool + f func(x any) bool } // NewGenericVisitor returns a new GenericVisitor that will invoke the function // f on AST nodes. -func NewGenericVisitor(f func(x interface{}) bool) *GenericVisitor { +func NewGenericVisitor(f func(x any) bool) *GenericVisitor { return &GenericVisitor{f} } // Walk iterates the AST by calling the function f on the // GenericVisitor before recursing. Contrary to the generic Walk, this // does not require allocating the visitor from heap. -func (vis *GenericVisitor) Walk(x interface{}) { +func (vis *GenericVisitor) Walk(x any) { if vis.f(x) { return } @@ -403,13 +403,13 @@ func (vis *GenericVisitor) Walk(x interface{}) { // walk over AST nodes under x. The after closure is invoked always // after visiting a node. type BeforeAfterVisitor struct { - before func(x interface{}) bool - after func(x interface{}) + before func(x any) bool + after func(x any) } // NewBeforeAfterVisitor returns a new BeforeAndAfterVisitor that // will invoke the functions before and after AST nodes. -func NewBeforeAfterVisitor(before func(x interface{}) bool, after func(x interface{})) *BeforeAfterVisitor { +func NewBeforeAfterVisitor(before func(x any) bool, after func(x any)) *BeforeAfterVisitor { return &BeforeAfterVisitor{before, after} } @@ -417,7 +417,7 @@ func NewBeforeAfterVisitor(before func(x interface{}) bool, after func(x interfa // BeforeAndAfterVisitor before and after recursing. Contrary to the // generic Walk, this does not require allocating the visitor from // heap. 
-func (vis *BeforeAfterVisitor) Walk(x interface{}) { +func (vis *BeforeAfterVisitor) Walk(x any) { defer vis.after(x) if vis.before(x) { return @@ -576,7 +576,7 @@ func (vis *VarVisitor) Vars() VarSet { // visit determines if the VarVisitor will recurse into x: if it returns `true`, // the visitor will _skip_ that branch of the AST -func (vis *VarVisitor) visit(v interface{}) bool { +func (vis *VarVisitor) visit(v any) bool { if vis.params.SkipObjectKeys { if o, ok := v.(Object); ok { o.Foreach(func(_, v *Term) { @@ -669,7 +669,7 @@ func (vis *VarVisitor) visit(v interface{}) bool { // Walk iterates the AST by calling the function f on the // GenericVisitor before recursing. Contrary to the generic Walk, this // does not require allocating the visitor from heap. -func (vis *VarVisitor) Walk(x interface{}) { +func (vis *VarVisitor) Walk(x any) { if vis.visit(x) { return } diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/bundle.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/bundle.go index d6bf846fc..865e4b64b 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/bundle/bundle.go +++ b/vendor/github.com/open-policy-agent/opa/v1/bundle/bundle.go @@ -14,6 +14,7 @@ import ( "errors" "fmt" "io" + "maps" "net/url" "os" "path" @@ -52,7 +53,7 @@ const ( type Bundle struct { Signatures SignaturesConfig Manifest Manifest - Data map[string]interface{} + Data map[string]any Modules []ModuleFile Wasm []byte // Deprecated. Use WasmModules instead WasmModules []WasmModuleFile @@ -80,9 +81,9 @@ type Patch struct { // PatchOperation models a single patch operation against a document. type PatchOperation struct { - Op string `json:"op"` - Path string `json:"path"` - Value interface{} `json:"value"` + Op string `json:"op"` + Path string `json:"path"` + Value any `json:"value"` } // SignaturesConfig represents an array of JWTs that encapsulate the signatures for the bundle. 
@@ -137,8 +138,8 @@ type Manifest struct { RegoVersion *int `json:"rego_version,omitempty"` // FileRegoVersions is a map from file paths to Rego versions. // This allows individual files to override the global Rego version specified by RegoVersion. - FileRegoVersions map[string]int `json:"file_rego_versions,omitempty"` - Metadata map[string]interface{} `json:"metadata,omitempty"` + FileRegoVersions map[string]int `json:"file_rego_versions,omitempty"` + Metadata map[string]any `json:"metadata,omitempty"` compiledFileRegoVersions []fileRegoVersion } @@ -233,10 +234,8 @@ func (m Manifest) Copy() Manifest { metadata := m.Metadata if metadata != nil { - m.Metadata = make(map[string]interface{}) - for k, v := range metadata { - m.Metadata[k] = v - } + m.Metadata = make(map[string]any) + maps.Copy(m.Metadata, metadata) } return m @@ -391,13 +390,13 @@ func (m *Manifest) validateAndInjectDefaults(b Bundle) error { } // Validate data in bundle. - return dfs(b.Data, "", func(path string, node interface{}) (bool, error) { + return dfs(b.Data, "", func(path string, node any) (bool, error) { path = strings.Trim(path, "/") if RootPathsContain(roots, path) { return true, nil } - if _, ok := node.(map[string]interface{}); ok { + if _, ok := node.(map[string]any); ok { for i := range roots { if RootPathsContain(strings.Split(path, "/"), roots[i]) { return false, nil @@ -599,7 +598,7 @@ func (r *Reader) Read() (Bundle, error) { return bundle, err } - bundle.Data = map[string]interface{}{} + bundle.Data = map[string]any{} } var modules []ModuleFile @@ -669,7 +668,7 @@ func (r *Reader) Read() (Bundle, error) { continue } - var value interface{} + var value any r.metrics.Timer(metrics.RegoDataParse).Start() err := util.UnmarshalJSON(buf.Bytes(), &value) @@ -689,7 +688,7 @@ func (r *Reader) Read() (Bundle, error) { continue } - var value interface{} + var value any r.metrics.Timer(metrics.RegoDataParse).Start() err := util.Unmarshal(buf.Bytes(), &value) @@ -778,7 +777,7 @@ func (r 
*Reader) Read() (Bundle, error) { } if r.includeManifestInData { - var metadata map[string]interface{} + var metadata map[string]any b, err := json.Marshal(&bundle.Manifest) if err != nil { @@ -1069,7 +1068,7 @@ func hashBundleFiles(hash SignatureHasher, b *Bundle) ([]FileInfo, error) { return files, err } - var result map[string]interface{} + var result map[string]any if err := util.Unmarshal(mbs, &result); err != nil { return files, err } @@ -1299,14 +1298,14 @@ func (b Bundle) Equal(other Bundle) bool { func (b Bundle) Copy() Bundle { // Copy data. - var x interface{} = b.Data + var x any = b.Data if err := util.RoundTrip(&x); err != nil { panic(err) } if x != nil { - b.Data = x.(map[string]interface{}) + b.Data = x.(map[string]any) } // Copy modules. @@ -1323,7 +1322,7 @@ func (b Bundle) Copy() Bundle { return b } -func (b *Bundle) insertData(key []string, value interface{}) error { +func (b *Bundle) insertData(key []string, value any) error { // Build an object with the full structure for the value obj, err := mktree(key, value) if err != nil { @@ -1341,13 +1340,13 @@ func (b *Bundle) insertData(key []string, value interface{}) error { return nil } -func (b *Bundle) readData(key []string) *interface{} { +func (b *Bundle) readData(key []string) *any { if len(key) == 0 { if len(b.Data) == 0 { return nil } - var result interface{} = b.Data + var result any = b.Data return &result } @@ -1360,7 +1359,7 @@ func (b *Bundle) readData(key []string) *interface{} { return nil } - childObj, ok := child.(map[string]interface{}) + childObj, ok := child.(map[string]any) if !ok { return nil } @@ -1384,21 +1383,21 @@ func (b *Bundle) Type() string { return SnapshotBundleType } -func mktree(path []string, value interface{}) (map[string]interface{}, error) { +func mktree(path []string, value any) (map[string]any, error) { if len(path) == 0 { // For 0 length path the value is the full tree. 
- obj, ok := value.(map[string]interface{}) + obj, ok := value.(map[string]any) if !ok { return nil, errors.New("root value must be object") } return obj, nil } - dir := map[string]interface{}{} + dir := map[string]any{} for i := len(path) - 1; i > 0; i-- { dir[path[i]] = value value = dir - dir = map[string]interface{}{} + dir = map[string]any{} } dir[path[0]] = value @@ -1478,9 +1477,7 @@ func MergeWithRegoVersion(bundles []*Bundle, regoVersion ast.RegoVersion, usePat if err != nil { return nil, err } - for k, v := range fileRegoVersions { - result.Manifest.FileRegoVersions[k] = v - } + maps.Copy(result.Manifest.FileRegoVersions, fileRegoVersions) } } @@ -1488,7 +1485,7 @@ func MergeWithRegoVersion(bundles []*Bundle, regoVersion ast.RegoVersion, usePat result.SetRegoVersion(result.RegoVersion(regoVersion)) if result.Data == nil { - result.Data = map[string]interface{}{} + result.Data = map[string]any{} } result.Manifest.Roots = &roots @@ -1598,7 +1595,7 @@ func rootContains(root []string, other []string) bool { return true } -func insertValue(b *Bundle, path string, value interface{}) error { +func insertValue(b *Bundle, path string, value any) error { if err := b.insertData(getNormalizedPath(path), value); err != nil { return fmt.Errorf("bundle load failed on %v: %w", path, err) } @@ -1619,13 +1616,13 @@ func getNormalizedPath(path string) []string { return key } -func dfs(value interface{}, path string, fn func(string, interface{}) (bool, error)) error { +func dfs(value any, path string, fn func(string, any) (bool, error)) error { if stop, err := fn(path, value); err != nil { return err } else if stop { return nil } - obj, ok := value.(map[string]interface{}) + obj, ok := value.(map[string]any) if !ok { return nil } diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/hash.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/hash.go index ab6fcd0f3..5a62d2dc0 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/bundle/hash.go +++ 
b/vendor/github.com/open-policy-agent/opa/v1/bundle/hash.go @@ -41,7 +41,7 @@ func (alg HashingAlgorithm) String() string { // SignatureHasher computes a signature digest for a file with (structured or unstructured) data and policy type SignatureHasher interface { - HashFile(v interface{}) ([]byte, error) + HashFile(v any) ([]byte, error) } type hasher struct { @@ -77,7 +77,7 @@ func NewSignatureHasher(alg HashingAlgorithm) (SignatureHasher, error) { } // HashFile hashes the file content, JSON or binary, both in golang native format. -func (h *hasher) HashFile(v interface{}) ([]byte, error) { +func (h *hasher) HashFile(v any) ([]byte, error) { hf := h.h() walk(v, hf) return hf.Sum(nil), nil @@ -92,10 +92,10 @@ func (h *hasher) HashFile(v interface{}) ([]byte, error) { // object: Hash {, then each key (in alphabetical order) and digest of the value, then comma (between items) and finally }. // // array: Hash [, then digest of the value, then comma (between items) and finally ]. -func walk(v interface{}, h io.Writer) { +func walk(v any, h io.Writer) { switch x := v.(type) { - case map[string]interface{}: + case map[string]any: _, _ = h.Write([]byte("{")) for i, key := range util.KeysSorted(x) { @@ -109,7 +109,7 @@ func walk(v interface{}, h io.Writer) { } _, _ = h.Write([]byte("}")) - case []interface{}: + case []any: _, _ = h.Write([]byte("[")) for i, e := range x { @@ -127,7 +127,7 @@ func walk(v interface{}, h io.Writer) { } } -func encodePrimitive(v interface{}) []byte { +func encodePrimitive(v any) []byte { var buf bytes.Buffer encoder := json.NewEncoder(&buf) encoder.SetEscapeHTML(false) diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/keys.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/keys.go index aad30a675..dbd8ff269 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/bundle/keys.go +++ b/vendor/github.com/open-policy-agent/opa/v1/bundle/keys.go @@ -105,7 +105,7 @@ func (s *SigningConfig) WithPlugin(plugin string) *SigningConfig { 
} // GetPrivateKey returns the private key or secret from the signing config -func (s *SigningConfig) GetPrivateKey() (interface{}, error) { +func (s *SigningConfig) GetPrivateKey() (any, error) { block, _ := pem.Decode([]byte(s.Key)) if block != nil { @@ -129,8 +129,8 @@ func (s *SigningConfig) GetPrivateKey() (interface{}, error) { } // GetClaims returns the claims by reading the file specified in the signing config -func (s *SigningConfig) GetClaims() (map[string]interface{}, error) { - var claims map[string]interface{} +func (s *SigningConfig) GetClaims() (map[string]any, error) { + var claims map[string]any bs, err := os.ReadFile(s.ClaimsPath) if err != nil { diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/sign.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/sign.go index 710e29686..edc41a1e5 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/bundle/sign.go +++ b/vendor/github.com/open-policy-agent/opa/v1/bundle/sign.go @@ -9,6 +9,7 @@ import ( "crypto/rand" "encoding/json" "fmt" + "maps" "github.com/open-policy-agent/opa/internal/jwx/jwa" "github.com/open-policy-agent/opa/internal/jwx/jws" @@ -89,7 +90,7 @@ func (*DefaultSigner) GenerateSignedToken(files []FileInfo, sc *SigningConfig, k } func generatePayload(files []FileInfo, sc *SigningConfig, keyID string) ([]byte, error) { - payload := make(map[string]interface{}) + payload := make(map[string]any) payload["files"] = files if sc.ClaimsPath != "" { @@ -98,9 +99,7 @@ func generatePayload(files []FileInfo, sc *SigningConfig, keyID string) ([]byte, return nil, err } - for claim, value := range claims { - payload[claim] = value - } + maps.Copy(payload, claims) } else if keyID != "" { // keyid claim is deprecated but include it for backwards compatibility. 
payload["keyid"] = keyID diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/store.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/store.go index 363f7664d..33e6887d8 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/bundle/store.go +++ b/vendor/github.com/open-policy-agent/opa/v1/bundle/store.go @@ -10,6 +10,7 @@ import ( "encoding/json" "errors" "fmt" + "maps" "path/filepath" "strings" @@ -70,7 +71,7 @@ func moduleInfoPath(id string) storage.Path { return append(ModulesInfoBasePath, strings.Trim(id, "/")) } -func read(ctx context.Context, store storage.Store, txn storage.Transaction, path storage.Path) (interface{}, error) { +func read(ctx context.Context, store storage.Store, txn storage.Transaction, path storage.Path) (any, error) { value, err := store.Read(ctx, txn, path) if err != nil { return nil, err @@ -93,7 +94,7 @@ func ReadBundleNamesFromStore(ctx context.Context, store storage.Store, txn stor return nil, err } - bundleMap, ok := value.(map[string]interface{}) + bundleMap, ok := value.(map[string]any) if !ok { return nil, errors.New("corrupt manifest roots") } @@ -118,7 +119,7 @@ func WriteEtagToStore(ctx context.Context, store storage.Store, txn storage.Tran return write(ctx, store, txn, EtagStoragePath(name), etag) } -func write(ctx context.Context, store storage.Store, txn storage.Transaction, path storage.Path, value interface{}) error { +func write(ctx context.Context, store storage.Store, txn storage.Transaction, path storage.Path, value any) error { if err := util.RoundTrip(&value); err != nil { return err } @@ -218,7 +219,7 @@ func ReadWasmModulesFromStore(ctx context.Context, store storage.Store, txn stor return nil, err } - encodedModules, ok := value.(map[string]interface{}) + encodedModules, ok := value.(map[string]any) if !ok { return nil, errors.New("corrupt wasm modules") } @@ -247,7 +248,7 @@ func ReadBundleRootsFromStore(ctx context.Context, store storage.Store, txn stor return nil, err } - sl, ok := 
value.([]interface{}) + sl, ok := value.([]any) if !ok { return nil, errors.New("corrupt manifest roots") } @@ -288,17 +289,17 @@ func readRevisionFromStore(ctx context.Context, store storage.Store, txn storage // ReadBundleMetadataFromStore returns the metadata in the specified bundle. // If the bundle is not activated, this function will return // storage NotFound error. -func ReadBundleMetadataFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, name string) (map[string]interface{}, error) { +func ReadBundleMetadataFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, name string) (map[string]any, error) { return readMetadataFromStore(ctx, store, txn, metadataPath(name)) } -func readMetadataFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, path storage.Path) (map[string]interface{}, error) { +func readMetadataFromStore(ctx context.Context, store storage.Store, txn storage.Transaction, path storage.Path) (map[string]any, error) { value, err := read(ctx, store, txn, path) if err != nil { return nil, suppressNotFound(err) } - data, ok := value.(map[string]interface{}) + data, ok := value.(map[string]any) if !ok { return nil, errors.New("corrupt manifest metadata") } @@ -451,7 +452,7 @@ func activateBundles(opts *ActivateOpts) error { } // verify valid YAML or JSON value - var x interface{} + var x any err := util.Unmarshal(item.Value, &x) if err != nil { return err @@ -484,12 +485,8 @@ func activateBundles(opts *ActivateOpts) error { // Compile the modules all at once to avoid having to re-do work. 
remainingAndExtra := make(map[string]*ast.Module) - for name, mod := range remaining { - remainingAndExtra[name] = mod - } - for name, mod := range opts.ExtraModules { - remainingAndExtra[name] = mod - } + maps.Copy(remainingAndExtra, remaining) + maps.Copy(remainingAndExtra, opts.ExtraModules) err = compileModules(opts.Compiler, opts.Metrics, snapshotBundles, remainingAndExtra, opts.legacy, opts.AuthorizationDecisionRef) if err != nil { @@ -615,7 +612,7 @@ func activateDeltaBundles(opts *ActivateOpts, bundles map[string]*Bundle) error return nil } -func valueToManifest(v interface{}) (Manifest, error) { +func valueToManifest(v any) (Manifest, error) { if astV, ok := v.(ast.Value); ok { var err error v, err = ast.JSON(astV) @@ -902,7 +899,7 @@ func writeDataAndModules(ctx context.Context, store storage.Store, txn storage.T return nil } -func writeData(ctx context.Context, store storage.Store, txn storage.Transaction, roots []string, data map[string]interface{}) error { +func writeData(ctx context.Context, store storage.Store, txn storage.Transaction, roots []string, data map[string]any) error { for _, root := range roots { path, ok := storage.ParsePathEscaped("/" + root) if !ok { @@ -930,14 +927,10 @@ func compileModules(compiler *ast.Compiler, m metrics.Metrics, bundles map[strin modules := map[string]*ast.Module{} // preserve any modules already on the compiler - for name, module := range compiler.Modules { - modules[name] = module - } + maps.Copy(modules, compiler.Modules) // preserve any modules passed in from the store - for name, module := range extraModules { - modules[name] = module - } + maps.Copy(modules, extraModules) // include all the new bundle modules for bundleName, b := range bundles { @@ -946,9 +939,7 @@ func compileModules(compiler *ast.Compiler, m metrics.Metrics, bundles map[strin modules[mf.Path] = mf.Parsed } } else { - for name, module := range b.ParsedModules(bundleName) { - modules[name] = module - } + maps.Copy(modules, 
b.ParsedModules(bundleName)) } } @@ -971,14 +962,10 @@ func writeModules(ctx context.Context, store storage.Store, txn storage.Transact modules := map[string]*ast.Module{} // preserve any modules already on the compiler - for name, module := range compiler.Modules { - modules[name] = module - } + maps.Copy(modules, compiler.Modules) // preserve any modules passed in from the store - for name, module := range extraModules { - modules[name] = module - } + maps.Copy(modules, extraModules) // include all the new bundle modules for bundleName, b := range bundles { @@ -987,9 +974,7 @@ func writeModules(ctx context.Context, store storage.Store, txn storage.Transact modules[mf.Path] = mf.Parsed } } else { - for name, module := range b.ParsedModules(bundleName) { - modules[name] = module - } + maps.Copy(modules, b.ParsedModules(bundleName)) } } @@ -1016,7 +1001,7 @@ func writeModules(ctx context.Context, store storage.Store, txn storage.Transact return nil } -func lookup(path storage.Path, data map[string]interface{}) (interface{}, bool) { +func lookup(path storage.Path, data map[string]any) (any, bool) { if len(path) == 0 { return data, true } @@ -1025,7 +1010,7 @@ func lookup(path storage.Path, data map[string]interface{}) (interface{}, bool) if !ok { return nil, false } - obj, ok := value.(map[string]interface{}) + obj, ok := value.(map[string]any) if !ok { return nil, false } diff --git a/vendor/github.com/open-policy-agent/opa/v1/bundle/verify.go b/vendor/github.com/open-policy-agent/opa/v1/bundle/verify.go index 0645d3aaf..829e98acd 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/bundle/verify.go +++ b/vendor/github.com/open-policy-agent/opa/v1/bundle/verify.go @@ -178,7 +178,7 @@ func VerifyBundleFile(path string, data bytes.Buffer, files map[string]FileInfo) // then recursively order the fields of all objects alphabetically and then apply // the hash function to result to compute the hash. 
This ensures that the digital signature is // independent of whitespace and other non-semantic JSON features. - var value interface{} + var value any if IsStructuredDoc(path) { err := util.Unmarshal(data.Bytes(), &value) if err != nil { diff --git a/vendor/github.com/open-policy-agent/opa/v1/config/config.go b/vendor/github.com/open-policy-agent/opa/v1/config/config.go index 490f90b90..62bfc6553 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/config/config.go +++ b/vendor/github.com/open-policy-agent/opa/v1/config/config.go @@ -168,13 +168,13 @@ func (c Config) GetPersistenceDirectory() (string, error) { // ActiveConfig returns OPA's active configuration // with the credentials and crypto keys removed -func (c *Config) ActiveConfig() (interface{}, error) { +func (c *Config) ActiveConfig() (any, error) { bs, err := json.Marshal(c) if err != nil { return nil, err } - var result map[string]interface{} + var result map[string]any if err := util.UnmarshalJSON(bs, &result); err != nil { return nil, err } @@ -197,11 +197,11 @@ func (c *Config) ActiveConfig() (interface{}, error) { return result, nil } -func removeServiceCredentials(x interface{}) error { +func removeServiceCredentials(x any) error { switch x := x.(type) { case nil: return nil - case []interface{}: + case []any: for _, v := range x { err := removeKey(v, "credentials") if err != nil { @@ -209,7 +209,7 @@ func removeServiceCredentials(x interface{}) error { } } - case map[string]interface{}: + case map[string]any: for _, v := range x { err := removeKey(v, "credentials") if err != nil { @@ -223,11 +223,11 @@ func removeServiceCredentials(x interface{}) error { return nil } -func removeCryptoKeys(x interface{}) error { +func removeCryptoKeys(x any) error { switch x := x.(type) { case nil: return nil - case map[string]interface{}: + case map[string]any: for _, v := range x { err := removeKey(v, "key", "private_key") if err != nil { @@ -241,8 +241,8 @@ func removeCryptoKeys(x interface{}) error { return 
nil } -func removeKey(x interface{}, keys ...string) error { - val, ok := x.(map[string]interface{}) +func removeKey(x any, keys ...string) error { + val, ok := x.(map[string]any) if !ok { return errors.New("type assertion error") } diff --git a/vendor/github.com/open-policy-agent/opa/v1/format/format.go b/vendor/github.com/open-policy-agent/opa/v1/format/format.go index 2b0f2af15..9b4237280 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/format/format.go +++ b/vendor/github.com/open-policy-agent/opa/v1/format/format.go @@ -101,7 +101,7 @@ func SourceWithOpts(filename string, src []byte, opts Opts) ([]byte, error) { // MustAst is a helper function to format a Rego AST element. If any errors // occur this function will panic. This is mostly used for test -func MustAst(x interface{}) []byte { +func MustAst(x any) []byte { bs, err := Ast(x) if err != nil { panic(err) @@ -111,7 +111,7 @@ func MustAst(x interface{}) []byte { // MustAstWithOpts is a helper function to format a Rego AST element. If any errors // occur this function will panic. This is mostly used for test -func MustAstWithOpts(x interface{}, opts Opts) []byte { +func MustAstWithOpts(x any, opts Opts) []byte { bs, err := AstWithOpts(x, opts) if err != nil { panic(err) @@ -122,7 +122,7 @@ func MustAstWithOpts(x interface{}, opts Opts) []byte { // Ast formats a Rego AST element. If the passed value is not a valid AST // element, Ast returns nil and an error. If AST nodes are missing locations // an arbitrary location will be used. -func Ast(x interface{}) ([]byte, error) { +func Ast(x any) ([]byte, error) { return AstWithOpts(x, Opts{}) } @@ -156,7 +156,7 @@ func (o fmtOpts) keywords() []string { return append(kws, o.futureKeywords...) } -func AstWithOpts(x interface{}, opts Opts) ([]byte, error) { +func AstWithOpts(x any, opts Opts) ([]byte, error) { // The node has to be deep copied because it may be mutated below. Alternatively, // we could avoid the copy by checking if mutation will occur first. 
For now, // since format is not latency sensitive, just deep copy in all cases. @@ -249,7 +249,7 @@ func AstWithOpts(x interface{}, opts Opts) ([]byte, error) { x.Imports = ensureRegoV1Import(x.Imports) } - regoV1Imported := moduleIsRegoV1Compatible(x) + regoV1Imported := slices.ContainsFunc(x.Imports, isRegoV1Compatible) if regoVersion == ast.RegoV0CompatV1 || regoVersion == ast.RegoV1 || regoV1Imported { if !opts.DropV0Imports && !regoV1Imported { for _, kw := range o.futureKeywords { @@ -384,9 +384,9 @@ type writer struct { func (w *writer) writeModule(module *ast.Module) error { var pkg *ast.Package - var others []interface{} + var others []any var comments []*ast.Comment - visitor := ast.NewGenericVisitor(func(x interface{}) bool { + visitor := ast.NewGenericVisitor(func(x any) bool { switch x := x.(type) { case *ast.Comment: comments = append(comments, x) @@ -759,7 +759,7 @@ func (w *writer) writeHead(head *ast.Head, isDefault bool, isExpandedConst bool, if len(head.Args) > 0 { w.write("(") - var args []interface{} + var args []any for _, arg := range head.Args { args = append(args, arg) } @@ -1072,7 +1072,7 @@ func (w *writer) writeFunctionCall(expr *ast.Expr, comments []*ast.Comment) ([]* func (w *writer) writeFunctionCallPlain(terms []*ast.Term, comments []*ast.Comment) ([]*ast.Comment, error) { w.write(terms[0].String() + "(") defer w.write(")") - args := make([]interface{}, len(terms)-1) + args := make([]any, len(terms)-1) for i, t := range terms[1:] { args[i] = t } @@ -1405,7 +1405,7 @@ func (w *writer) writeObject(obj ast.Object, loc *ast.Location, comments []*ast. 
w.write("{") defer w.write("}") - var s []interface{} + var s []any obj.Foreach(func(k, v *ast.Term) { s = append(s, ast.Item(k, v)) }) @@ -1416,7 +1416,7 @@ func (w *writer) writeArray(arr *ast.Array, loc *ast.Location, comments []*ast.C w.write("[") defer w.write("]") - var s []interface{} + var s []any arr.Foreach(func(t *ast.Term) { s = append(s, t) }) @@ -1443,7 +1443,7 @@ func (w *writer) writeSet(set ast.Set, loc *ast.Location, comments []*ast.Commen w.write("{") defer w.write("}") - var s []interface{} + var s []any set.Foreach(func(t *ast.Term) { s = append(s, t) }) @@ -1510,7 +1510,7 @@ func (w *writer) writeComprehension(openChar, closeChar byte, term *ast.Term, bo } func (w *writer) writeComprehensionBody(openChar, closeChar byte, body ast.Body, term, compr *ast.Location, comments []*ast.Comment) ([]*ast.Comment, error) { - exprs := make([]interface{}, 0, len(body)) + exprs := make([]any, 0, len(body)) for _, expr := range body { exprs = append(exprs, expr) } @@ -1613,9 +1613,9 @@ func (w *writer) writeImport(imp *ast.Import) error { return nil } -type entryWriter func(interface{}, []*ast.Comment) ([]*ast.Comment, error) +type entryWriter func(any, []*ast.Comment) ([]*ast.Comment, error) -func (w *writer) writeIterable(elements []interface{}, last *ast.Location, close *ast.Location, comments []*ast.Comment, fn entryWriter) ([]*ast.Comment, error) { +func (w *writer) writeIterable(elements []any, last *ast.Location, close *ast.Location, comments []*ast.Comment, fn entryWriter) ([]*ast.Comment, error) { lines, err := w.groupIterable(elements, last) if err != nil { return nil, err @@ -1658,7 +1658,7 @@ func (w *writer) writeIterable(elements []interface{}, last *ast.Location, close return comments, nil } -func (w *writer) writeIterableLine(elements []interface{}, comments []*ast.Comment, fn entryWriter) ([]*ast.Comment, error) { +func (w *writer) writeIterableLine(elements []any, comments []*ast.Comment, fn entryWriter) ([]*ast.Comment, error) { if 
len(elements) == 0 { return comments, nil } @@ -1677,7 +1677,7 @@ func (w *writer) writeIterableLine(elements []interface{}, comments []*ast.Comme } func (w *writer) objectWriter() entryWriter { - return func(x interface{}, comments []*ast.Comment) ([]*ast.Comment, error) { + return func(x any, comments []*ast.Comment) ([]*ast.Comment, error) { entry := x.([2]*ast.Term) call, isCall := entry[0].Value.(ast.Call) @@ -1710,7 +1710,7 @@ func (w *writer) objectWriter() entryWriter { } func (w *writer) listWriter() entryWriter { - return func(x interface{}, comments []*ast.Comment) ([]*ast.Comment, error) { + return func(x any, comments []*ast.Comment) ([]*ast.Comment, error) { t, ok := x.(*ast.Term) if ok { call, isCall := t.Value.(ast.Call) @@ -1726,7 +1726,7 @@ func (w *writer) listWriter() entryWriter { // groupIterable will group the `elements` slice into slices according to their // location: anything on the same line will be put into a slice. -func (w *writer) groupIterable(elements []interface{}, last *ast.Location) ([][]interface{}, error) { +func (w *writer) groupIterable(elements []any, last *ast.Location) ([][]any, error) { // Generated vars occur in the AST when we're rendering the result of // partial evaluation in a bundle build with optimization. 
// Those variables, and wildcard variables have the "default location", @@ -1753,7 +1753,7 @@ func (w *writer) groupIterable(elements []interface{}, last *ast.Location) ([][] return false }) if def { // return as-is - return [][]interface{}{elements}, nil + return [][]any{elements}, nil } } @@ -1765,8 +1765,8 @@ func (w *writer) groupIterable(elements []interface{}, last *ast.Location) ([][] return l }) - var lines [][]interface{} - cur := make([]interface{}, 0, len(elements)) + var lines [][]any + cur := make([]any, 0, len(elements)) for i, t := range elements { elem := t loc, err := getLoc(elem) @@ -1876,7 +1876,7 @@ func partitionComments(comments []*ast.Comment, l *ast.Location) ([]*ast.Comment return before, at, after } -func gatherImports(others []interface{}) (imports []*ast.Import, rest []interface{}) { +func gatherImports(others []any) (imports []*ast.Import, rest []any) { i := 0 loop: for ; i < len(others); i++ { @@ -1890,7 +1890,7 @@ loop: return imports, others[i:] } -func gatherRules(others []interface{}) (rules []*ast.Rule, rest []interface{}) { +func gatherRules(others []any) (rules []*ast.Rule, rest []any) { i := 0 loop: for ; i < len(others); i++ { @@ -1904,12 +1904,12 @@ loop: return rules, others[i:] } -func locLess(a, b interface{}) (bool, error) { +func locLess(a, b any) (bool, error) { c, err := locCmp(a, b) return c < 0, err } -func locCmp(a, b interface{}) (int, error) { +func locCmp(a, b any) (int, error) { al, err := getLoc(a) if err != nil { return 0, err @@ -1934,7 +1934,7 @@ func locCmp(a, b interface{}) (int, error) { return al.Col - bl.Col, nil } -func getLoc(x interface{}) (*ast.Location, error) { +func getLoc(x any) (*ast.Location, error) { switch x := x.(type) { case ast.Node: // *ast.Head, *ast.Expr, *ast.With, *ast.Term return x.Loc(), nil @@ -2206,21 +2206,10 @@ func (d *ArityFormatErrDetail) Lines() []string { } } -func moduleIsRegoV1Compatible(m *ast.Module) bool { - for _, imp := range m.Imports { - if isRegoV1Compatible(imp) 
{ - return true - } - } - return false -} - -var v1StringTerm = ast.StringTerm("v1") - // isRegoV1Compatible returns true if the passed *ast.Import is `rego.v1` func isRegoV1Compatible(imp *ast.Import) bool { path := imp.Path.Value.(ast.Ref) return len(path) == 2 && ast.RegoRootDocument.Equal(path[0]) && - path[1].Equal(v1StringTerm) + path[1].Equal(ast.InternedStringTerm("v1")) } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ir/pretty.go b/vendor/github.com/open-policy-agent/opa/v1/ir/pretty.go index 6102c5a91..53d7cbae8 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ir/pretty.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ir/pretty.go @@ -11,7 +11,7 @@ import ( ) // Pretty writes a human-readable representation of an IR object to w. -func Pretty(w io.Writer, x interface{}) error { +func Pretty(w io.Writer, x any) error { pp := &prettyPrinter{ depth: -1, @@ -25,20 +25,20 @@ type prettyPrinter struct { w io.Writer } -func (pp *prettyPrinter) Before(_ interface{}) { +func (pp *prettyPrinter) Before(_ any) { pp.depth++ } -func (pp *prettyPrinter) After(_ interface{}) { +func (pp *prettyPrinter) After(_ any) { pp.depth-- } -func (pp *prettyPrinter) Visit(x interface{}) (Visitor, error) { +func (pp *prettyPrinter) Visit(x any) (Visitor, error) { pp.writeIndent("%T %+v", x, x) return pp, nil } -func (pp *prettyPrinter) writeIndent(f string, a ...interface{}) { +func (pp *prettyPrinter) writeIndent(f string, a ...any) { pad := strings.Repeat("| ", pp.depth) fmt.Fprintf(pp.w, pad+f+"\n", a...) } diff --git a/vendor/github.com/open-policy-agent/opa/v1/ir/walk.go b/vendor/github.com/open-policy-agent/opa/v1/ir/walk.go index 08a8f4244..788f36cd8 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/ir/walk.go +++ b/vendor/github.com/open-policy-agent/opa/v1/ir/walk.go @@ -6,13 +6,13 @@ package ir // Visitor defines the interface for visiting IR nodes. 
type Visitor interface { - Before(x interface{}) - Visit(x interface{}) (Visitor, error) - After(x interface{}) + Before(x any) + Visit(x any) (Visitor, error) + After(x any) } // Walk invokes the visitor for nodes under x. -func Walk(vis Visitor, x interface{}) error { +func Walk(vis Visitor, x any) error { impl := walkerImpl{ vis: vis, } @@ -25,7 +25,7 @@ type walkerImpl struct { err error } -func (w *walkerImpl) walk(x interface{}) { +func (w *walkerImpl) walk(x any) { if w.err != nil { // abort on error return } diff --git a/vendor/github.com/open-policy-agent/opa/v1/loader/loader.go b/vendor/github.com/open-policy-agent/opa/v1/loader/loader.go index 5e2217473..d7a70ab78 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/loader/loader.go +++ b/vendor/github.com/open-policy-agent/opa/v1/loader/loader.go @@ -30,7 +30,7 @@ import ( // Result represents the result of successfully loading zero or more files. type Result struct { - Documents map[string]interface{} + Documents map[string]any Modules map[string]*RegoFile path []string } @@ -468,13 +468,13 @@ func getSchemaSetByPathKey(path string) ast.Ref { return key } -func loadOneSchema(path string) (interface{}, error) { +func loadOneSchema(path string) (any, error) { bs, err := os.ReadFile(path) if err != nil { return nil, err } - var schema interface{} + var schema any if err := util.Unmarshal(bs, &schema); err != nil { return nil, fmt.Errorf("%s: %w", path, err) } @@ -584,7 +584,7 @@ func SplitPrefix(path string) ([]string, string) { return nil, path } -func (l *Result) merge(path string, result interface{}) error { +func (l *Result) merge(path string, result any) error { switch result := result.(type) { case bundle.Bundle: for _, module := range result.Modules { @@ -603,7 +603,7 @@ func (l *Result) merge(path string, result interface{}) error { } } -func (l *Result) mergeDocument(path string, doc interface{}) error { +func (l *Result) mergeDocument(path string, doc any) error { obj, ok := makeDir(l.path, 
doc) if !ok { return unsupportedDocumentType(path) @@ -629,7 +629,7 @@ func (l *Result) withParent(p string) *Result { func newResult() *Result { return &Result{ - Documents: map[string]interface{}{}, + Documents: map[string]any{}, Modules: map[string]*RegoFile{}, } } @@ -719,7 +719,7 @@ func allRec(fsys fs.FS, path string, filter Filter, errors *Errors, loaded *Resu } } -func loadKnownTypes(path string, bs []byte, m metrics.Metrics, opts ast.ParserOptions) (interface{}, error) { +func loadKnownTypes(path string, bs []byte, m metrics.Metrics, opts ast.ParserOptions) (any, error) { switch filepath.Ext(path) { case ".json": return loadJSON(path, bs, m) @@ -739,7 +739,7 @@ func loadKnownTypes(path string, bs []byte, m metrics.Metrics, opts ast.ParserOp return nil, unrecognizedFile(path) } -func loadFileForAnyType(path string, bs []byte, m metrics.Metrics, opts ast.ParserOptions) (interface{}, error) { +func loadFileForAnyType(path string, bs []byte, m metrics.Metrics, opts ast.ParserOptions) (any, error) { module, err := loadRego(path, bs, m, opts) if err == nil { return module, nil @@ -784,9 +784,9 @@ func loadRego(path string, bs []byte, m metrics.Metrics, opts ast.ParserOptions) return result, nil } -func loadJSON(path string, bs []byte, m metrics.Metrics) (interface{}, error) { +func loadJSON(path string, bs []byte, m metrics.Metrics) (any, error) { m.Timer(metrics.RegoDataParse).Start() - var x interface{} + var x any err := util.UnmarshalJSON(bs, &x) m.Timer(metrics.RegoDataParse).Stop() @@ -796,7 +796,7 @@ func loadJSON(path string, bs []byte, m metrics.Metrics) (interface{}, error) { return x, nil } -func loadYAML(path string, bs []byte, m metrics.Metrics) (interface{}, error) { +func loadYAML(path string, bs []byte, m metrics.Metrics) (any, error) { m.Timer(metrics.RegoDataParse).Start() bs, err := yaml.YAMLToJSON(bs) m.Timer(metrics.RegoDataParse).Stop() @@ -806,15 +806,15 @@ func loadYAML(path string, bs []byte, m metrics.Metrics) (interface{}, error) { 
return loadJSON(path, bs, m) } -func makeDir(path []string, x interface{}) (map[string]interface{}, bool) { +func makeDir(path []string, x any) (map[string]any, bool) { if len(path) == 0 { - obj, ok := x.(map[string]interface{}) + obj, ok := x.(map[string]any) if !ok { return nil, false } return obj, true } - return makeDir(path[:len(path)-1], map[string]interface{}{path[len(path)-1]: x}) + return makeDir(path[:len(path)-1], map[string]any{path[len(path)-1]: x}) } // isUNC reports whether path is a UNC path. diff --git a/vendor/github.com/open-policy-agent/opa/v1/logging/logging.go b/vendor/github.com/open-policy-agent/opa/v1/logging/logging.go index 7a1edfb56..9e36a20bf 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/logging/logging.go +++ b/vendor/github.com/open-policy-agent/opa/v1/logging/logging.go @@ -3,6 +3,7 @@ package logging import ( "context" "io" + "maps" "net/http" "github.com/sirupsen/logrus" @@ -24,12 +25,12 @@ const ( // Logger provides interface for OPA logger implementations type Logger interface { - Debug(fmt string, a ...interface{}) - Info(fmt string, a ...interface{}) - Error(fmt string, a ...interface{}) - Warn(fmt string, a ...interface{}) + Debug(fmt string, a ...any) + Info(fmt string, a ...any) + Error(fmt string, a ...any) + Warn(fmt string, a ...any) - WithFields(map[string]interface{}) Logger + WithFields(map[string]any) Logger GetLevel() Level SetLevel(Level) @@ -38,7 +39,7 @@ type Logger interface { // StandardLogger is the default OPA logger implementation. type StandardLogger struct { logger *logrus.Logger - fields map[string]interface{} + fields map[string]any } // New returns a new standard logger. 
@@ -68,20 +69,16 @@ func (l *StandardLogger) SetFormatter(formatter logrus.Formatter) { } // WithFields provides additional fields to include in log output -func (l *StandardLogger) WithFields(fields map[string]interface{}) Logger { +func (l *StandardLogger) WithFields(fields map[string]any) Logger { cp := *l - cp.fields = make(map[string]interface{}) - for k, v := range l.fields { - cp.fields[k] = v - } - for k, v := range fields { - cp.fields[k] = v - } + cp.fields = make(map[string]any) + maps.Copy(cp.fields, l.fields) + maps.Copy(cp.fields, fields) return &cp } // getFields returns additional fields of this logger -func (l *StandardLogger) getFields() map[string]interface{} { +func (l *StandardLogger) getFields() map[string]any { return l.fields } @@ -126,7 +123,7 @@ func (l *StandardLogger) GetLevel() Level { } // Debug logs at debug level -func (l *StandardLogger) Debug(fmt string, a ...interface{}) { +func (l *StandardLogger) Debug(fmt string, a ...any) { if len(a) == 0 { l.logger.WithFields(l.getFields()).Debug(fmt) return @@ -135,7 +132,7 @@ func (l *StandardLogger) Debug(fmt string, a ...interface{}) { } // Info logs at info level -func (l *StandardLogger) Info(fmt string, a ...interface{}) { +func (l *StandardLogger) Info(fmt string, a ...any) { if len(a) == 0 { l.logger.WithFields(l.getFields()).Info(fmt) return @@ -144,7 +141,7 @@ func (l *StandardLogger) Info(fmt string, a ...interface{}) { } // Error logs at error level -func (l *StandardLogger) Error(fmt string, a ...interface{}) { +func (l *StandardLogger) Error(fmt string, a ...any) { if len(a) == 0 { l.logger.WithFields(l.getFields()).Error(fmt) return @@ -153,7 +150,7 @@ func (l *StandardLogger) Error(fmt string, a ...interface{}) { } // Warn logs at warn level -func (l *StandardLogger) Warn(fmt string, a ...interface{}) { +func (l *StandardLogger) Warn(fmt string, a ...any) { if len(a) == 0 { l.logger.WithFields(l.getFields()).Warn(fmt) return @@ -164,7 +161,7 @@ func (l *StandardLogger) 
Warn(fmt string, a ...interface{}) { // NoOpLogger logging implementation that does nothing type NoOpLogger struct { level Level - fields map[string]interface{} + fields map[string]any } // NewNoOpLogger instantiates new NoOpLogger @@ -176,23 +173,23 @@ func NewNoOpLogger() *NoOpLogger { // WithFields provides additional fields to include in log output. // Implemented here primarily to be able to switch between implementations without loss of data. -func (l *NoOpLogger) WithFields(fields map[string]interface{}) Logger { +func (l *NoOpLogger) WithFields(fields map[string]any) Logger { cp := *l cp.fields = fields return &cp } // Debug noop -func (*NoOpLogger) Debug(string, ...interface{}) {} +func (*NoOpLogger) Debug(string, ...any) {} // Info noop -func (*NoOpLogger) Info(string, ...interface{}) {} +func (*NoOpLogger) Info(string, ...any) {} // Error noop -func (*NoOpLogger) Error(string, ...interface{}) {} +func (*NoOpLogger) Error(string, ...any) {} // Warn noop -func (*NoOpLogger) Warn(string, ...interface{}) {} +func (*NoOpLogger) Warn(string, ...any) {} // SetLevel set log level func (l *NoOpLogger) SetLevel(level Level) { diff --git a/vendor/github.com/open-policy-agent/opa/v1/metrics/metrics.go b/vendor/github.com/open-policy-agent/opa/v1/metrics/metrics.go index f1038e8bc..19d9c7d37 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/metrics/metrics.go +++ b/vendor/github.com/open-policy-agent/opa/v1/metrics/metrics.go @@ -48,13 +48,13 @@ type Metrics interface { Timer(name string) Timer Histogram(name string) Histogram Counter(name string) Counter - All() map[string]interface{} + All() map[string]any Clear() json.Marshaler } type TimerMetrics interface { - Timers() map[string]interface{} + Timers() map[string]any } type metrics struct { @@ -73,7 +73,7 @@ func New() Metrics { type metric struct { Key string - Value interface{} + Value any } func (*metrics) Info() Info { @@ -144,10 +144,10 @@ func (m *metrics) Counter(name string) Counter { return c } 
-func (m *metrics) All() map[string]interface{} { +func (m *metrics) All() map[string]any { m.mtx.Lock() defer m.mtx.Unlock() - result := map[string]interface{}{} + result := map[string]any{} for name, timer := range m.timers { result[m.formatKey(name, timer)] = timer.Value() } @@ -160,10 +160,10 @@ func (m *metrics) All() map[string]interface{} { return result } -func (m *metrics) Timers() map[string]interface{} { +func (m *metrics) Timers() map[string]any { m.mtx.Lock() defer m.mtx.Unlock() - ts := map[string]interface{}{} + ts := map[string]any{} for n, t := range m.timers { ts[m.formatKey(n, t)] = t.Value() } @@ -178,7 +178,7 @@ func (m *metrics) Clear() { m.counters = map[string]Counter{} } -func (*metrics) formatKey(name string, metrics interface{}) string { +func (*metrics) formatKey(name string, metrics any) string { switch metrics.(type) { case Timer: return "timer_" + name + "_ns" @@ -194,7 +194,7 @@ func (*metrics) formatKey(name string, metrics interface{}) string { // Timer defines the interface for a restartable timer that accumulates elapsed // time. type Timer interface { - Value() interface{} + Value() any Int64() int64 Start() Stop() int64 @@ -220,7 +220,7 @@ func (t *timer) Stop() int64 { return delta } -func (t *timer) Value() interface{} { +func (t *timer) Value() any { return t.Int64() } @@ -232,7 +232,7 @@ func (t *timer) Int64() int64 { // Histogram defines the interface for a histogram with hardcoded percentiles. type Histogram interface { - Value() interface{} + Value() any Update(int64) } @@ -253,8 +253,8 @@ func (h *histogram) Update(v int64) { h.hist.Update(v) } -func (h *histogram) Value() interface{} { - values := map[string]interface{}{} +func (h *histogram) Value() any { + values := map[string]any{} snap := h.hist.Snapshot() percentiles := snap.Percentiles([]float64{ 0.5, @@ -282,7 +282,7 @@ func (h *histogram) Value() interface{} { // Counter defines the interface for a monotonic increasing counter. 
type Counter interface { - Value() interface{} + Value() any Incr() Add(n uint64) } @@ -299,11 +299,11 @@ func (c *counter) Add(n uint64) { atomic.AddUint64(&c.c, n) } -func (c *counter) Value() interface{} { +func (c *counter) Value() any { return atomic.LoadUint64(&c.c) } -func Statistics(num ...int64) interface{} { +func Statistics(num ...int64) any { t := newHistogram() for _, n := range num { t.Update(n) diff --git a/vendor/github.com/open-policy-agent/opa/v1/plugins/plugins.go b/vendor/github.com/open-policy-agent/opa/v1/plugins/plugins.go index 7e8b900bf..430b15efd 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/plugins/plugins.go +++ b/vendor/github.com/open-policy-agent/opa/v1/plugins/plugins.go @@ -9,6 +9,7 @@ import ( "context" "errors" "fmt" + "maps" mr "math/rand" "sync" "time" @@ -85,8 +86,8 @@ import ( // After a plugin has been created subsequent status updates can be // send anytime the plugin enters a ready or error state. type Factory interface { - Validate(manager *Manager, config []byte) (interface{}, error) - New(manager *Manager, config interface{}) Plugin + Validate(manager *Manager, config []byte) (any, error) + New(manager *Manager, config any) Plugin } // Plugin defines the interface OPA uses to manage your plugin. @@ -104,7 +105,7 @@ type Factory interface { type Plugin interface { Start(ctx context.Context) error Stop(ctx context.Context) - Reconfigure(ctx context.Context, config interface{}) + Reconfigure(ctx context.Context, config any) } // Triggerable defines the interface plugins use for manual plugin triggers. 
@@ -791,9 +792,7 @@ func (m *Manager) Reconfigure(config *config.Config) error { if config.Labels == nil { config.Labels = m.bootstrapConfigLabels } else { - for label, value := range m.bootstrapConfigLabels { - config.Labels[label] = value - } + maps.Copy(config.Labels, m.bootstrapConfigLabels) } // don't erase persistence directory @@ -803,13 +802,9 @@ func (m *Manager) Reconfigure(config *config.Config) error { m.Config = config m.interQueryBuiltinCacheConfig = interQueryBuiltinCacheConfig - for name, client := range services { //nolint:gocritic - m.services[name] = client - } - for name, key := range keys { - m.keys[name] = key - } + maps.Copy(m.services, services) + maps.Copy(m.keys, keys) for _, trigger := range m.registeredCacheTriggers { trigger(interQueryBuiltinCacheConfig) @@ -861,9 +856,7 @@ func (m *Manager) UpdatePluginStatus(pluginName string, status *Status) { defer m.mtx.Unlock() m.pluginStatus[pluginName] = status toNotify = make(map[string]StatusListener, len(m.pluginStatusListeners)) - for k, v := range m.pluginStatusListeners { - toNotify[k] = v - } + maps.Copy(toNotify, m.pluginStatusListeners) statuses = m.copyPluginStatus() }() @@ -1097,7 +1090,7 @@ func (m *Manager) sendOPAUpdateLoop(ctx context.Context) { opaReportNotify = false _, err := m.reporter.SendReport(ctx) if err != nil { - m.logger.WithFields(map[string]interface{}{"err": err}).Debug("Unable to send OPA telemetry report.") + m.logger.WithFields(map[string]any{"err": err}).Debug("Unable to send OPA telemetry report.") } } diff --git a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/auth.go b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/auth.go index 7ef9bf7df..9a8d58cc6 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/auth.go +++ b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/auth.go @@ -21,6 +21,7 @@ import ( "fmt" "hash" "io" + "maps" "math/big" "net/http" "net/url" @@ -193,6 +194,15 @@ type awsKmsKeyConfig struct { Algorithm 
string `json:"algorithm"` } +type azureKeyVaultConfig struct { + Key string `json:"key"` + KeyVersion string `json:"key_version"` + Alg string `json:"key_algorithm"` + Vault string `json:"vault"` + URL *url.URL + APIVersion string `json:"api_version"` +} + func convertSignatureToBase64(alg string, der []byte) (string, error) { r, s, derErr := pointsFromDER(der) if derErr != nil { @@ -265,42 +275,47 @@ func messageDigest(message []byte, alg string) ([]byte, error) { var digest hash.Hash switch alg { - case "ECDSA_SHA_256": + case "ECDSA_SHA_256", "ES256", "ES256K", "PS256", "RS256": digest = sha256.New() - case "ECDSA_SHA_384": + case "ECDSA_SHA_384", "ES384", "PS384", "RS384": digest = sha512.New384() - case "ECDSA_SHA_512": + case "ECDSA_SHA_512", "ES512", "PS512", "RS512": digest = sha512.New() default: return []byte{}, fmt.Errorf("unsupported sign algorithm %s", alg) } - digest.Write(message) + _, err := digest.Write(message) + if err != nil { + return nil, err + } return digest.Sum(nil), nil } // oauth2ClientCredentialsAuthPlugin represents authentication via a bearer token in the HTTP Authorization header // obtained through the OAuth2 client credentials flow type oauth2ClientCredentialsAuthPlugin struct { - GrantType string `json:"grant_type"` - TokenURL string `json:"token_url"` - ClientID string `json:"client_id"` - ClientSecret string `json:"client_secret"` - SigningKeyID string `json:"signing_key"` - Thumbprint string `json:"thumbprint"` - Claims map[string]interface{} `json:"additional_claims"` - IncludeJti bool `json:"include_jti_claim"` - Scopes []string `json:"scopes,omitempty"` - AdditionalHeaders map[string]string `json:"additional_headers,omitempty"` - AdditionalParameters map[string]string `json:"additional_parameters,omitempty"` - AWSKmsKey *awsKmsKeyConfig `json:"aws_kms,omitempty"` - AWSSigningPlugin *awsSigningAuthPlugin `json:"aws_signing,omitempty"` - ClientAssertionType string `json:"client_assertion_type"` - ClientAssertion string 
`json:"client_assertion"` - ClientAssertionPath string `json:"client_assertion_path"` + GrantType string `json:"grant_type"` + TokenURL string `json:"token_url"` + ClientID string `json:"client_id"` + ClientSecret string `json:"client_secret"` + SigningKeyID string `json:"signing_key"` + Thumbprint string `json:"thumbprint"` + Claims map[string]any `json:"additional_claims"` + IncludeJti bool `json:"include_jti_claim"` + Scopes []string `json:"scopes,omitempty"` + AdditionalHeaders map[string]string `json:"additional_headers,omitempty"` + AdditionalParameters map[string]string `json:"additional_parameters,omitempty"` + AWSKmsKey *awsKmsKeyConfig `json:"aws_kms,omitempty"` + AWSSigningPlugin *awsSigningAuthPlugin `json:"aws_signing,omitempty"` + AzureKeyVault *azureKeyVaultConfig `json:"azure_keyvault,omitempty"` + AzureSigningPlugin *azureSigningAuthPlugin `json:"azure_signing,omitempty"` + ClientAssertionType string `json:"client_assertion_type"` + ClientAssertion string `json:"client_assertion"` + ClientAssertionPath string `json:"client_assertion_path"` signingKey *keys.Config - signingKeyParsed interface{} + signingKeyParsed any tokenCache *oauth2Token tlsSkipVerify bool logger logging.Logger @@ -311,15 +326,13 @@ type oauth2Token struct { ExpiresAt time.Time } -func (ap *oauth2ClientCredentialsAuthPlugin) createAuthJWT(ctx context.Context, extClaims map[string]interface{}, signingKey interface{}) (*string, error) { +func (ap *oauth2ClientCredentialsAuthPlugin) createJWSParts(extClaims map[string]any) ([]byte, []byte, string, error) { now := time.Now() - claims := map[string]interface{}{ + claims := map[string]any{ "iat": now.Unix(), "exp": now.Add(10 * time.Minute).Unix(), } - for k, v := range extClaims { - claims[k] = v - } + maps.Copy(claims, extClaims) if len(ap.Scopes) > 0 { claims["scope"] = strings.Join(ap.Scopes, " ") @@ -328,50 +341,66 @@ func (ap *oauth2ClientCredentialsAuthPlugin) createAuthJWT(ctx context.Context, if ap.IncludeJti { jti, err := 
uuid.New(rand.Reader) if err != nil { - return nil, err + return nil, nil, "", err } claims["jti"] = jti } payload, err := json.Marshal(claims) if err != nil { - return nil, err + return nil, nil, "", err } var jwsHeaders []byte var signatureAlg string - if ap.AWSKmsKey == nil { + switch { + case ap.AWSKmsKey == nil && ap.AzureKeyVault == nil: signatureAlg = ap.signingKey.Algorithm - } else { + case ap.AWSKmsKey != nil && ap.AWSKmsKey.Algorithm != "": signatureAlg, err = ap.mapKMSAlgToSign(ap.AWSKmsKey.Algorithm) if err != nil { - return nil, err + return nil, nil, "", err } + case ap.AzureKeyVault != nil && ap.AzureKeyVault.Alg != "": + signatureAlg = ap.AzureKeyVault.Alg } if ap.Thumbprint != "" { bytes, err := hex.DecodeString(ap.Thumbprint) if err != nil { - return nil, err + return nil, nil, "", err } x5t := base64.URLEncoding.EncodeToString(bytes) - jwsHeaders = []byte(fmt.Sprintf(`{"typ":"JWT","alg":"%s","x5t":"%s"}`, signatureAlg, x5t)) + jwsHeaders = fmt.Appendf(nil, `{"typ":"JWT","alg":"%s","x5t":"%s"}`, signatureAlg, x5t) } else { - jwsHeaders = []byte(fmt.Sprintf(`{"typ":"JWT","alg":"%s"}`, signatureAlg)) + jwsHeaders = fmt.Appendf(nil, `{"typ":"JWT","alg":"%s"}`, signatureAlg) } - var jwsCompact []byte - if ap.AWSKmsKey == nil { - jwsCompact, err = jws.SignLiteral(payload, - jwa.SignatureAlgorithm(signatureAlg), + + return jwsHeaders, payload, signatureAlg, nil +} + +func (ap *oauth2ClientCredentialsAuthPlugin) createAuthJWT(ctx context.Context, extClaims map[string]any, signingKey any) (*string, error) { + header, payload, alg, err := ap.createJWSParts(extClaims) + if err != nil { + return nil, err + } + + var clientAssertion []byte + switch { + case ap.AWSKmsKey != nil: + clientAssertion, err = ap.SignWithKMS(ctx, payload, header) + case ap.AzureKeyVault != nil: + clientAssertion, err = ap.SignWithKeyVault(ctx, payload, header) + default: + clientAssertion, err = jws.SignLiteral(payload, + jwa.SignatureAlgorithm(alg), signingKey, - jwsHeaders, + 
header, rand.Reader) - } else { - jwsCompact, err = ap.SignWithKMS(ctx, payload, jwsHeaders) } if err != nil { return nil, err } - jwt := string(jwsCompact) + jwt := string(clientAssertion) return &jwt, nil } @@ -420,6 +449,28 @@ func (ap *oauth2ClientCredentialsAuthPlugin) SignWithKMS(ctx context.Context, pa return nil, errors.New("missing AWS credentials, failed to sign the assertion with kms") } +func (ap *oauth2ClientCredentialsAuthPlugin) SignWithKeyVault(ctx context.Context, payload []byte, hdrBuf []byte) ([]byte, error) { + if ap.AzureSigningPlugin == nil { + return nil, errors.New("missing Azure credentials, failed to sign the assertion with KeyVault") + } + + encodedHdr := base64.RawURLEncoding.EncodeToString(hdrBuf) + encodedPayload := base64.RawURLEncoding.EncodeToString(payload) + input := encodedHdr + "." + encodedPayload + digest, err := messageDigest([]byte(input), ap.AzureSigningPlugin.keyVaultSignPlugin.config.Alg) + if err != nil { + fmt.Println("unsupported algorithm", ap.AzureSigningPlugin.keyVaultSignPlugin.config.Alg) + return nil, err + } + + signature, err := ap.AzureSigningPlugin.SignDigest(ctx, digest) + if err != nil { + return nil, err + } + + return []byte(input + "." 
+ signature), nil +} + func (ap *oauth2ClientCredentialsAuthPlugin) parseSigningKey(c Config) (err error) { if ap.SigningKeyID == "" { return errors.New("signing_key required for jwt_bearer grant type") @@ -475,6 +526,7 @@ func (ap *oauth2ClientCredentialsAuthPlugin) NewClient(c Config) (*http.Client, clientCredentialExists["client_secret"] = ap.ClientSecret != "" clientCredentialExists["signing_key"] = ap.SigningKeyID != "" clientCredentialExists["aws_kms"] = ap.AWSKmsKey != nil + clientCredentialExists["azure_keyvault"] = ap.AzureKeyVault != nil clientCredentialExists["client_assertion"] = ap.ClientAssertion != "" clientCredentialExists["client_assertion_path"] = ap.ClientAssertionPath != "" @@ -487,14 +539,15 @@ func (ap *oauth2ClientCredentialsAuthPlugin) NewClient(c Config) (*http.Client, } if notEmptyVarCount == 0 { - return nil, errors.New("please provide one of client_secret, signing_key, aws_kms, client_assertion, or client_assertion_path required") + return nil, errors.New("please provide one of client_secret, signing_key, aws_kms, azure_keyvault, client_assertion, or client_assertion_path required") } if notEmptyVarCount > 1 { - return nil, errors.New("can only use one of client_secret, signing_key, aws_kms, client_assertion, or client_assertion_path") + return nil, errors.New("can only use one of client_secret, signing_key, aws_kms, azure_keyvault, client_assertion, or client_assertion_path") } - if clientCredentialExists["aws_kms"] { + switch { + case clientCredentialExists["aws_kms"]: if ap.AWSSigningPlugin == nil { return nil, errors.New("aws_kms and aws_signing required") } @@ -503,81 +556,35 @@ func (ap *oauth2ClientCredentialsAuthPlugin) NewClient(c Config) (*http.Client, if err != nil { return nil, err } - } else if clientCredentialExists["client_assertion"] { + case clientCredentialExists["azure_keyvault"]: + _, err := ap.AzureSigningPlugin.NewClient(c) + if err != nil { + return nil, err + } + case clientCredentialExists["client_assertion"]: if 
ap.ClientAssertionType == "" { ap.ClientAssertionType = defaultClientAssertionType } if ap.ClientID == "" { return nil, errors.New("client_id and client_assertion required") } - } else if clientCredentialExists["client_assertion_path"] { + case clientCredentialExists["client_assertion_path"]: if ap.ClientAssertionType == "" { ap.ClientAssertionType = defaultClientAssertionType } if ap.ClientID == "" { return nil, errors.New("client_id and client_assertion_path required") } - } else if clientCredentialExists["client_secret"] { - if ap.ClientID == "" { - return nil, errors.New("client_id and client_secret required") - } + case clientCredentialExists["client_secret"] && ap.ClientID == "": + return nil, errors.New("client_id and client_secret required") } } return DefaultRoundTripperClient(t, *c.ResponseHeaderTimeoutSeconds), nil } -// requestToken tries to obtain an access token using either the client credentials flow -// https://tools.ietf.org/html/rfc6749#section-4.4 -// or the JWT authorization grant -// https://tools.ietf.org/html/rfc7523 -func (ap *oauth2ClientCredentialsAuthPlugin) requestToken(ctx context.Context) (*oauth2Token, error) { +func (ap *oauth2ClientCredentialsAuthPlugin) createTokenReqBody(ctx context.Context) (url.Values, error) { body := url.Values{} - if ap.GrantType == grantTypeJwtBearer { - authJwt, err := ap.createAuthJWT(ctx, ap.Claims, ap.signingKeyParsed) - if err != nil { - return nil, err - } - body.Add("grant_type", "urn:ietf:params:oauth:grant-type:jwt-bearer") - body.Add("assertion", *authJwt) - } else { - body.Add("grant_type", grantTypeClientCredentials) - - if ap.SigningKeyID != "" || ap.AWSKmsKey != nil { - authJwt, err := ap.createAuthJWT(ctx, ap.Claims, ap.signingKeyParsed) - if err != nil { - return nil, err - } - body.Add("client_assertion_type", defaultClientAssertionType) - body.Add("client_assertion", *authJwt) - - if ap.ClientID != "" { - body.Add("client_id", ap.ClientID) - } - } else if ap.ClientAssertion != "" { - if 
ap.ClientAssertionType == "" { - ap.ClientAssertionType = defaultClientAssertionType - } - if ap.ClientID != "" { - body.Add("client_id", ap.ClientID) - } - body.Add("client_assertion_type", ap.ClientAssertionType) - body.Add("client_assertion", ap.ClientAssertion) - } else if ap.ClientAssertionPath != "" { - if ap.ClientAssertionType == "" { - ap.ClientAssertionType = defaultClientAssertionType - } - bytes, err := os.ReadFile(ap.ClientAssertionPath) - if err != nil { - return nil, err - } - if ap.ClientID != "" { - body.Add("client_id", ap.ClientID) - } - body.Add("client_assertion_type", ap.ClientAssertionType) - body.Add("client_assertion", strings.TrimSpace(string(bytes))) - } - } if len(ap.Scopes) > 0 { body.Add("scope", strings.Join(ap.Scopes, " ")) @@ -587,7 +594,69 @@ func (ap *oauth2ClientCredentialsAuthPlugin) requestToken(ctx context.Context) ( body.Set(k, v) } - r, err := http.NewRequestWithContext(ctx, "POST", ap.TokenURL, strings.NewReader(body.Encode())) + if ap.GrantType == grantTypeJwtBearer { + authJWT, err := ap.createAuthJWT(ctx, ap.Claims, ap.signingKeyParsed) + if err != nil { + return nil, err + } + body.Add("grant_type", "urn:ietf:params:oauth:grant-type:jwt-bearer") + body.Add("assertion", *authJWT) + return body, nil + } + + body.Add("grant_type", grantTypeClientCredentials) + + switch { + case ap.SigningKeyID != "" || ap.AWSKmsKey != nil || ap.AzureKeyVault != nil: + authJwt, err := ap.createAuthJWT(ctx, ap.Claims, ap.signingKeyParsed) + if err != nil { + return nil, err + } + body.Add("client_assertion_type", defaultClientAssertionType) + body.Add("client_assertion", *authJwt) + + if ap.ClientID != "" { + body.Add("client_id", ap.ClientID) + } + case ap.ClientAssertion != "": + if ap.ClientAssertionType == "" { + ap.ClientAssertionType = defaultClientAssertionType + } + if ap.ClientID != "" { + body.Add("client_id", ap.ClientID) + } + body.Add("client_assertion_type", ap.ClientAssertionType) + body.Add("client_assertion", 
ap.ClientAssertion) + + case ap.ClientAssertionPath != "": + if ap.ClientAssertionType == "" { + ap.ClientAssertionType = defaultClientAssertionType + } + bytes, err := os.ReadFile(ap.ClientAssertionPath) + if err != nil { + return nil, err + } + if ap.ClientID != "" { + body.Add("client_id", ap.ClientID) + } + body.Add("client_assertion_type", ap.ClientAssertionType) + body.Add("client_assertion", strings.TrimSpace(string(bytes))) + } + + return body, nil +} + +// requestToken tries to obtain an access token using either the client credentials flow +// https://tools.ietf.org/html/rfc6749#section-4.4 +// or the JWT authorization grant +// https://tools.ietf.org/html/rfc7523 +func (ap *oauth2ClientCredentialsAuthPlugin) requestToken(ctx context.Context) (*oauth2Token, error) { + body, err := ap.createTokenReqBody(ctx) + if err != nil { + return nil, err + } + + r, err := http.NewRequestWithContext(ctx, http.MethodPost, ap.TokenURL, strings.NewReader(body.Encode())) if err != nil { return nil, err } @@ -769,6 +838,7 @@ type awsSigningAuthPlugin struct { AWSAssumeRoleCredentials *awsAssumeRoleCredentialService `json:"assume_role_credentials,omitempty"` AWSWebIdentityCredentials *awsWebIdentityCredentialService `json:"web_identity_credentials,omitempty"` AWSProfileCredentials *awsProfileCredentialService `json:"profile_credentials,omitempty"` + AWSSSOCredentials *awsSSOCredentialsService `json:"sso_credentials,omitempty"` AWSService string `json:"service,omitempty"` AWSSignatureVersion string `json:"signature_version,omitempty"` @@ -884,6 +954,11 @@ func (ap *awsSigningAuthPlugin) awsCredentialService() awsCredentialService { chain.addService(ap.AWSMetadataCredentials) } + if ap.AWSSSOCredentials != nil { + ap.AWSSSOCredentials.logger = ap.logger + chain.addService(ap.AWSSSOCredentials) + } + return &chain } @@ -941,6 +1016,7 @@ func (ap *awsSigningAuthPlugin) validateAndSetDefaults(serviceType string) error cfgs[ap.AWSAssumeRoleCredentials != nil]++ 
cfgs[ap.AWSWebIdentityCredentials != nil]++ cfgs[ap.AWSProfileCredentials != nil]++ + cfgs[ap.AWSSSOCredentials != nil]++ if cfgs[true] == 0 { return errors.New("a AWS credential service must be specified when S3 signing is enabled") @@ -1007,3 +1083,84 @@ func (ap *awsSigningAuthPlugin) SignDigest(ctx context.Context, digest []byte, k return "", fmt.Errorf(`cannot use SignDigest with aws service %q`, ap.AWSService) } } + +type azureSigningAuthPlugin struct { + MIAuthPlugin *azureManagedIdentitiesAuthPlugin `json:"azure_managed_identity,omitempty"` + keyVaultSignPlugin *azureKeyVaultSignPlugin + keyVaultConfig *azureKeyVaultConfig + host string + Service string `json:"service"` + logger logging.Logger +} + +func (ap *azureSigningAuthPlugin) NewClient(c Config) (*http.Client, error) { + t, err := DefaultTLSConfig(c) + if err != nil { + return nil, err + } + + tknURL, err := url.Parse(c.URL) + if err != nil { + return nil, err + } + + ap.host = tknURL.Host + + if ap.logger == nil { + ap.logger = c.logger + } + + if c.Credentials.OAuth2.AzureKeyVault == nil { + return nil, errors.New("missing keyvault config") + } + ap.keyVaultConfig = c.Credentials.OAuth2.AzureKeyVault + + if err := ap.validateAndSetDefaults(); err != nil { + return nil, err + } + + return DefaultRoundTripperClient(t, *c.ResponseHeaderTimeoutSeconds), nil +} + +func (ap *azureSigningAuthPlugin) validateAndSetDefaults() error { + if ap.MIAuthPlugin == nil { + return errors.New("missing azure managed identity config") + } + ap.MIAuthPlugin.setDefaults() + + if ap.keyVaultSignPlugin != nil { + return nil + } + ap.keyVaultConfig.URL = &url.URL{ + Scheme: "https", + Host: ap.keyVaultConfig.Vault + ".vault.azure.net", + } + ap.keyVaultSignPlugin = newKeyVaultSignPlugin(ap.MIAuthPlugin, ap.keyVaultConfig) + ap.keyVaultSignPlugin.setDefaults() + ap.keyVaultConfig = &ap.keyVaultSignPlugin.config + + return nil +} + +func (ap *azureSigningAuthPlugin) Prepare(req *http.Request) error { + switch ap.Service { + 
case "keyvault": + tkn, err := ap.keyVaultSignPlugin.tokener() + if err != nil { + return err + } + req.Header.Add("Authorization", "Bearer "+tkn) + return nil + default: + return fmt.Errorf("azureSigningAuthPlugin.Prepare() with %s not supported", ap.Service) + } +} + +func (ap *azureSigningAuthPlugin) SignDigest(ctx context.Context, digest []byte) (string, error) { + switch ap.Service { + case "keyvault": + return ap.keyVaultSignPlugin.SignDigest(ctx, digest) + default: + return "", fmt.Errorf(`cannot use SignDigest with azure service %q`, ap.Service) + } +} diff --git a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/aws.go b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/aws.go index a610a8014..45c708ab8 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/aws.go +++ b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/aws.go @@ -5,7 +5,10 @@ package rest import ( + "bytes" "context" + "crypto/sha1" + "encoding/hex" "encoding/json" "encoding/xml" "errors" @@ -13,6 +16,7 @@ import ( "net/http" "net/url" "os" + "path" "path/filepath" "strings" "time" @@ -51,6 +55,7 @@ const ( awsRoleArnEnvVar = "AWS_ROLE_ARN" awsWebIdentityTokenFileEnvVar = "AWS_WEB_IDENTITY_TOKEN_FILE" awsCredentialsFileEnvVar = "AWS_SHARED_CREDENTIALS_FILE" + awsConfigFileEnvVar = "AWS_CONFIG_FILE" awsProfileEnvVar = "AWS_PROFILE" // ref. 
https://docs.aws.amazon.com/sdkref/latest/guide/settings-global.html @@ -95,6 +100,333 @@ func (*awsEnvironmentCredentialService) credentials(context.Context) (aws.Creden return creds, nil } +type ssoSessionDetails struct { + StartUrl string `json:"startUrl"` + Region string `json:"region"` + Name string + AccountID string + RoleName string + AccessToken string `json:"accessToken"` + ExpiresAt time.Time `json:"expiresAt"` + RegistrationExpiresAt time.Time `json:"registrationExpiresAt"` + RefreshToken string `json:"refreshToken"` + ClientId string `json:"clientId"` + ClientSecret string `json:"clientSecret"` +} + +type awsSSOCredentialsService struct { + Path string `json:"path,omitempty"` + SSOCachePath string `json:"cache_path,omitempty"` + + Profile string `json:"profile,omitempty"` + + logger logging.Logger + + creds aws.Credentials + + credentialsExpiresAt time.Time + + session *ssoSessionDetails +} + +func (cs *awsSSOCredentialsService) configPath() (string, error) { + if len(cs.Path) != 0 { + return cs.Path, nil + } + + if cs.Path = os.Getenv(awsConfigFileEnvVar); len(cs.Path) != 0 { + return cs.Path, nil + } + + homeDir, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("user home directory not found: %w", err) + } + + cs.Path = filepath.Join(homeDir, ".aws", "config") + + return cs.Path, nil +} +func (cs *awsSSOCredentialsService) ssoCachePath() (string, error) { + if len(cs.SSOCachePath) != 0 { + return cs.SSOCachePath, nil + } + + homeDir, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("user home directory not found: %w", err) + } + + cs.Path = filepath.Join(homeDir, ".aws", "sso", "cache") + + return cs.Path, nil +} + +func (cs *awsSSOCredentialsService) cacheKeyFileName() (string, error) { + + val := cs.session.StartUrl + if cs.session.Name != "" { + val = cs.session.Name + } + + hash := sha1.New() + hash.Write([]byte(val)) + cacheKey := hex.EncodeToString(hash.Sum(nil)) + + return cacheKey + ".json", nil +} + +func 
(cs *awsSSOCredentialsService) loadSSOCredentials() error { + ssoCachePath, err := cs.ssoCachePath() + if err != nil { + return fmt.Errorf("failed to get sso cache path: %w", err) + } + + cacheKeyFile, err := cs.cacheKeyFileName() + if err != nil { + return err + } + + cacheFile := path.Join(ssoCachePath, cacheKeyFile) + cache, err := os.ReadFile(cacheFile) + if err != nil { + return fmt.Errorf("failed to load cache file: %v", err) + } + + if err := json.Unmarshal(cache, &cs.session); err != nil { + return fmt.Errorf("failed to unmarshal cache file: %v", err) + } + + return nil + +} + +func (cs *awsSSOCredentialsService) loadSession() error { + configPath, err := cs.configPath() + if err != nil { + return fmt.Errorf("failed to get config path: %w", err) + } + config, err := ini.Load(configPath) + if err != nil { + return fmt.Errorf("failed to load config file: %w", err) + } + + section, err := config.GetSection("profile " + cs.Profile) + + if err != nil { + return fmt.Errorf("failed to find profile %s", cs.Profile) + } + + accountID, err := section.GetKey("sso_account_id") + if err != nil { + return fmt.Errorf("failed to find sso_account_id key in profile %s", cs.Profile) + } + + region, err := section.GetKey("region") + if err != nil { + return fmt.Errorf("failed to find region key in profile %s", cs.Profile) + } + + roleName, err := section.GetKey("sso_role_name") + if err != nil { + return fmt.Errorf("failed to find sso_role_name key in profile %s", cs.Profile) + } + + ssoSession, err := section.GetKey("sso_session") + if err != nil { + return fmt.Errorf("failed to find sso_session key in profile %s", cs.Profile) + } + + sessionName := ssoSession.Value() + + session, err := config.GetSection("sso-session " + sessionName) + if err != nil { + return fmt.Errorf("failed to find sso-session %s", sessionName) + } + + startUrl, err := session.GetKey("sso_start_url") + if err != nil { + return fmt.Errorf("failed to find sso_start_url key in sso-session %s", sessionName) 
+ } + + cs.session = &ssoSessionDetails{ + StartUrl: startUrl.Value(), + Name: sessionName, + AccountID: accountID.Value(), + Region: region.Value(), + RoleName: roleName.Value(), + } + + return nil +} + +func (cs *awsSSOCredentialsService) tryRefreshToken() error { + // Check if refresh token is empty + if cs.session.RefreshToken == "" { + return errors.New("refresh token is empty") + } + + // Use the refresh token to get a new access token + // using the clientId, clientSecret and refreshToken from the loaded token + // return the new token + // if error, return error + + type refreshTokenRequest struct { + ClientId string `json:"clientId"` + ClientSecret string `json:"clientSecret"` + RefreshToken string `json:"refreshToken"` + GrantType string `json:"grantType"` + } + + data := refreshTokenRequest{ + ClientId: cs.session.ClientId, + ClientSecret: cs.session.ClientSecret, + RefreshToken: cs.session.RefreshToken, + GrantType: "refresh_token", + } + + body, err := json.Marshal(data) + if err != nil { + return fmt.Errorf("failed to marshal refresh token request: %v", err) + } + + endpoint := fmt.Sprintf("https://oidc.%s.amazonaws.com/token", cs.session.Region) + r, err := http.NewRequest("POST", endpoint, bytes.NewReader(body)) + if err != nil { + return fmt.Errorf("failed to create new request: %v", err) + } + + r.Header.Add("Content-Type", "application/json") + c := &http.Client{} + resp, err := c.Do(r) + if err != nil { + return fmt.Errorf("failed to do request: %v", err) + } + defer resp.Body.Close() + + type refreshTokenResponse struct { + AccessToken string `json:"accessToken"` + ExpiresIn int `json:"expiresIn"` + RefreshToken string `json:"refreshToken"` + } + + refreshedToken := refreshTokenResponse{} + + if err := json.NewDecoder(resp.Body).Decode(&refreshedToken); err != nil { + return fmt.Errorf("failed to decode response: %v", err) + } + + cs.session.AccessToken = refreshedToken.AccessToken + cs.session.ExpiresAt = 
time.Now().Add(time.Duration(refreshedToken.ExpiresIn) * time.Second) + cs.session.RefreshToken = refreshedToken.RefreshToken + + return nil +} + +func (cs *awsSSOCredentialsService) refreshCredentials() error { + url := fmt.Sprintf("https://portal.sso.%s.amazonaws.com/federation/credentials?account_id=%s&role_name=%s", cs.session.Region, cs.session.AccountID, cs.session.RoleName) + + req, err := http.NewRequest("GET", url, nil) + if err != nil { + return err + } + + req.Header.Set("Authorization", "Bearer "+cs.session.AccessToken) + req.Header.Set("Content-Type", "application/json") + + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return err + } + defer resp.Body.Close() + + type roleCredentials struct { + AccessKeyId string `json:"accessKeyId"` + SecretAccessKey string `json:"secretAccessKey"` + SessionToken string `json:"sessionToken"` + Expiration int64 `json:"expiration"` + } + type getRoleCredentialsResponse struct { + RoleCredentials roleCredentials `json:"roleCredentials"` + } + + var result getRoleCredentialsResponse + + if err := json.NewDecoder(resp.Body).Decode(&result); err != nil { + return fmt.Errorf("failed to decode response: %v", err) + } + + cs.creds = aws.Credentials{ + AccessKey: result.RoleCredentials.AccessKeyId, + SecretKey: result.RoleCredentials.SecretAccessKey, + SessionToken: result.RoleCredentials.SessionToken, + RegionName: cs.session.Region, + } + + cs.credentialsExpiresAt = time.Unix(result.RoleCredentials.Expiration, 0) + + return nil +} + +func (cs *awsSSOCredentialsService) loadProfile() { + if cs.Profile != "" { + return + } + + cs.Profile = os.Getenv(awsProfileEnvVar) + + if cs.Profile == "" { + cs.Profile = "default" + } + +} + +func (cs *awsSSOCredentialsService) init() error { + cs.loadProfile() + + if err := cs.loadSession(); err != nil { + return fmt.Errorf("failed to load session: %w", err) + } + + if err := cs.loadSSOCredentials(); err != nil { + return fmt.Errorf("failed to load SSO 
credentials: %w", err) + } + + // this enforces fetching credentials + cs.credentialsExpiresAt = time.Unix(0, 0) + return nil +} + +func (cs *awsSSOCredentialsService) credentials(context.Context) (aws.Credentials, error) { + if cs.session == nil { + if err := cs.init(); err != nil { + return aws.Credentials{}, err + } + } + + if cs.credentialsExpiresAt.Before(time.Now().Add(5 * time.Minute)) { + // Check if the sso token we have is still valid, + // if not, try to refresh it + if cs.session.ExpiresAt.Before(time.Now()) { + // we try and get a new token if we can + if cs.session.RegistrationExpiresAt.Before(time.Now()) { + return aws.Credentials{}, errors.New("cannot refresh token, registration expired") + } + + if err := cs.tryRefreshToken(); err != nil { + return aws.Credentials{}, fmt.Errorf("failed to refresh token: %w", err) + } + } + + if err := cs.refreshCredentials(); err != nil { + return aws.Credentials{}, fmt.Errorf("failed to refresh credentials: %w", err) + } + } + + return cs.creds, nil +} + // awsProfileCredentialService represents a credential provider for AWS that extracts credentials from the AWS // credentials file type awsProfileCredentialService struct { diff --git a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/azure.go b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/azure.go index ae00d48a7..9f7a16432 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/azure.go +++ b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/azure.go @@ -1,6 +1,9 @@ package rest import ( + "bytes" + "context" + "encoding/base64" "encoding/json" "errors" "fmt" @@ -17,6 +20,7 @@ var ( defaultResource = "https://storage.azure.com/" timeout = 5 * time.Second defaultAPIVersionForAppServiceMsi = "2019-08-01" + defaultKeyVaultAPIVersion = "7.4" ) // azureManagedIdentitiesToken holds a token for managed identities for Azure resources @@ -52,11 +56,7 @@ type azureManagedIdentitiesAuthPlugin struct { UseAppServiceMsi bool 
`json:"use_app_service_msi,omitempty"` } -func (ap *azureManagedIdentitiesAuthPlugin) NewClient(c Config) (*http.Client, error) { - if c.Type == "oci" { - return nil, errors.New("azure managed identities auth: OCI service not supported") - } - +func (ap *azureManagedIdentitiesAuthPlugin) setDefaults() { if ap.Endpoint == "" { identityEndpoint := os.Getenv("IDENTITY_ENDPOINT") if identityEndpoint != "" { @@ -79,6 +79,13 @@ func (ap *azureManagedIdentitiesAuthPlugin) NewClient(c Config) (*http.Client, e } } +} + +func (ap *azureManagedIdentitiesAuthPlugin) NewClient(c Config) (*http.Client, error) { + if c.Type == "oci" { + return nil, errors.New("azure managed identities auth: OCI service not supported") + } + ap.setDefaults() t, err := DefaultTLSConfig(c) if err != nil { return nil, err @@ -151,7 +158,6 @@ func azureManagedIdentitiesTokenRequest( if err != nil { return token, err } - return token, nil } @@ -178,3 +184,104 @@ func buildAzureManagedIdentitiesRequestPath( return endpoint + "?" 
+ params.Encode() } + +type azureKeyVaultSignPlugin struct { + config azureKeyVaultConfig + tokener func() (string, error) +} + +func newKeyVaultSignPlugin(ap *azureManagedIdentitiesAuthPlugin, cfg *azureKeyVaultConfig) *azureKeyVaultSignPlugin { + resp := &azureKeyVaultSignPlugin{ + tokener: func() (string, error) { + resp, err := azureManagedIdentitiesTokenRequest( + ap.Endpoint, + ap.APIVersion, + cfg.URL.String(), + ap.ObjectID, + ap.ClientID, + ap.MiResID, + ap.UseAppServiceMsi) + if err != nil { + return "", err + } + return resp.AccessToken, nil + }, + config: *cfg, + } + return resp +} + +func (akv *azureKeyVaultSignPlugin) setDefaults() { + if akv.config.APIVersion == "" { + akv.config.APIVersion = defaultKeyVaultAPIVersion + } +} + +type kvRequest struct { + Alg string `json:"alg"` + Value string `json:"value"` +} + +type kvResponse struct { + KID string `json:"kid"` + Value string `json:"value"` +} + +// SignDigest() uses the Microsoft keyvault rest api to sign a byte digest +// https://learn.microsoft.com/en-us/rest/api/keyvault/keys/sign/sign +func (ap *azureKeyVaultSignPlugin) SignDigest(ctx context.Context, digest []byte) (string, error) { + tkn, err := ap.tokener() + if err != nil { + return "", err + } + if ap.config.URL.Host == "" { + return "", errors.New("keyvault host not set") + } + + signingURL := ap.config.URL.JoinPath("keys", ap.config.Key, ap.config.KeyVersion, "sign") + q := signingURL.Query() + q.Set("api-version", ap.config.APIVersion) + signingURL.RawQuery = q.Encode() + reqBody, err := json.Marshal(kvRequest{ + Alg: ap.config.Alg, + Value: base64.StdEncoding.EncodeToString(digest)}) + if err != nil { + return "", err + } + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, signingURL.String(), bytes.NewBuffer(reqBody)) + if err != nil { + return "", err + } + + req.Header.Add("Authorization", "Bearer "+tkn) + req.Header.Add("Content-Type", "application/json") + + resp, err := http.DefaultClient.Do(req) + if err != nil { + 
return "", err + } + + if resp.StatusCode != http.StatusOK { + if resp.Body != nil { + defer resp.Body.Close() + b, _ := io.ReadAll(resp.Body) + return "", fmt.Errorf("non 200 status code, got: %d. Body: %v", resp.StatusCode, string(b)) + } + return "", fmt.Errorf("non 200 status code from keyvault sign, got: %d", resp.StatusCode) + } + defer resp.Body.Close() + + respBytes, err := io.ReadAll(resp.Body) + if err != nil { + return "", errors.New("failed to read keyvault response body") + } + + var res kvResponse + err = json.Unmarshal(respBytes, &res) + if err != nil { + return "", fmt.Errorf("no valid keyvault response, got: %v", string(respBytes)) + } + + return res.Value, nil +} diff --git a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/rest.go b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/rest.go index e5d8e0f0d..f8be30af5 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/rest.go +++ b/vendor/github.com/open-policy-agent/opa/v1/plugins/rest/rest.go @@ -133,12 +133,12 @@ func (c *Config) authPrepare(req *http.Request, lookup AuthPluginLookupFunc) err // services. type Client struct { bytes *[]byte - json *interface{} + json *any config Config headers map[string]string authPluginLookup AuthPluginLookupFunc logger logging.Logger - loggerFields map[string]interface{} + loggerFields map[string]any distributedTacingOpts tracing.Options } @@ -234,7 +234,7 @@ func (c Client) Logger() logging.Logger { } // LoggerFields returns the fields used for log statements used by Client -func (c Client) LoggerFields() map[string]interface{} { +func (c Client) LoggerFields() map[string]any { return c.loggerFields } @@ -254,7 +254,7 @@ func (c Client) WithHeader(k, v string) Client { // WithJSON returns a shallow copy of the client with the JSON value set as the // message body to include the requests. This function sets the Content-Type // header. 
-func (c Client) WithJSON(body interface{}) Client { +func (c Client) WithJSON(body any) Client { c = c.WithHeader("Content-Type", "application/json") c.json = &body return c @@ -318,7 +318,7 @@ func (c Client) Do(ctx context.Context, method, path string) (*http.Response, er } if c.logger.GetLevel() >= logging.Debug { - c.loggerFields = map[string]interface{}{ + c.loggerFields = map[string]any{ "method": method, "url": url, "headers": withMaskedHeaders(req.Header), diff --git a/vendor/github.com/open-policy-agent/opa/v1/rego/rego.go b/vendor/github.com/open-policy-agent/opa/v1/rego/rego.go index fae39273a..ed51ed4e3 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/rego/rego.go +++ b/vendor/github.com/open-policy-agent/opa/v1/rego/rego.go @@ -99,7 +99,7 @@ type EvalContext struct { hasInput bool time time.Time seed io.Reader - rawInput *interface{} + rawInput *any parsedInput ast.Value metrics metrics.Metrics txn storage.Transaction @@ -128,7 +128,7 @@ type EvalContext struct { baseCache topdown.BaseCache } -func (e *EvalContext) RawInput() *interface{} { +func (e *EvalContext) RawInput() *any { return e.rawInput } @@ -184,7 +184,7 @@ func (e *EvalContext) Transaction() storage.Transaction { type EvalOption func(*EvalContext) // EvalInput configures the input for a Prepared Query's evaluation -func EvalInput(input interface{}) EvalOption { +func EvalInput(input any) EvalOption { return func(e *EvalContext) { e.rawInput = &input e.hasInput = true @@ -349,7 +349,7 @@ func EvalSortSets(yes bool) EvalOption { } } -// EvalCopyMaps causes the evaluator to copy `map[string]interface{}`s before returning them. +// EvalCopyMaps causes the evaluator to copy `map[string]any`s before returning them. 
func EvalCopyMaps(yes bool) EvalOption { return func(e *EvalContext) { e.copyMaps = yes @@ -391,9 +391,7 @@ func EvalNondeterministicBuiltins(yes bool) EvalOption { func (pq preparedQuery) Modules() map[string]*ast.Module { mods := make(map[string]*ast.Module) - for name, mod := range pq.r.parsedModules { - mods[name] = mod - } + maps.Copy(mods, pq.r.parsedModules) for _, b := range pq.r.bundles { for _, mod := range b.Modules { @@ -591,7 +589,7 @@ type Rego struct { parsedPackage *ast.Package imports []string parsedImports []*ast.Import - rawInput *interface{} + rawInput *any parsedInput ast.Value unknowns []string parsedUnknowns []*ast.Term @@ -636,7 +634,7 @@ type Rego struct { schemaSet *ast.SchemaSet target string // target type (wasm, rego, etc.) opa opa.EvalEngine - generateJSON func(*ast.Term, *EvalContext) (interface{}, error) + generateJSON func(*ast.Term, *EvalContext) (any, error) printHook print.Hook enablePrintStatements bool distributedTacingOpts tracing.Options @@ -904,7 +902,7 @@ func ParsedImports(imp []*ast.Import) func(r *Rego) { // Input returns an argument that sets the Rego input document. Input should be // a native Go value representing the input document. -func Input(x interface{}) func(r *Rego) { +func Input(x any) func(r *Rego) { return func(r *Rego) { r.rawInput = &x } @@ -1237,7 +1235,7 @@ func Target(t string) func(r *Rego) { } // GenerateJSON sets the AST to JSON converter for the results. 
-func GenerateJSON(f func(*ast.Term, *EvalContext) (interface{}, error)) func(r *Rego) { +func GenerateJSON(f func(*ast.Term, *EvalContext) (any, error)) func(r *Rego) { return func(r *Rego) { r.generateJSON = f } @@ -1985,7 +1983,7 @@ func (r *Rego) parseInput() (ast.Value, error) { return r.parseRawInput(r.rawInput, r.metrics) } -func (*Rego) parseRawInput(rawInput *interface{}, m metrics.Metrics) (ast.Value, error) { +func (*Rego) parseRawInput(rawInput *any, m metrics.Metrics) (ast.Value, error) { var input ast.Value if rawInput == nil { @@ -1998,7 +1996,7 @@ func (*Rego) parseRawInput(rawInput *interface{}, m metrics.Metrics) (ast.Value, rawPtr := util.Reference(rawInput) // roundtrip through json: this turns slices (e.g. []string, []bool) into - // []interface{}, the only array type ast.InterfaceToValue can work with + // []any, the only array type ast.InterfaceToValue can work with if err := util.RoundTrip(rawPtr); err != nil { return nil, err } @@ -2248,7 +2246,7 @@ func (r *Rego) eval(ctx context.Context, ectx *EvalContext) (ResultSet, error) { func (r *Rego) evalWasm(ctx context.Context, ectx *EvalContext) (ResultSet, error) { input := ectx.rawInput if ectx.parsedInput != nil { - i := interface{}(ectx.parsedInput) + i := any(ectx.parsedInput) input = &i } result, err := r.opa.Eval(ctx, opa.EvalOpts{ @@ -2311,17 +2309,18 @@ func (r *Rego) generateResult(qr topdown.QueryResult, ectx *EvalContext) (Result result := newResult() for k, term := range qr { - v, err := r.generateJSON(term, ectx) - if err != nil { - return result, err - } - if rw, ok := rewritten[k]; ok { k = rw } if isTermVar(k) || isTermWasmVar(k) || k.IsGenerated() || k.IsWildcard() { continue } + + v, err := r.generateJSON(term, ectx) + if err != nil { + return result, err + } + result.Bindings[string(k)] = v } @@ -2795,11 +2794,11 @@ type refResolver struct { r resolver.Resolver } -func iteration(x interface{}) bool { +func iteration(x any) bool { var stopped bool - vis := 
ast.NewGenericVisitor(func(x interface{}) bool { + vis := ast.NewGenericVisitor(func(x any) bool { switch x := x.(type) { case *ast.Term: if ast.IsComprehension(x.Value) { @@ -2831,6 +2830,9 @@ func iteration(x interface{}) bool { } func parseStringsToRefs(s []string) ([]ast.Ref, error) { + if len(s) == 0 { + return nil, nil + } refs := make([]ast.Ref, len(s)) for i := range refs { @@ -2895,7 +2897,7 @@ func newFunction(decl *Function, f topdown.BuiltinFunc) func(*Rego) { } } -func generateJSON(term *ast.Term, ectx *EvalContext) (interface{}, error) { +func generateJSON(term *ast.Term, ectx *EvalContext) (any, error) { return ast.JSONWithOpt(term.Value, ast.JSONOpt{ SortSets: ectx.sortSets, diff --git a/vendor/github.com/open-policy-agent/opa/v1/rego/resultset.go b/vendor/github.com/open-policy-agent/opa/v1/rego/resultset.go index cc0710426..983de2223 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/rego/resultset.go +++ b/vendor/github.com/open-policy-agent/opa/v1/rego/resultset.go @@ -12,7 +12,7 @@ type ResultSet []Result // Vars represents a collection of variable bindings. The keys are the variable // names and the values are the binding values. -type Vars map[string]interface{} +type Vars map[string]any // WithoutWildcards returns a copy of v with wildcard variables removed. func (v Vars) WithoutWildcards() Vars { @@ -46,12 +46,12 @@ type Location struct { // ExpressionValue defines the value of an expression in a Rego query. 
type ExpressionValue struct { - Value interface{} `json:"value"` - Text string `json:"text"` - Location *Location `json:"location"` + Value any `json:"value"` + Text string `json:"text"` + Location *Location `json:"location"` } -func newExpressionValue(expr *ast.Expr, value interface{}) *ExpressionValue { +func newExpressionValue(expr *ast.Expr, value any) *ExpressionValue { result := &ExpressionValue{ Value: value, } diff --git a/vendor/github.com/open-policy-agent/opa/v1/resolver/wasm/wasm.go b/vendor/github.com/open-policy-agent/opa/v1/resolver/wasm/wasm.go index c70daa8db..f23282b40 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/resolver/wasm/wasm.go +++ b/vendor/github.com/open-policy-agent/opa/v1/resolver/wasm/wasm.go @@ -17,7 +17,7 @@ import ( // New creates a new Resolver instance which is using the Wasm module // policy for the given entrypoint ref. -func New(entrypoints []ast.Ref, policy []byte, data interface{}) (*Resolver, error) { +func New(entrypoints []ast.Ref, policy []byte, data any) (*Resolver, error) { e, err := opa.LookupEngine("wasm") if err != nil { return nil, err @@ -97,9 +97,9 @@ func (r *Resolver) Eval(ctx context.Context, input resolver.Input) (resolver.Res return resolver.Result{}, fmt.Errorf("internal error: invalid entrypoint id %s", numValue) } - var in *interface{} + var in *any if input.Input != nil { - var str interface{} = []byte(input.Input.String()) + var str any = []byte(input.Input.String()) in = &str } @@ -122,12 +122,12 @@ func (r *Resolver) Eval(ctx context.Context, input resolver.Input) (resolver.Res } // SetData will update the external data for the Wasm instance. -func (r *Resolver) SetData(ctx context.Context, data interface{}) error { +func (r *Resolver) SetData(ctx context.Context, data any) error { return r.o.SetData(ctx, data) } // SetDataPath will set the provided data on the wasm instance at the specified path. 
-func (r *Resolver) SetDataPath(ctx context.Context, path []string, data interface{}) error { +func (r *Resolver) SetDataPath(ctx context.Context, path []string, data any) error { return r.o.SetDataPath(ctx, path, data) } @@ -168,7 +168,7 @@ func getResult(evalResult *opa.Result) (ast.Value, error) { return nil, err } - result := obj.Get(ast.StringTerm("result")) + result := obj.Get(ast.InternedStringTerm("result")) return result.Value, nil } diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/ast.go b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/ast.go index 9f14df0e5..27b8a7483 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/ast.go +++ b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/ast.go @@ -28,7 +28,7 @@ func (u *updateAST) Remove() bool { return u.remove } -func (u *updateAST) Set(v interface{}) { +func (u *updateAST) Set(v any) { if v, ok := v.(ast.Value); ok { u.value = v } else { @@ -36,7 +36,7 @@ func (u *updateAST) Set(v interface{}) { } } -func (u *updateAST) Value() interface{} { +func (u *updateAST) Value() any { return u.value } @@ -46,7 +46,7 @@ func (u *updateAST) Relative(path storage.Path) dataUpdate { return &cpy } -func (u *updateAST) Apply(v interface{}) interface{} { +func (u *updateAST) Apply(v any) any { if len(u.path) == 0 { return u.value } @@ -72,7 +72,7 @@ func (u *updateAST) Apply(v interface{}) interface{} { return newV } -func newUpdateAST(data interface{}, op storage.PatchOp, path storage.Path, idx int, value ast.Value) (*updateAST, error) { +func newUpdateAST(data any, op storage.PatchOp, path storage.Path, idx int, value ast.Value) (*updateAST, error) { switch data.(type) { case ast.Null, ast.Boolean, ast.Number, ast.String: @@ -154,7 +154,7 @@ func newUpdateArrayAST(data *ast.Array, op storage.PatchOp, path storage.Path, i } func newUpdateObjectAST(data ast.Object, op storage.PatchOp, path storage.Path, idx int, value ast.Value) (*updateAST, error) { - key := 
ast.StringTerm(path[idx]) + key := ast.InternedStringTerm(path[idx]) val := data.Get(key) if idx == len(path)-1 { @@ -174,7 +174,7 @@ func newUpdateObjectAST(data ast.Object, op storage.PatchOp, path storage.Path, return nil, errors.NewNotFoundError(path) } -func interfaceToValue(v interface{}) (ast.Value, error) { +func interfaceToValue(v any) (ast.Value, error) { if v, ok := v.(ast.Value); ok { return v, nil } @@ -200,7 +200,7 @@ func setInAst(data ast.Value, path storage.Path, value ast.Value) (ast.Value, er } func setInAstObject(obj ast.Object, path storage.Path, value ast.Value) (ast.Value, error) { - key := ast.StringTerm(path[0]) + key := ast.InternedStringTerm(path[0]) if len(path) == 1 { obj.Insert(key, ast.NewTerm(value)) @@ -256,7 +256,7 @@ func removeInAst(value ast.Value, path storage.Path) (ast.Value, error) { } func removeInAstObject(obj ast.Object, path storage.Path) (ast.Value, error) { - key := ast.StringTerm(path[0]) + key := ast.InternedStringTerm(path[0]) if len(path) == 1 { var items [][2]*ast.Term diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/inmem.go b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/inmem.go index c70d234d7..742d6c167 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/inmem.go +++ b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/inmem.go @@ -51,20 +51,20 @@ func NewWithOpts(opts ...Opt) storage.Store { if s.returnASTValuesOnRead { s.data = ast.NewObject() } else { - s.data = map[string]interface{}{} + s.data = map[string]any{} } return s } // NewFromObject returns a new in-memory store from the supplied data object. -func NewFromObject(data map[string]interface{}) storage.Store { +func NewFromObject(data map[string]any) storage.Store { return NewFromObjectWithOpts(data) } // NewFromObjectWithOpts returns a new in-memory store from the supplied data object, with the // options passed. 
-func NewFromObjectWithOpts(data map[string]interface{}, opts ...Opt) storage.Store { +func NewFromObjectWithOpts(data map[string]any, opts ...Opt) storage.Store { db := NewWithOpts(opts...) ctx := context.Background() txn, err := db.NewTransaction(ctx, storage.WriteParams) @@ -90,7 +90,7 @@ func NewFromReader(r io.Reader) storage.Store { // JSON serialized object, with extra options. This function is for test purposes. func NewFromReaderWithOpts(r io.Reader, opts ...Opt) storage.Store { d := util.NewJSONDecoder(r) - var data map[string]interface{} + var data map[string]any if err := d.Decode(&data); err != nil { panic(err) } @@ -101,7 +101,7 @@ type store struct { rmu sync.RWMutex // reader-writer lock wmu sync.Mutex // writer lock xid uint64 // last generated transaction id - data interface{} // raw or AST data + data any // raw or AST data policies map[string][]byte // raw policies triggers map[*handle]storage.TriggerConfig // registered triggers @@ -139,7 +139,7 @@ func (db *store) NewTransaction(_ context.Context, params ...storage.Transaction func (db *store) Truncate(ctx context.Context, txn storage.Transaction, params storage.TransactionParams, it storage.Iterator) error { var update *storage.Update var err error - mergedData := map[string]interface{}{} + mergedData := map[string]any{} underlying, err := db.underlying(txn) if err != nil { @@ -158,7 +158,7 @@ func (db *store) Truncate(ctx context.Context, txn storage.Transaction, params s return err } } else { - var value interface{} + var value any err = util.Unmarshal(update.Value, &value) if err != nil { return err @@ -304,7 +304,7 @@ func (db *store) Register(_ context.Context, txn storage.Transaction, config sto return h, nil } -func (db *store) Read(_ context.Context, txn storage.Transaction, path storage.Path) (interface{}, error) { +func (db *store) Read(_ context.Context, txn storage.Transaction, path storage.Path) (any, error) { underlying, err := db.underlying(txn) if err != nil { return nil, err 
@@ -318,7 +318,7 @@ func (db *store) Read(_ context.Context, txn storage.Transaction, path storage.P return v, nil } -func (db *store) Write(_ context.Context, txn storage.Transaction, op storage.PatchOp, path storage.Path, value interface{}) error { +func (db *store) Write(_ context.Context, txn storage.Transaction, op storage.PatchOp, path storage.Path, value any) error { underlying, err := db.underlying(txn) if err != nil { return err @@ -382,7 +382,7 @@ func (db *store) runOnCommitTriggers(ctx context.Context, txn storage.Transactio type illegalResolver struct{} -func (illegalResolver) Resolve(ref ast.Ref) (interface{}, error) { +func (illegalResolver) Resolve(ref ast.Ref) (any, error) { return nil, fmt.Errorf("illegal value: %v", ref) } @@ -412,35 +412,35 @@ func (db *store) underlying(txn storage.Transaction) (*transaction, error) { const rootMustBeObjectMsg = "root must be object" const rootCannotBeRemovedMsg = "root cannot be removed" -func invalidPatchError(f string, a ...interface{}) *storage.Error { +func invalidPatchError(f string, a ...any) *storage.Error { return &storage.Error{ Code: storage.InvalidPatchErr, Message: fmt.Sprintf(f, a...), } } -func mktree(path []string, value interface{}) (map[string]interface{}, error) { +func mktree(path []string, value any) (map[string]any, error) { if len(path) == 0 { // For 0 length path the value is the full tree. 
- obj, ok := value.(map[string]interface{}) + obj, ok := value.(map[string]any) if !ok { return nil, invalidPatchError(rootMustBeObjectMsg) } return obj, nil } - dir := map[string]interface{}{} + dir := map[string]any{} for i := len(path) - 1; i > 0; i-- { dir[path[i]] = value value = dir - dir = map[string]interface{}{} + dir = map[string]any{} } dir[path[0]] = value return dir, nil } -func lookup(path storage.Path, data map[string]interface{}) (interface{}, bool) { +func lookup(path storage.Path, data map[string]any) (any, bool) { if len(path) == 0 { return data, true } @@ -449,7 +449,7 @@ func lookup(path storage.Path, data map[string]interface{}) (interface{}, bool) if !ok { return nil, false } - obj, ok := value.(map[string]interface{}) + obj, ok := value.(map[string]any) if !ok { return nil, false } diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/txn.go b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/txn.go index f8a730391..28e68c20f 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/txn.go +++ b/vendor/github.com/open-policy-agent/opa/v1/storage/inmem/txn.go @@ -63,7 +63,7 @@ func (txn *transaction) ID() uint64 { return txn.xid } -func (txn *transaction) Write(op storage.PatchOp, path storage.Path, value interface{}) error { +func (txn *transaction) Write(op storage.PatchOp, path storage.Path, value any) error { if !txn.write { return &storage.Error{ @@ -129,7 +129,7 @@ func (txn *transaction) Write(op storage.PatchOp, path storage.Path, value inter return nil } -func (txn *transaction) updateRoot(op storage.PatchOp, value interface{}) error { +func (txn *transaction) updateRoot(op storage.PatchOp, value any) error { if op == storage.RemoveOp { return invalidPatchError(rootCannotBeRemovedMsg) } @@ -150,7 +150,7 @@ func (txn *transaction) updateRoot(op storage.PatchOp, value interface{}) error value: valueAST, } } else { - if _, ok := value.(map[string]interface{}); !ok { + if _, ok := 
value.(map[string]any); !ok { return invalidPatchError(rootMustBeObjectMsg) } @@ -194,14 +194,14 @@ func (txn *transaction) Commit() (result storage.TriggerEvent) { return result } -func pointer(v interface{}, path storage.Path) (interface{}, error) { +func pointer(v any, path storage.Path) (any, error) { if v, ok := v.(ast.Value); ok { return ptr.ValuePtr(v, path) } return ptr.Ptr(v, path) } -func deepcpy(v interface{}) interface{} { +func deepcpy(v any) any { if v, ok := v.(ast.Value); ok { var cpy ast.Value @@ -217,7 +217,7 @@ func deepcpy(v interface{}) interface{} { return deepcopy.DeepCopy(v) } -func (txn *transaction) Read(path storage.Path) (interface{}, error) { +func (txn *transaction) Read(path storage.Path) (any, error) { if !txn.write { return pointer(txn.db.data, path) @@ -313,10 +313,10 @@ func (txn *transaction) DeletePolicy(id string) error { type dataUpdate interface { Path() storage.Path Remove() bool - Apply(interface{}) interface{} + Apply(any) any Relative(path storage.Path) dataUpdate - Set(interface{}) - Value() interface{} + Set(any) + Value() any } // update contains state associated with an update to be applied to the @@ -324,10 +324,10 @@ type dataUpdate interface { type updateRaw struct { path storage.Path // data path modified by update remove bool // indicates whether update removes the value at path - value interface{} // value to add/replace at path (ignored if remove is true) + value any // value to add/replace at path (ignored if remove is true) } -func (db *store) newUpdate(data interface{}, op storage.PatchOp, path storage.Path, idx int, value interface{}) (dataUpdate, error) { +func (db *store) newUpdate(data any, op storage.PatchOp, path storage.Path, idx int, value any) (dataUpdate, error) { if db.returnASTValuesOnRead { astData, err := interfaceToValue(data) if err != nil { @@ -342,7 +342,7 @@ func (db *store) newUpdate(data interface{}, op storage.PatchOp, path storage.Pa return newUpdateRaw(data, op, path, idx, value) } 
-func newUpdateRaw(data interface{}, op storage.PatchOp, path storage.Path, idx int, value interface{}) (dataUpdate, error) { +func newUpdateRaw(data any, op storage.PatchOp, path storage.Path, idx int, value any) (dataUpdate, error) { switch data.(type) { case nil, bool, json.Number, string: @@ -350,10 +350,10 @@ func newUpdateRaw(data interface{}, op storage.PatchOp, path storage.Path, idx i } switch data := data.(type) { - case map[string]interface{}: + case map[string]any: return newUpdateObject(data, op, path, idx, value) - case []interface{}: + case []any: return newUpdateArray(data, op, path, idx, value) } @@ -363,14 +363,14 @@ func newUpdateRaw(data interface{}, op storage.PatchOp, path storage.Path, idx i } } -func newUpdateArray(data []interface{}, op storage.PatchOp, path storage.Path, idx int, value interface{}) (dataUpdate, error) { +func newUpdateArray(data []any, op storage.PatchOp, path storage.Path, idx int, value any) (dataUpdate, error) { if idx == len(path)-1 { if path[idx] == "-" || path[idx] == strconv.Itoa(len(data)) { if op != storage.AddOp { return nil, invalidPatchError("%v: invalid patch path", path) } - cpy := make([]interface{}, len(data)+1) + cpy := make([]any, len(data)+1) copy(cpy, data) cpy[len(data)] = value return &updateRaw{path[:len(path)-1], false, cpy}, nil @@ -383,20 +383,20 @@ func newUpdateArray(data []interface{}, op storage.PatchOp, path storage.Path, i switch op { case storage.AddOp: - cpy := make([]interface{}, len(data)+1) + cpy := make([]any, len(data)+1) copy(cpy[:pos], data[:pos]) copy(cpy[pos+1:], data[pos:]) cpy[pos] = value return &updateRaw{path[:len(path)-1], false, cpy}, nil case storage.RemoveOp: - cpy := make([]interface{}, len(data)-1) + cpy := make([]any, len(data)-1) copy(cpy[:pos], data[:pos]) copy(cpy[pos:], data[pos+1:]) return &updateRaw{path[:len(path)-1], false, cpy}, nil default: - cpy := make([]interface{}, len(data)) + cpy := make([]any, len(data)) copy(cpy, data) cpy[pos] = value return 
&updateRaw{path[:len(path)-1], false, cpy}, nil @@ -411,7 +411,7 @@ func newUpdateArray(data []interface{}, op storage.PatchOp, path storage.Path, i return newUpdateRaw(data[pos], op, path, idx+1, value) } -func newUpdateObject(data map[string]interface{}, op storage.PatchOp, path storage.Path, idx int, value interface{}) (dataUpdate, error) { +func newUpdateObject(data map[string]any, op storage.PatchOp, path storage.Path, idx int, value any) (dataUpdate, error) { if idx == len(path)-1 { switch op { @@ -438,7 +438,7 @@ func (u *updateRaw) Path() storage.Path { return u.path } -func (u *updateRaw) Apply(data interface{}) interface{} { +func (u *updateRaw) Apply(data any) any { if len(u.path) == 0 { return u.value } @@ -448,17 +448,17 @@ func (u *updateRaw) Apply(data interface{}) interface{} { } key := u.path[len(u.path)-1] if u.remove { - obj := parent.(map[string]interface{}) + obj := parent.(map[string]any) delete(obj, key) return data } switch parent := parent.(type) { - case map[string]interface{}: + case map[string]any: if parent == nil { - parent = make(map[string]interface{}, 1) + parent = make(map[string]any, 1) } parent[key] = u.value - case []interface{}: + case []any: idx, err := strconv.Atoi(key) if err != nil { panic(err) @@ -468,11 +468,11 @@ func (u *updateRaw) Apply(data interface{}) interface{} { return data } -func (u *updateRaw) Set(v interface{}) { +func (u *updateRaw) Set(v any) { u.value = v } -func (u *updateRaw) Value() interface{} { +func (u *updateRaw) Value() any { return u.value } diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/interface.go b/vendor/github.com/open-policy-agent/opa/v1/storage/interface.go index 94e02a47b..1d0356706 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/storage/interface.go +++ b/vendor/github.com/open-policy-agent/opa/v1/storage/interface.go @@ -25,10 +25,10 @@ type Store interface { NewTransaction(context.Context, ...TransactionParams) (Transaction, error) // Read is called to fetch a 
document referred to by path. - Read(context.Context, Transaction, Path) (interface{}, error) + Read(context.Context, Transaction, Path) (any, error) // Write is called to modify a document referred to by path. - Write(context.Context, Transaction, PatchOp, Path, interface{}) error + Write(context.Context, Transaction, PatchOp, Path, any) error // Commit is called to finish the transaction. If Commit returns an error, the // transaction must be automatically aborted by the Store implementation. @@ -67,18 +67,18 @@ type TransactionParams struct { // Context is a simple container for key/value pairs. type Context struct { - values map[interface{}]interface{} + values map[any]any } // NewContext returns a new context object. func NewContext() *Context { return &Context{ - values: map[interface{}]interface{}{}, + values: map[any]any{}, } } // Get returns the key value in the context. -func (ctx *Context) Get(key interface{}) interface{} { +func (ctx *Context) Get(key any) any { if ctx == nil { return nil } @@ -86,7 +86,7 @@ func (ctx *Context) Get(key interface{}) interface{} { } // Put adds a key/value pair to the context. -func (ctx *Context) Put(key, value interface{}) { +func (ctx *Context) Put(key, value any) { ctx.values[key] = value } @@ -130,7 +130,7 @@ const ( // interface which may be used if the backend does not support writes. type WritesNotSupported struct{} -func (WritesNotSupported) Write(context.Context, Transaction, PatchOp, Path, interface{}) error { +func (WritesNotSupported) Write(context.Context, Transaction, PatchOp, Path, any) error { return writesNotSupportedError() } @@ -176,7 +176,7 @@ type PolicyEvent struct { // DataEvent describes a change to a base data document. 
type DataEvent struct { Path Path - Data interface{} + Data any Removed bool } diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/internal/errors/errors.go b/vendor/github.com/open-policy-agent/opa/v1/storage/internal/errors/errors.go index 778f30d1f..d13fff50f 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/storage/internal/errors/errors.go +++ b/vendor/github.com/open-policy-agent/opa/v1/storage/internal/errors/errors.go @@ -27,7 +27,7 @@ func NewNotFoundErrorWithHint(path storage.Path, hint string) *storage.Error { } } -func NewNotFoundErrorf(f string, a ...interface{}) *storage.Error { +func NewNotFoundErrorf(f string, a ...any) *storage.Error { msg := fmt.Sprintf(f, a...) return &storage.Error{ Code: storage.NotFoundErr, diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/internal/ptr/ptr.go b/vendor/github.com/open-policy-agent/opa/v1/storage/internal/ptr/ptr.go index 902e73546..c5e380af0 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/storage/internal/ptr/ptr.go +++ b/vendor/github.com/open-policy-agent/opa/v1/storage/internal/ptr/ptr.go @@ -13,17 +13,17 @@ import ( "github.com/open-policy-agent/opa/v1/storage/internal/errors" ) -func Ptr(data interface{}, path storage.Path) (interface{}, error) { +func Ptr(data any, path storage.Path) (any, error) { node := data for i := range path { key := path[i] switch curr := node.(type) { - case map[string]interface{}: + case map[string]any: var ok bool if node, ok = curr[key]; !ok { return nil, errors.NewNotFoundError(path) } - case []interface{}: + case []any: pos, err := ValidateArrayIndex(curr, key, path) if err != nil { return nil, err @@ -70,7 +70,7 @@ func ValuePtr(data ast.Value, path storage.Path) (ast.Value, error) { return node, nil } -func ValidateArrayIndex(arr []interface{}, s string, path storage.Path) (int, error) { +func ValidateArrayIndex(arr []any, s string, path storage.Path) (int, error) { idx, ok := isInt(s) if !ok { return 0, 
errors.NewNotFoundErrorWithHint(path, errors.ArrayIndexTypeMsg) @@ -89,7 +89,7 @@ func ValidateASTArrayIndex(arr *ast.Array, s string, path storage.Path) (int, er // ValidateArrayIndexForWrite also checks that `s` is a valid way to address an // array element like `ValidateArrayIndex`, but returns a `resource_conflict` error // if it is not. -func ValidateArrayIndexForWrite(arr []interface{}, s string, i int, path storage.Path) (int, error) { +func ValidateArrayIndexForWrite(arr []any, s string, i int, path storage.Path) (int, error) { idx, ok := isInt(s) if !ok { return 0, errors.NewWriteConflictError(path[:i-1]) @@ -102,12 +102,12 @@ func isInt(s string) (int, bool) { return idx, err == nil } -func inRange(i int, arr interface{}, path storage.Path) (int, error) { +func inRange(i int, arr any, path storage.Path) (int, error) { var arrLen int switch v := arr.(type) { - case []interface{}: + case []any: arrLen = len(v) case *ast.Array: arrLen = v.Len() diff --git a/vendor/github.com/open-policy-agent/opa/v1/storage/storage.go b/vendor/github.com/open-policy-agent/opa/v1/storage/storage.go index 34305f291..ecc382994 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/storage/storage.go +++ b/vendor/github.com/open-policy-agent/opa/v1/storage/storage.go @@ -24,7 +24,7 @@ func NewTransactionOrDie(ctx context.Context, store Store, params ...Transaction // ReadOne is a convenience function to read a single value from the provided Store. It // will create a new Transaction to perform the read with, and clean up after itself // should an error occur. -func ReadOne(ctx context.Context, store Store, path Path) (interface{}, error) { +func ReadOne(ctx context.Context, store Store, path Path) (any, error) { txn, err := store.NewTransaction(ctx) if err != nil { return nil, err @@ -37,7 +37,7 @@ func ReadOne(ctx context.Context, store Store, path Path) (interface{}, error) { // WriteOne is a convenience function to write a single value to the provided Store. 
It // will create a new Transaction to perform the write with, and clean up after itself // should an error occur. -func WriteOne(ctx context.Context, store Store, op PatchOp, path Path, value interface{}) error { +func WriteOne(ctx context.Context, store Store, op PatchOp, path Path, value any) error { txn, err := store.NewTransaction(ctx, WriteParams) if err != nil { return err @@ -74,10 +74,10 @@ func MakeDir(ctx context.Context, store Store, txn Transaction, path Path) error return err } - return store.Write(ctx, txn, AddOp, path, map[string]interface{}{}) + return store.Write(ctx, txn, AddOp, path, map[string]any{}) } - if _, ok := node.(map[string]interface{}); ok { + if _, ok := node.(map[string]any); ok { return nil } @@ -122,7 +122,7 @@ func NonEmpty(ctx context.Context, store Store, txn Transaction) func([]string) if err != nil && !IsNotFound(err) { return false, err } else if err == nil { - if _, ok := val.(map[string]interface{}); ok { + if _, ok := val.(map[string]any); ok { return false, nil } if _, ok := val.(ast.Object); ok { diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/aggregates.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/aggregates.go index fb59fd07f..05a2fdca9 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/aggregates.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/aggregates.go @@ -28,6 +28,22 @@ func builtinCount(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) e func builtinSum(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { switch a := operands[0].Value.(type) { case *ast.Array: + // Fast path for arrays of integers + is := 0 + nonInts := a.Until(func(x *ast.Term) bool { + if n, ok := x.Value.(ast.Number); ok { + if i, ok := n.Int(); ok { + is += i + return false + } + } + return true + }) + if !nonInts { + return iter(ast.InternedIntNumberTerm(is)) + } + + // Non-integer values found, so we need to sum as floats. 
sum := big.NewFloat(0) err := a.Iter(func(x *ast.Term) error { n, ok := x.Value.(ast.Number) @@ -42,6 +58,21 @@ func builtinSum(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) err } return iter(ast.NewTerm(builtins.FloatToNumber(sum))) case ast.Set: + // Fast path for sets of integers + is := 0 + nonInts := a.Until(func(x *ast.Term) bool { + if n, ok := x.Value.(ast.Number); ok { + if i, ok := n.Int(); ok { + is += i + return false + } + } + return true + }) + if !nonInts { + return iter(ast.InternedIntNumberTerm(is)) + } + sum := big.NewFloat(0) err := a.Iter(func(x *ast.Term) error { n, ok := x.Value.(ast.Number) diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/arithmetic.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/arithmetic.go index acfbba3c7..650234019 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/arithmetic.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/arithmetic.go @@ -70,10 +70,8 @@ func builtinPlus(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) er return iter(ast.InternedIntNumberTerm(x + y)) } - f, err := arithPlus(builtins.NumberToFloat(n1), builtins.NumberToFloat(n2)) - if err != nil { - return err - } + f := new(big.Float).Add(builtins.NumberToFloat(n1), builtins.NumberToFloat(n2)) + return iter(ast.NewTerm(builtins.FloatToNumber(f))) } @@ -94,25 +92,11 @@ func builtinMultiply(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term return iter(ast.InternedIntNumberTerm(x * y)) } - f, err := arithMultiply(builtins.NumberToFloat(n1), builtins.NumberToFloat(n2)) - if err != nil { - return err - } + f := new(big.Float).Mul(builtins.NumberToFloat(n1), builtins.NumberToFloat(n2)) + return iter(ast.NewTerm(builtins.FloatToNumber(f))) } -func arithPlus(a, b *big.Float) (*big.Float, error) { - return new(big.Float).Add(a, b), nil -} - -func arithMinus(a, b *big.Float) (*big.Float, error) { - return new(big.Float).Sub(a, b), nil -} - -func arithMultiply(a, b *big.Float) 
(*big.Float, error) { - return new(big.Float).Mul(a, b), nil -} - func arithDivide(a, b *big.Float) (*big.Float, error) { i, acc := b.Int64() if acc == big.Exact && i == 0 { @@ -174,10 +158,8 @@ func builtinMinus(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) e return iter(ast.InternedIntNumberTerm(x - y)) } - f, err := arithMinus(builtins.NumberToFloat(n1), builtins.NumberToFloat(n2)) - if err != nil { - return err - } + f := new(big.Float).Sub(builtins.NumberToFloat(n1), builtins.NumberToFloat(n2)) + return iter(ast.NewTerm(builtins.FloatToNumber(f))) } @@ -185,7 +167,11 @@ func builtinMinus(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) e s2, ok4 := operands[1].Value.(ast.Set) if ok3 && ok4 { - return iter(ast.NewTerm(s1.Diff(s2))) + diff := s1.Diff(s2) + if diff.Len() == 0 { + return iter(ast.InternedEmptySet) + } + return iter(ast.NewTerm(diff)) } if !ok1 && !ok3 { diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/binary.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/binary.go index 6f7ebaf40..05050dbf7 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/binary.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/binary.go @@ -21,7 +21,12 @@ func builtinBinaryAnd(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter return err } - return iter(ast.NewTerm(s1.Intersect(s2))) + i := s1.Intersect(s2) + if i.Len() == 0 { + return iter(ast.InternedEmptySet) + } + + return iter(ast.NewTerm(i)) } func builtinBinaryOr(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/bindings.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/bindings.go index 8c7bfbd17..9dd55f1ba 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/bindings.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/bindings.go @@ -93,7 +93,7 @@ func (u *bindings) plugNamespaced(a *ast.Term, caller *bindings) *ast.Term { } 
cpy := *a arr := make([]*ast.Term, v.Len()) - for i := 0; i < len(arr); i++ { + for i := range arr { arr[i] = u.plugNamespaced(v.Elem(i), caller) } cpy.Value = ast.NewArray(arr...) @@ -119,7 +119,7 @@ func (u *bindings) plugNamespaced(a *ast.Term, caller *bindings) *ast.Term { case ast.Ref: cpy := *a ref := make(ast.Ref, len(v)) - for i := 0; i < len(ref); i++ { + for i := range ref { ref[i] = u.plugNamespaced(v[i], caller) } cpy.Value = ref @@ -185,7 +185,7 @@ func (u *bindings) namespaceVar(v *ast.Term, caller *bindings) *ast.Term { // Root documents (i.e., data, input) should never be namespaced because they // are globally unique. if !ast.RootDocumentNames.Contains(v) { - return ast.NewTerm(ast.Var(string(name) + strconv.FormatUint(u.id, 10))) + return ast.VarTerm(string(name) + strconv.FormatUint(u.id, 10)) } } return v @@ -212,7 +212,7 @@ type namespacingVisitor struct { caller *bindings } -func (vis namespacingVisitor) Visit(x interface{}) bool { +func (vis namespacingVisitor) Visit(x any) bool { switch x := x.(type) { case *ast.ArrayComprehension: x.Term = vis.namespaceTerm(x.Term) @@ -254,7 +254,7 @@ func (vis namespacingVisitor) namespaceTerm(a *ast.Term) *ast.Term { } cpy := *a arr := make([]*ast.Term, v.Len()) - for i := 0; i < len(arr); i++ { + for i := range arr { arr[i] = vis.namespaceTerm(v.Elem(i)) } cpy.Value = ast.NewArray(arr...) 
@@ -280,7 +280,7 @@ func (vis namespacingVisitor) namespaceTerm(a *ast.Term) *ast.Term { case ast.Ref: cpy := *a ref := make(ast.Ref, len(v)) - for i := 0; i < len(ref); i++ { + for i := range ref { ref[i] = vis.namespaceTerm(v[i]) } cpy.Value = ref diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/builtins/builtins.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/builtins/builtins.go index 9fcaea4a2..f56cd7826 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/builtins/builtins.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/builtins/builtins.go @@ -18,15 +18,15 @@ import ( // Cache defines the built-in cache used by the top-down evaluation. The keys // must be comparable and should not be of type string. -type Cache map[interface{}]interface{} +type Cache map[any]any // Put updates the cache for the named built-in. -func (c Cache) Put(k, v interface{}) { +func (c Cache) Put(k, v any) { c[k] = v } // Get returns the cached value for k. -func (c Cache) Get(k interface{}) (interface{}, bool) { +func (c Cache) Get(k any) (any, bool) { v, ok := c[k] return v, ok } @@ -39,7 +39,7 @@ type NDBCache map[string]ast.Object func (c NDBCache) AsValue() ast.Value { out := ast.NewObject() for bname, obj := range c { - out.Insert(ast.StringTerm(bname), ast.NewTerm(obj)) + out.Insert(ast.InternedStringTerm(bname), ast.NewTerm(obj)) } return out } @@ -76,7 +76,7 @@ func (c NDBCache) MarshalJSON() ([]byte, error) { func (c *NDBCache) UnmarshalJSON(data []byte) error { out := map[string]ast.Object{} - var incoming interface{} + var incoming any // Note: We use util.Unmarshal instead of json.Unmarshal to get // correct deserialization of number types. @@ -120,7 +120,7 @@ func (err ErrOperand) Error() string { } // NewOperandErr returns a generic operand error. 
-func NewOperandErr(pos int, f string, a ...interface{}) error { +func NewOperandErr(pos int, f string, a ...any) error { f = fmt.Sprintf("operand %v ", pos) + f return ErrOperand(fmt.Sprintf(f, a...)) } diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/cache.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/cache.go index 42fb6ad3f..a6c89b453 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/cache.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/cache.go @@ -5,6 +5,8 @@ package topdown import ( + "slices" + "github.com/open-policy-agent/opa/v1/ast" "github.com/open-policy-agent/opa/v1/util" ) @@ -218,16 +220,17 @@ func (s *refStack) Push(refs []ast.Ref) { } func (s *refStack) Pop() { + if s == nil { + return + } s.sl = s.sl[:len(s.sl)-1] } func (s *refStack) Prefixed(ref ast.Ref) bool { if s != nil { for i := len(s.sl) - 1; i >= 0; i-- { - for j := range s.sl[i].refs { - if ref.HasPrefix(s.sl[i].refs[j]) { - return true - } + if slices.ContainsFunc(s.sl[i].refs, ref.HasPrefix) { + return true } } } @@ -346,6 +349,10 @@ func (s *functionMocksStack) Put(el frame) { } func (s *functionMocksStack) Get(f ast.Ref) (*ast.Term, bool) { + if s == nil { + return nil, false + } + current := *s.stack[len(s.stack)-1] for i := len(current) - 1; i >= 0; i-- { if r, ok := current[i][f.String()]; ok { diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/cidr.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/cidr.go index 00c034656..c404d5100 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/cidr.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/cidr.go @@ -6,6 +6,7 @@ import ( "fmt" "math/big" "net" + "slices" "sort" cidrMerge "github.com/open-policy-agent/opa/internal/cidr/merge" @@ -392,7 +393,7 @@ func mergeCIDRs(ranges cidrBlockRanges) cidrBlockRanges { ranges[i-1] = &cidrBlockRange{First: &firstIPRange, Last: &lastIPRange, Network: nil} // Delete ranges[i] since merged with the previous. 
- ranges = append(ranges[:i], ranges[i+1:]...) + ranges = slices.Delete(ranges, i, i+1) } } return ranges diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/copypropagation.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/copypropagation.go index 9f4beca54..e582205f4 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/copypropagation.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/copypropagation.go @@ -233,7 +233,7 @@ type bindingPlugTransform struct { pctx *plugContext } -func (t bindingPlugTransform) Transform(x interface{}) (interface{}, error) { +func (t bindingPlugTransform) Transform(x any) (any, error) { switch x := x.(type) { case ast.Var: return t.plugBindingsVar(t.pctx, x), nil @@ -385,11 +385,11 @@ type binding struct { k, v ast.Value } -func containedIn(value ast.Value, x interface{}) bool { +func containedIn(value ast.Value, x any) bool { var stop bool var vis *ast.GenericVisitor - vis = ast.NewGenericVisitor(func(x interface{}) bool { + vis = ast.NewGenericVisitor(func(x any) bool { switch x := x.(type) { case *ast.Every: // skip body vis.Walk(x.Key) diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/unionfind.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/unionfind.go index 528c83a0f..cac2a3009 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/unionfind.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/copypropagation/unionfind.go @@ -82,10 +82,10 @@ func (uf *unionFind) Merge(a, b ast.Value) (*unionFindRoot, bool) { func (uf *unionFind) String() string { o := struct { - Roots map[string]interface{} + Roots map[string]any Parents map[string]ast.Value }{ - map[string]interface{}{}, + map[string]any{}, map[string]ast.Value{}, } diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/crypto.go 
b/vendor/github.com/open-policy-agent/opa/v1/topdown/crypto.go index dafbac785..0fe6183c7 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/crypto.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/crypto.go @@ -97,19 +97,14 @@ func builtinCryptoX509ParseAndVerifyCertificates(_ BuiltinContext, operands []*a return err } - invalid := ast.ArrayTerm( - ast.InternedBooleanTerm(false), - ast.NewTerm(ast.NewArray()), - ) - certs, err := getX509CertsFromString(string(input)) if err != nil { - return iter(invalid) + return iter(ast.ArrayTerm(ast.InternedBooleanTerm(false), ast.InternedEmptyArray)) } verified, err := verifyX509CertificateChain(certs, x509.VerifyOptions{}) if err != nil { - return iter(invalid) + return iter(ast.ArrayTerm(ast.InternedBooleanTerm(false), ast.InternedEmptyArray)) } value, err := ast.InterfaceToValue(extendCertificates(verified)) @@ -117,10 +112,7 @@ func builtinCryptoX509ParseAndVerifyCertificates(_ BuiltinContext, operands []*a return err } - valid := ast.ArrayTerm( - ast.InternedBooleanTerm(true), - ast.NewTerm(value), - ) + valid := ast.ArrayTerm(ast.InternedBooleanTerm(true), ast.NewTerm(value)) return iter(valid) } @@ -156,10 +148,7 @@ func builtinCryptoX509ParseAndVerifyCertificatesWithOptions(_ BuiltinContext, op certs, err := getX509CertsFromString(string(input)) if err != nil { - return iter(ast.ArrayTerm( - ast.InternedBooleanTerm(false), - ast.NewTerm(ast.NewArray()), - )) + return iter(ast.ArrayTerm(ast.InternedBooleanTerm(false), ast.InternedEmptyArray)) } // Collect the cert verification options @@ -170,10 +159,7 @@ func builtinCryptoX509ParseAndVerifyCertificatesWithOptions(_ BuiltinContext, op verified, err := verifyX509CertificateChain(certs, verifyOpt) if err != nil { - return iter(ast.ArrayTerm( - ast.InternedBooleanTerm(false), - ast.NewTerm(ast.NewArray()), - )) + return iter(ast.ArrayTerm(ast.InternedBooleanTerm(false), ast.InternedEmptyArray)) } value, err := ast.InterfaceToValue(verified) @@ 
-181,10 +167,7 @@ func builtinCryptoX509ParseAndVerifyCertificatesWithOptions(_ BuiltinContext, op return err } - return iter(ast.ArrayTerm( - ast.InternedBooleanTerm(true), - ast.NewTerm(value), - )) + return iter(ast.ArrayTerm(ast.InternedBooleanTerm(true), ast.NewTerm(value))) } func extractVerifyOpts(options ast.Object) (verifyOpt x509.VerifyOptions, err error) { @@ -329,7 +312,7 @@ func builtinCryptoX509ParseCertificateRequest(_ BuiltinContext, operands []*ast. return err } - var x interface{} + var x any if err := util.UnmarshalJSON(bs, &x); err != nil { return err } @@ -343,7 +326,7 @@ func builtinCryptoX509ParseCertificateRequest(_ BuiltinContext, operands []*ast. } func builtinCryptoJWKFromPrivateKey(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { - var x interface{} + var x any a := operands[0].Value input, err := builtins.StringOperand(a, 1) @@ -419,7 +402,7 @@ func builtinCryptoParsePrivateKeys(_ BuiltinContext, operands []*ast.Term, iter } if len(rawKeys) == 0 { - return iter(emptyArr) + return iter(ast.InternedEmptyArray) } bs, err := json.Marshal(rawKeys) @@ -427,7 +410,7 @@ func builtinCryptoParsePrivateKeys(_ BuiltinContext, operands []*ast.Term, iter return err } - var x interface{} + var x any if err := util.UnmarshalJSON(bs, &x); err != nil { return err } diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/encoding.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/encoding.go index a27a9c245..6e1e0e8bd 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/encoding.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/encoding.go @@ -128,7 +128,7 @@ func builtinJSONUnmarshal(_ BuiltinContext, operands []*ast.Term, iter func(*ast return err } - var x interface{} + var x any if err := util.UnmarshalJSON([]byte(str), &x); err != nil { return err @@ -169,7 +169,7 @@ func builtinBase64Decode(_ BuiltinContext, operands []*ast.Term, iter func(*ast. 
if err != nil { return err } - return iter(ast.NewTerm(ast.String(result))) + return iter(ast.StringTerm(string(result))) } func builtinBase64IsValid(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -224,7 +224,7 @@ func builtinBase64UrlDecode(_ BuiltinContext, operands []*ast.Term, iter func(*a if err != nil { return err } - return iter(ast.NewTerm(ast.String(result))) + return iter(ast.StringTerm(string(result))) } func builtinURLQueryEncode(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -255,7 +255,7 @@ func builtinURLQueryEncodeObject(_ BuiltinContext, operands []*ast.Term, iter fu return err } - inputs, ok := asJSON.(map[string]interface{}) + inputs, ok := asJSON.(map[string]any) if !ok { return builtins.NewOperandTypeErr(1, operands[0].Value, "object") } @@ -266,7 +266,7 @@ func builtinURLQueryEncodeObject(_ BuiltinContext, operands []*ast.Term, iter fu switch vv := v.(type) { case string: query.Set(k, vv) - case []interface{}: + case []any: for _, val := range vv { strVal, ok := val.(string) if !ok { @@ -340,7 +340,7 @@ func builtinYAMLUnmarshal(_ BuiltinContext, operands []*ast.Term, iter func(*ast buf := bytes.NewBuffer(bs) decoder := util.NewJSONDecoder(buf) - var val interface{} + var val any err = decoder.Decode(&val) if err != nil { return err @@ -358,7 +358,7 @@ func builtinYAMLIsValid(_ BuiltinContext, operands []*ast.Term, iter func(*ast.T return iter(ast.InternedBooleanTerm(false)) } - var x interface{} + var x any err = yaml.Unmarshal([]byte(str), &x) return iter(ast.InternedBooleanTerm(err == nil)) } @@ -380,7 +380,7 @@ func builtinHexDecode(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter if err != nil { return err } - return iter(ast.NewTerm(ast.String(val))) + return iter(ast.StringTerm(string(val))) } func init() { diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/eval.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/eval.go index 221b29d00..e80177710 
100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/eval.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/eval.go @@ -214,7 +214,7 @@ func (e *eval) partial() bool { return e.saveSet != nil } -func (e *eval) unknown(x interface{}, b *bindings) bool { +func (e *eval) unknown(x any, b *bindings) bool { if !e.partial() { return false } @@ -447,6 +447,11 @@ func (e *eval) evalStep(iter evalIterator) error { case *ast.Term: // generateVar inlined here to avoid extra allocations in hot path rterm := ast.VarTerm(e.fmtVarTerm()) + + if e.partial() { + e.inliningControl.PushDisable(rterm.Value, true) + } + err = e.unify(terms, rterm, func() error { if e.saveSet.Contains(rterm, e.bindings) { return e.saveExpr(ast.NewExpr(rterm), e.bindings, func() error { @@ -461,6 +466,10 @@ func (e *eval) evalStep(iter evalIterator) error { } return nil }) + + if e.partial() { + e.inliningControl.PopDisable() + } case *ast.Every: eval := evalEvery{ Every: terms, @@ -706,16 +715,31 @@ func (e *eval) evalWithPush(input, data *ast.Term, functionMocks [][2]*ast.Term, e.data = data } + if e.comprehensionCache == nil { + e.comprehensionCache = newComprehensionCache() + } + e.comprehensionCache.Push() e.virtualCache.Push() + + if e.targetStack == nil { + e.targetStack = newRefStack() + } + e.targetStack.Push(targets) e.inliningControl.PushDisable(disable, true) + + if e.functionMocks == nil { + e.functionMocks = newFunctionMocksStack() + } + e.functionMocks.PutPairs(functionMocks) return oldInput, oldData } func (e *eval) evalWithPop(input, data *ast.Term) { + // NOTE(ae) no nil checks here as we assume evalWithPush always called first e.inliningControl.PopDisable() e.targetStack.Pop() e.virtualCache.Pop() @@ -899,7 +923,6 @@ func (e *eval) evalCall(terms []*ast.Term, iter unifyIterator) error { eval := evalFunc{ e: e, - ref: ref, terms: terms, ir: ir, } @@ -1263,6 +1286,10 @@ func (e *eval) buildComprehensionCache(a *ast.Term) (*ast.Term, error) { return nil, nil } + if 
e.comprehensionCache == nil { + e.comprehensionCache = newComprehensionCache() + } + cache, ok := e.comprehensionCache.Elem(a) if !ok { var err error @@ -1778,9 +1805,9 @@ func (e *eval) resolveReadFromStorage(ref ast.Ref, a ast.Value) (ast.Value, erro if len(path) == 0 { switch obj := blob.(type) { - case map[string]interface{}: + case map[string]any: if len(obj) > 0 { - cpy := make(map[string]interface{}, len(obj)-1) + cpy := make(map[string]any, len(obj)-1) for k, v := range obj { if string(ast.SystemDocumentKey) != k { cpy[k] = v @@ -1799,7 +1826,7 @@ func (e *eval) resolveReadFromStorage(ref ast.Ref, a ast.Value) (ast.Value, erro case ast.Value: v = blob default: - if blob, ok := blob.(map[string]interface{}); ok && !e.strictObjects { + if blob, ok := blob.(map[string]any); ok && !e.strictObjects { v = ast.LazyObject(blob) break } @@ -1996,7 +2023,6 @@ func (e *evalBuiltin) eval(iter unifyIterator) error { type evalFunc struct { e *eval ir *ast.IndexResult - ref ast.Ref terms []*ast.Term } @@ -2039,10 +2065,12 @@ func (e evalFunc) eval(iter unifyIterator) error { } } - if mustGenerateSupport || e.e.inliningControl.shallow || e.e.inliningControl.Disabled(e.ref, false) { + ref := e.terms[0].Value.(ast.Ref) + + if mustGenerateSupport || e.e.inliningControl.shallow || e.e.inliningControl.Disabled(ref, false) { // check if the function definitions, or any of the arguments // contain something unknown - unknown := e.e.unknown(e.ref, e.e.bindings) + unknown := e.e.unknown(ref, e.e.bindings) for i := 1; !unknown && i <= argCount; i++ { unknown = e.e.unknown(e.terms[i], e.e.bindings) } @@ -2141,11 +2169,9 @@ func (e evalFunc) evalValue(iter unifyIterator, argCount int, findOne bool) erro } func (e evalFunc) evalCache(argCount int, iter unifyIterator) (ast.Ref, bool, error) { - var plen int - if len(e.terms) == argCount+2 { // func name + output = 2 - plen = len(e.terms) - 1 - } else { - plen = len(e.terms) + plen := len(e.terms) + if plen == argCount+2 { // func name + 
output = 2 + plen -= 1 } cacheKey := make([]*ast.Term, plen) @@ -2237,8 +2263,7 @@ func (e evalFunc) evalOneRule(iter unifyIterator, rule *ast.Rule, args []*ast.Te } func (e evalFunc) partialEvalSupport(declArgsLen int, iter unifyIterator) error { - - path := e.e.namespaceRef(e.ref) + path := e.e.namespaceRef(e.terms[0].Value.(ast.Ref)) if !e.e.saveSupport.Exists(path) { for _, rule := range e.ir.Rules { @@ -3407,13 +3432,17 @@ func (e evalVirtualComplete) eval(iter unifyIterator) error { var generateSupport bool if e.ir.Default != nil { - // If the other term is not constant OR it's equal to the default value, then - // a support rule must be produced as the default value _may_ be required. On - // the other hand, if the other term is constant (i.e., it does not require - // evaluation) and it differs from the default value then the default value is - // _not_ required, so partially evaluate the rule normally. - rterm := e.rbindings.Plug(e.rterm) - generateSupport = !ast.IsConstant(rterm.Value) || e.ir.Default.Head.Value.Equal(rterm) + // If inlining has been disabled for the rterm, and the default rule has a 'false' result value, + // the default value is inconsequential, and support does not need to be generated. + if !(e.ir.Default.Head.Value.Equal(ast.InternedBooleanTerm(false)) && e.e.inliningControl.Disabled(e.rterm.Value, false)) { + // If the other term is not constant OR it's equal to the default value, then + // a support rule must be produced as the default value _may_ be required. On + // the other hand, if the other term is constant (i.e., it does not require + // evaluation) and it differs from the default value then the default value is + // _not_ required, so partially evaluate the rule normally. 
+ rterm := e.rbindings.Plug(e.rterm) + generateSupport = !ast.IsConstant(rterm.Value) || e.ir.Default.Head.Value.Equal(rterm) + } } if generateSupport || e.e.inliningControl.shallow || e.e.inliningControl.Disabled(e.plugged[:e.pos+1], false) { @@ -4079,7 +4108,7 @@ func newNestedCheckVisitor() *nestedCheckVisitor { return v } -func (v *nestedCheckVisitor) visit(x interface{}) bool { +func (v *nestedCheckVisitor) visit(x any) bool { switch x.(type) { case ast.Ref, ast.Call: v.found = true @@ -4170,7 +4199,7 @@ func isOtherRef(term *ast.Term) bool { return !ref.HasPrefix(ast.DefaultRootRef) && !ref.HasPrefix(ast.InputRootRef) } -func isFunction(env *ast.TypeEnv, ref interface{}) bool { +func isFunction(env *ast.TypeEnv, ref any) bool { var r ast.Ref switch v := ref.(type) { case ast.Ref: diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/graphql.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/graphql.go index c887041cd..871eb4f4c 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/graphql.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/graphql.go @@ -7,14 +7,15 @@ package topdown import ( "encoding/json" "fmt" + "strconv" "strings" - gqlast "github.com/open-policy-agent/opa/internal/gqlparser/ast" - gqlparser "github.com/open-policy-agent/opa/internal/gqlparser/parser" - gqlvalidator "github.com/open-policy-agent/opa/internal/gqlparser/validator" + gqlast "github.com/vektah/gqlparser/v2/ast" + gqlparser "github.com/vektah/gqlparser/v2/parser" + gqlvalidator "github.com/vektah/gqlparser/v2/validator" // Side-effecting import. Triggers GraphQL library's validation rule init() functions. - _ "github.com/open-policy-agent/opa/internal/gqlparser/validator/rules" + _ "github.com/vektah/gqlparser/v2/validator/rules" "github.com/open-policy-agent/opa/v1/ast" "github.com/open-policy-agent/opa/v1/topdown/builtins" @@ -31,9 +32,7 @@ func parseSchema(schema string) (*gqlast.SchemaDocument, error) { // definitions. 
schemaAST, err := gqlparser.ParseSchema(&gqlast.Source{Input: schema}) if err != nil { - errorParts := strings.SplitN(err.Error(), ":", 4) - msg := strings.TrimLeft(errorParts[3], " ") - return nil, fmt.Errorf("%s in GraphQL string at location %s:%s", msg, errorParts[1], errorParts[2]) + return nil, formatGqlParserError(err) } return schemaAST, nil } @@ -42,9 +41,7 @@ func parseSchema(schema string) (*gqlast.SchemaDocument, error) { func parseQuery(query string) (*gqlast.QueryDocument, error) { queryAST, err := gqlparser.ParseQuery(&gqlast.Source{Input: query}) if err != nil { - errorParts := strings.SplitN(err.Error(), ":", 4) - msg := strings.TrimLeft(errorParts[3], " ") - return nil, fmt.Errorf("%s in GraphQL string at location %s:%s", msg, errorParts[1], errorParts[2]) + return nil, formatGqlParserError(err) } return queryAST, nil } @@ -56,15 +53,7 @@ func validateQuery(schema *gqlast.Schema, query *gqlast.QueryDocument) error { // Validate the query against the schema, erroring if there's an issue. err := gqlvalidator.Validate(schema, query) if err != nil { - // We use strings.TrimSuffix to remove the '.' characters that the library - // authors include on most of their validation errors. This should be safe, - // since variable names in their error messages are usually quoted, and - // this affects only the last character(s) in the string. - // NOTE(philipc): We know the error location will be in the query string, - // because schema validation always happens before this function is called. - errorParts := strings.SplitN(err.Error(), ":", 4) - msg := strings.TrimSuffix(strings.TrimLeft(errorParts[3], " "), ".\n") - return fmt.Errorf("%s in GraphQL query string at location %s:%s", msg, errorParts[1], errorParts[2]) + return formatGqlParserError(err) } return nil } @@ -101,7 +90,7 @@ func convertSchema(schemaDoc *gqlast.SchemaDocument) (*gqlast.Schema, error) { // Converts an ast.Object into a gqlast.QueryDocument object. 
func objectToQueryDocument(value ast.Object) (*gqlast.QueryDocument, error) { - // Convert ast.Term to interface{} for JSON encoding below. + // Convert ast.Term to any for JSON encoding below. asJSON, err := ast.JSON(value) if err != nil { return nil, err @@ -122,7 +111,7 @@ func objectToQueryDocument(value ast.Object) (*gqlast.QueryDocument, error) { // Converts an ast.Object into a gqlast.SchemaDocument object. func objectToSchemaDocument(value ast.Object) (*gqlast.SchemaDocument, error) { - // Convert ast.Term to interface{} for JSON encoding below. + // Convert ast.Term to any for JSON encoding below. asJSON, err := ast.JSON(value) if err != nil { return nil, err @@ -223,6 +212,34 @@ func pruneIrrelevantGraphQLASTNodes(value ast.Value) ast.Value { } } +func formatGqlParserError(err error) error { + // We use strings.TrimSuffix to remove the '.' characters that the library + // authors include on most of their validation errors. This should be safe, + // since variable names in their error messages are usually quoted, and + // this affects only the last character(s) in the string. + // NOTE(philipc): We know the error location will be in the query string, + // because schema validation always happens before this function is called. 
+ // NOTE(rm): gqlparser does not _always_ return the error location + // so only populate location if it is available + if err == nil { + return nil + } + // If the error contains location information, format it nicely + errorParts := strings.SplitN(err.Error(), ":", 4) + if len(errorParts) >= 4 { + row, err := strconv.ParseUint(errorParts[1], 10, 64) + if err == nil { + col, err := strconv.ParseUint(errorParts[2], 10, 64) + if err == nil { + msg := strings.TrimSuffix(strings.TrimLeft(errorParts[len(errorParts)-1], " "), ".\n") + return fmt.Errorf("%s in GraphQL string at location %d:%d", msg, row, col) + } + } + } + // Wrap and return the full error if location information is not available + return fmt.Errorf("GraphQL parse error: %w", err) +} + // Reports errors from parsing/validation. func builtinGraphQLParse(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { var queryDoc *gqlast.QueryDocument diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/http.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/http.go index 463f01de2..1bad104ca 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/http.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/http.go @@ -19,6 +19,7 @@ import ( "net/url" "os" "runtime" + "slices" "strconv" "strings" "time" @@ -88,22 +89,13 @@ var cacheableHTTPStatusCodes = [...]int{ } var ( - codeTerm = ast.StringTerm("code") - messageTerm = ast.StringTerm("message") - statusCodeTerm = ast.StringTerm("status_code") - errorTerm = ast.StringTerm("error") - methodTerm = ast.StringTerm("method") - urlTerm = ast.StringTerm("url") - httpSendNetworkErrTerm = ast.StringTerm(HTTPSendNetworkErr) httpSendInternalErrTerm = ast.StringTerm(HTTPSendInternalErr) -) -var ( allowedKeys = ast.NewSet() keyCache = make(map[string]*ast.Term, len(allowedKeyNames)) cacheableCodes = ast.NewSet() - requiredKeys = ast.NewSet(methodTerm, urlTerm) + requiredKeys = ast.NewSet(ast.InternedStringTerm("method"), 
ast.InternedStringTerm("url")) httpSendLatencyMetricKey = "rego_builtin_http_send" httpSendInterQueryCacheHits = httpSendLatencyMetricKey + "_interquery_cache_hits" ) @@ -169,20 +161,20 @@ func generateRaiseErrorResult(err error) *ast.Term { switch err.(type) { case *url.Error: errObj = ast.NewObject( - ast.Item(codeTerm, httpSendNetworkErrTerm), - ast.Item(messageTerm, ast.StringTerm(err.Error())), + ast.Item(ast.InternedStringTerm("code"), httpSendNetworkErrTerm), + ast.Item(ast.InternedStringTerm("message"), ast.StringTerm(err.Error())), ) default: errObj = ast.NewObject( - ast.Item(codeTerm, httpSendInternalErrTerm), - ast.Item(messageTerm, ast.StringTerm(err.Error())), + ast.Item(ast.InternedStringTerm("code"), httpSendInternalErrTerm), + ast.Item(ast.InternedStringTerm("message"), ast.StringTerm(err.Error())), ) } - return ast.NewTerm(ast.NewObject( - ast.Item(statusCodeTerm, ast.InternedIntNumberTerm(0)), - ast.Item(errorTerm, ast.NewTerm(errObj)), - )) + return ast.ObjectTerm( + ast.Item(ast.InternedStringTerm("status_code"), ast.InternedIntNumberTerm(0)), + ast.Item(ast.InternedStringTerm("error"), ast.NewTerm(errObj)), + ) } func getHTTPResponse(bctx BuiltinContext, req ast.Object) (*ast.Term, error) { @@ -242,7 +234,7 @@ func getKeyFromRequest(req ast.Object) (ast.Object, error) { if err != nil { return nil, err } - var allHeaders map[string]interface{} + var allHeaders map[string]any err = ast.As(allHeadersTerm.Value, &allHeaders) if err != nil { return nil, err @@ -325,8 +317,8 @@ func validateHTTPRequestOperand(term *ast.Term, pos int) (ast.Object, error) { // canonicalizeHeaders returns a copy of the headers where the keys are in // canonical HTTP form. 
-func canonicalizeHeaders(headers map[string]interface{}) map[string]interface{} { - canonicalized := map[string]interface{}{} +func canonicalizeHeaders(headers map[string]any) map[string]any { + canonicalized := map[string]any{} for k, v := range headers { canonicalized[http.CanonicalHeaderKey(k)] = v @@ -379,10 +371,8 @@ func verifyHost(bctx BuiltinContext, host string) error { return nil } - for _, allowed := range bctx.Capabilities.AllowNet { - if allowed == host { - return nil - } + if slices.Contains(bctx.Capabilities.AllowNet, host) { + return nil } return fmt.Errorf("unallowed host: %s", host) @@ -420,7 +410,7 @@ func createHTTPRequest(bctx BuiltinContext, obj ast.Object) (*http.Request, *htt enableRedirect, tlsInsecureSkipVerify bool tlsUseSystemCerts *bool tlsConfig tls.Config - customHeaders map[string]interface{} + customHeaders map[string]any ) timeout := defaultHTTPRequestTimeout @@ -518,7 +508,7 @@ func createHTTPRequest(bctx BuiltinContext, obj ast.Object) (*http.Request, *htt return nil, nil, err } var ok bool - customHeaders, ok = headersValInterface.(map[string]interface{}) + customHeaders, ok = headersValInterface.(map[string]any) if !ok { return nil, nil, errors.New("invalid type for headers key") } @@ -1387,7 +1377,7 @@ func formatHTTPResponseToAST(resp *http.Response, forceJSONDecode, forceYAMLDeco } func prepareASTResult(headers http.Header, forceJSONDecode, forceYAMLDecode bool, body []byte, status string, statusCode int) (ast.Value, error) { - var resultBody interface{} + var resultBody any // If the response body cannot be JSON/YAML decoded, // an error will not be returned. 
Instead, the "body" field @@ -1399,7 +1389,7 @@ func prepareASTResult(headers http.Header, forceJSONDecode, forceYAMLDecode bool _ = util.Unmarshal(body, &resultBody) } - result := make(map[string]interface{}) + result := make(map[string]any) result["status"] = status result["status_code"] = statusCode result["body"] = resultBody @@ -1414,10 +1404,10 @@ func prepareASTResult(headers http.Header, forceJSONDecode, forceYAMLDecode bool return resultObj, nil } -func getResponseHeaders(headers http.Header) map[string]interface{} { - respHeaders := map[string]interface{}{} +func getResponseHeaders(headers http.Header) map[string]any { + respHeaders := map[string]any{} for headerName, values := range headers { - var respValues []interface{} + var respValues []any for _, v := range values { respValues = append(respValues, v) } diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/json.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/json.go index aa1023d37..2c7d64288 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/json.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/json.go @@ -7,7 +7,6 @@ package topdown import ( "errors" "fmt" - "strconv" "strings" "github.com/open-policy-agent/opa/v1/ast" @@ -103,7 +102,7 @@ func jsonRemove(a *ast.Term, b *ast.Term) (*ast.Term, error) { v := aValue.Elem(i) // recurse and add the diff of sub objects as needed // Note: Keys in b will be strings for the index, eg path /a/1/b => {"a": {"1": {"b": null}}} - diffValue, err := jsonRemove(v, bObj.Get(ast.StringTerm(strconv.Itoa(i)))) + diffValue, err := jsonRemove(v, bObj.Get(ast.InternedIntegerString(i))) if err != nil { return nil, err } diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/jsonschema.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/jsonschema.go index b1609fb04..88057c774 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/jsonschema.go +++ 
b/vendor/github.com/open-policy-agent/opa/v1/topdown/jsonschema.go @@ -29,7 +29,7 @@ func astValueToJSONSchemaLoader(value ast.Value) (gojsonschema.JSONLoader, error loader = gojsonschema.NewStringLoader(string(x)) case ast.Object: // In case of object serialize it to JSON representation. - var data interface{} + var data any data, err = ast.JSON(value) if err != nil { return nil, err diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/object.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/object.go index 56313b5b5..c6fbe7022 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/object.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/object.go @@ -121,8 +121,8 @@ func builtinObjectGet(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter } // if the get key is not an array, attempt to get the top level key for the operand value in the object - path, err := builtins.ArrayOperand(operands[1].Value, 2) - if err != nil { + path, ok := operands[1].Value.(*ast.Array) + if !ok { if ret := object.Get(operands[1]); ret != nil { return iter(ret) } @@ -150,6 +150,9 @@ func builtinObjectKeys(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Te if err != nil { return err } + if object.Len() == 0 { + return iter(ast.InternedEmptySet) + } return iter(ast.SetTerm(object.Keys()...)) } diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/query.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/query.go index bb55b4a6c..4c6300f8d 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/query.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/query.go @@ -78,7 +78,6 @@ func NewQuery(query ast.Body) *Query { genvarprefix: ast.WildcardPrefix, indexing: true, earlyExit: true, - external: newResolverTrie(), } } @@ -278,6 +277,9 @@ func (q *Query) WithBuiltinErrorList(list *[]Error) *Query { // WithResolver configures an external resolver to use for the given ref. 
func (q *Query) WithResolver(ref ast.Ref, r resolver.Resolver) *Query { + if q.external == nil { + q.external = newResolverTrie() + } q.external.Put(ref, r) return q } @@ -382,7 +384,6 @@ func (q *Query) PartialRun(ctx context.Context) (partials []ast.Body, support [] compiler: q.compiler, store: q.store, baseCache: bc, - targetStack: newRefStack(), txn: q.txn, input: q.input, external: q.external, @@ -392,16 +393,14 @@ func (q *Query) PartialRun(ctx context.Context) (partials []ast.Body, support [] instr: q.instr, builtins: q.builtins, builtinCache: builtins.Cache{}, - functionMocks: newFunctionMocksStack(), interQueryBuiltinCache: q.interQueryBuiltinCache, interQueryBuiltinValueCache: q.interQueryBuiltinValueCache, ndBuiltinCache: q.ndBuiltinCache, virtualCache: vc, - comprehensionCache: newComprehensionCache(), saveSet: newSaveSet(q.unknowns, b, q.instr), saveStack: newSaveStack(), saveSupport: newSaveSupport(), - saveNamespace: ast.StringTerm(q.partialNamespace), + saveNamespace: ast.InternedStringTerm(q.partialNamespace), skipSaveNamespace: q.skipSaveNamespace, inliningControl: &inliningControl{ shallow: q.shallowInlining, @@ -580,7 +579,6 @@ func (q *Query) Iter(ctx context.Context, iter func(QueryResult) error) error { compiler: q.compiler, store: q.store, baseCache: bc, - targetStack: newRefStack(), txn: q.txn, input: q.input, external: q.external, @@ -590,12 +588,10 @@ func (q *Query) Iter(ctx context.Context, iter func(QueryResult) error) error { instr: q.instr, builtins: q.builtins, builtinCache: builtins.Cache{}, - functionMocks: newFunctionMocksStack(), interQueryBuiltinCache: q.interQueryBuiltinCache, interQueryBuiltinValueCache: q.interQueryBuiltinValueCache, ndBuiltinCache: q.ndBuiltinCache, virtualCache: vc, - comprehensionCache: newComprehensionCache(), genvarprefix: q.genvarprefix, runtime: q.runtime, indexing: q.indexing, diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/regex.go 
b/vendor/github.com/open-policy-agent/opa/v1/topdown/regex.go index 2c434dda8..1a5bb6234 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/regex.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/regex.go @@ -101,7 +101,7 @@ func builtinRegexSplit(bctx BuiltinContext, operands []*ast.Term, iter func(*ast for i := range elems { arr[i] = ast.StringTerm(elems[i]) } - return iter(ast.NewTerm(ast.NewArray(arr...))) + return iter(ast.ArrayTerm(arr...)) } func getRegexp(bctx BuiltinContext, pat string) (*regexp.Regexp, error) { @@ -203,7 +203,7 @@ func builtinRegexFind(bctx BuiltinContext, operands []*ast.Term, iter func(*ast. for i := range elems { arr[i] = ast.StringTerm(elems[i]) } - return iter(ast.NewTerm(ast.NewArray(arr...))) + return iter(ast.ArrayTerm(arr...)) } func builtinRegexFindAllStringSubmatch(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -232,10 +232,10 @@ func builtinRegexFindAllStringSubmatch(bctx BuiltinContext, operands []*ast.Term for j := range matches[i] { inner[j] = ast.StringTerm(matches[i][j]) } - outer[i] = ast.NewTerm(ast.NewArray(inner...)) + outer[i] = ast.ArrayTerm(inner...) 
} - return iter(ast.NewTerm(ast.NewArray(outer...))) + return iter(ast.ArrayTerm(outer...)) } func builtinRegexReplace(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -264,7 +264,7 @@ func builtinRegexReplace(bctx BuiltinContext, operands []*ast.Term, iter func(*a return iter(operands[0]) } - return iter(ast.StringTerm(res)) + return iter(ast.InternedStringTerm(res)) } func init() { diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/resolver.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/resolver.go index 362016887..8fff22b1d 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/resolver.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/resolver.go @@ -35,6 +35,10 @@ func (t *resolverTrie) Put(ref ast.Ref, r resolver.Resolver) { func (t *resolverTrie) Resolve(e *eval, ref ast.Ref) (ast.Value, error) { e.metrics.Timer(metrics.RegoExternalResolve).Start() defer e.metrics.Timer(metrics.RegoExternalResolve).Stop() + + if t == nil { + return nil, nil + } node := t for i, t := range ref { child, ok := node.children[t.Value] diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/runtime.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/runtime.go index dc72fc581..8517edb8f 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/runtime.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/runtime.go @@ -11,8 +11,6 @@ import ( "github.com/open-policy-agent/opa/v1/ast" ) -var configStringTerm = ast.StringTerm("config") - var nothingResolver ast.Resolver = illegalResolver{} func builtinOPARuntime(bctx BuiltinContext, _ []*ast.Term, iter func(*ast.Term) error) error { @@ -21,14 +19,14 @@ func builtinOPARuntime(bctx BuiltinContext, _ []*ast.Term, iter func(*ast.Term) return iter(ast.InternedEmptyObject) } - if bctx.Runtime.Get(configStringTerm) != nil { + if bctx.Runtime.Get(ast.InternedStringTerm("config")) != nil { iface, err := ast.ValueToInterface(bctx.Runtime.Value, 
nothingResolver) if err != nil { return err } - if object, ok := iface.(map[string]interface{}); ok { + if object, ok := iface.(map[string]any); ok { if cfgRaw, ok := object["config"]; ok { - if config, ok := cfgRaw.(map[string]interface{}); ok { + if config, ok := cfgRaw.(map[string]any); ok { configPurged, err := activeConfig(config) if err != nil { return err @@ -51,7 +49,7 @@ func init() { RegisterBuiltinFunc(ast.OPARuntime.Name, builtinOPARuntime) } -func activeConfig(config map[string]interface{}) (interface{}, error) { +func activeConfig(config map[string]any) (any, error) { if config["services"] != nil { err := removeServiceCredentials(config["services"]) @@ -70,10 +68,10 @@ func activeConfig(config map[string]interface{}) (interface{}, error) { return config, nil } -func removeServiceCredentials(x interface{}) error { +func removeServiceCredentials(x any) error { switch x := x.(type) { - case []interface{}: + case []any: for _, v := range x { err := removeKey(v, "credentials") if err != nil { @@ -81,7 +79,7 @@ func removeServiceCredentials(x interface{}) error { } } - case map[string]interface{}: + case map[string]any: for _, v := range x { err := removeKey(v, "credentials") if err != nil { @@ -95,10 +93,10 @@ func removeServiceCredentials(x interface{}) error { return nil } -func removeCryptoKeys(x interface{}) error { +func removeCryptoKeys(x any) error { switch x := x.(type) { - case map[string]interface{}: + case map[string]any: for _, v := range x { err := removeKey(v, "key", "private_key") if err != nil { @@ -112,8 +110,8 @@ func removeCryptoKeys(x interface{}) error { return nil } -func removeKey(x interface{}, keys ...string) error { - val, ok := x.(map[string]interface{}) +func removeKey(x any, keys ...string) error { + val, ok := x.(map[string]any) if !ok { return errors.New("type assertion error") } @@ -127,6 +125,6 @@ func removeKey(x interface{}, keys ...string) error { type illegalResolver struct{} -func (illegalResolver) Resolve(ref ast.Ref) 
(interface{}, error) { +func (illegalResolver) Resolve(ref ast.Ref) (any, error) { return nil, fmt.Errorf("illegal value: %v", ref) } diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/save.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/save.go index 439f554a3..47bf7521b 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/save.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/save.go @@ -1,8 +1,10 @@ package topdown import ( + "cmp" "container/list" "fmt" + "slices" "strings" "github.com/open-policy-agent/opa/v1/ast" @@ -355,11 +357,11 @@ func splitPackageAndRule(path ast.Ref) (ast.Ref, ast.Ref) { // being saved. This check allows the evaluator to evaluate statements // completely during partial evaluation as long as they do not depend on any // kind of unknown value or statements that would generate saves. -func saveRequired(c *ast.Compiler, ic *inliningControl, icIgnoreInternal bool, ss *saveSet, b *bindings, x interface{}, rec bool) bool { +func saveRequired(c *ast.Compiler, ic *inliningControl, icIgnoreInternal bool, ss *saveSet, b *bindings, x any, rec bool) bool { var found bool - vis := ast.NewGenericVisitor(func(node interface{}) bool { + vis := ast.NewGenericVisitor(func(node any) bool { if found { return found } @@ -418,13 +420,7 @@ func ignoreDuringPartial(bi *ast.Builtin) bool { // Note(philipc): We keep this legacy check around to avoid breaking // existing library users. //nolint:staticcheck // We specifically ignore our own linter warning here. - for _, ignore := range ast.IgnoreDuringPartialEval { - if bi == ignore { - return true - } - } - // Otherwise, ensure all non-deterministic builtins are thrown out. 
- return bi.Nondeterministic + return cmp.Or(slices.Contains(ast.IgnoreDuringPartialEval, bi), bi.Nondeterministic) } type inliningControl struct { @@ -436,18 +432,44 @@ type inliningControl struct { type disableInliningFrame struct { internal bool refs []ast.Ref + v ast.Var } -func (i *inliningControl) PushDisable(refs []ast.Ref, internal bool) { +func (i *inliningControl) PushDisable(x any, internal bool) { if i == nil { return } + + switch x := x.(type) { + case []ast.Ref: + i.PushDisableRefs(x, internal) + case ast.Var: + i.PushDisableVar(x, internal) + } +} + +func (i *inliningControl) PushDisableRefs(refs []ast.Ref, internal bool) { + if i == nil { + return + } + i.disable = append(i.disable, disableInliningFrame{ internal: internal, refs: refs, }) } +func (i *inliningControl) PushDisableVar(v ast.Var, internal bool) { + if i == nil { + return + } + + i.disable = append(i.disable, disableInliningFrame{ + internal: internal, + v: v, + }) +} + func (i *inliningControl) PopDisable() { if i == nil { return @@ -455,10 +477,26 @@ func (i *inliningControl) PopDisable() { i.disable = i.disable[:len(i.disable)-1] } -func (i *inliningControl) Disabled(ref ast.Ref, ignoreInternal bool) bool { +func (i *inliningControl) Disabled(x any, ignoreInternal bool) bool { if i == nil { return false } + + switch x := x.(type) { + case ast.Ref: + return i.DisabledRef(x, ignoreInternal) + case ast.Var: + return i.DisabledVar(x, ignoreInternal) + } + + return false +} + +func (i *inliningControl) DisabledRef(ref ast.Ref, ignoreInternal bool) bool { + if i == nil { + return false + } + for _, frame := range i.disable { if !frame.internal || !ignoreInternal { for _, other := range frame.refs { @@ -470,3 +508,16 @@ func (i *inliningControl) Disabled(ref ast.Ref, ignoreInternal bool) bool { } return false } + +func (i *inliningControl) DisabledVar(v ast.Var, ignoreInternal bool) bool { + if i == nil { + return false + } + + for _, frame := range i.disable { + if (!frame.internal || 
!ignoreInternal) && frame.v.Equal(v) { + return true + } + } + return false +} diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/sets.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/sets.go index 9df2d328a..c50efe4a8 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/sets.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/sets.go @@ -35,7 +35,7 @@ func builtinSetIntersection(_ BuiltinContext, operands []*ast.Term, iter func(*a // empty input set if inputSet.Len() == 0 { - return iter(ast.NewTerm(ast.NewSet())) + return iter(ast.InternedEmptySet) } var result ast.Set diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/strings.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/strings.go index 3b1a412c3..376dfa9a6 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/strings.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/strings.go @@ -18,7 +18,6 @@ import ( "github.com/open-policy-agent/opa/v1/ast" "github.com/open-policy-agent/opa/v1/topdown/builtins" - "github.com/open-policy-agent/opa/v1/util" ) func builtinAnyPrefixMatch(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -136,6 +135,9 @@ func builtinFormatInt(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter case ast.Number("8"): format = "%o" case ast.Number("10"): + if i, ok := input.Int(); ok { + return iter(ast.InternedIntegerString(i)) + } format = "%d" case ast.Number("16"): format = "%x" @@ -146,10 +148,10 @@ func builtinFormatInt(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter f := builtins.NumberToFloat(input) i, _ := f.Int(nil) - return iter(ast.StringTerm(fmt.Sprintf(format, i))) + return iter(ast.InternedStringTerm(fmt.Sprintf(format, i))) } -func builtinConcat(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { +func builtinConcat(b BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { join, err := 
builtins.StringOperand(operands[0].Value, 1) if err != nil { @@ -202,7 +204,7 @@ func builtinConcat(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) return builtins.NewOperandTypeErr(2, operands[1].Value, "set", "array") } - return iter(ast.StringTerm(strings.Join(strs, string(join)))) + return iter(ast.InternedStringTerm(strings.Join(strs, string(join)))) } func runesEqual(a, b []rune) bool { @@ -321,18 +323,15 @@ func builtinSubstring(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter } if length < 0 { - return iter(ast.StringTerm(sbase[startIndex:])) + return iter(ast.InternedStringTerm(sbase[startIndex:])) } if startIndex == 0 && length >= len(sbase) { return iter(operands[0]) } - upto := startIndex + length - if len(sbase) < upto { - upto = len(sbase) - } - return iter(ast.StringTerm(sbase[startIndex:upto])) + upto := min(len(sbase), startIndex+length) + return iter(ast.InternedStringTerm(sbase[startIndex:upto])) } if startIndex == 0 && length >= utf8.RuneCountInString(sbase) { @@ -349,14 +348,11 @@ func builtinSubstring(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter if length < 0 { s = string(runes[startIndex:]) } else { - upto := startIndex + length - if len(runes) < upto { - upto = len(runes) - } + upto := min(len(runes), startIndex+length) s = string(runes[startIndex:upto]) } - return iter(ast.StringTerm(s)) + return iter(ast.InternedStringTerm(s)) } func isASCII(s string) bool { @@ -434,7 +430,14 @@ func builtinLower(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) e return err } - return iter(ast.StringTerm(strings.ToLower(string(s)))) + arg := string(s) + low := strings.ToLower(arg) + + if arg == low { + return iter(operands[0]) + } + + return iter(ast.InternedStringTerm(low)) } func builtinUpper(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -450,7 +453,7 @@ func builtinUpper(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) e return iter(operands[0]) } - return 
iter(ast.StringTerm(upp)) + return iter(ast.InternedStringTerm(upp)) } func builtinSplit(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -469,9 +472,10 @@ func builtinSplit(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) e } elems := strings.Split(string(s), string(d)) - arr := util.NewPtrSlice[ast.Term](len(elems)) + arr := make([]*ast.Term, len(elems)) + for i := range elems { - arr[i].Value = ast.String(elems[i]) + arr[i] = ast.InternedStringTerm(elems[i]) } return iter(ast.ArrayTerm(arr...)) @@ -498,7 +502,7 @@ func builtinReplace(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) return iter(operands[0]) } - return iter(ast.StringTerm(replaced)) + return iter(ast.InternedStringTerm(replaced)) } func builtinReplaceN(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -528,7 +532,7 @@ func builtinReplaceN(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term oldnewArr = append(oldnewArr, string(keyVal), string(strVal)) } - return iter(ast.StringTerm(strings.NewReplacer(oldnewArr...).Replace(string(s)))) + return iter(ast.InternedStringTerm(strings.NewReplacer(oldnewArr...).Replace(string(s)))) } func builtinTrim(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -547,7 +551,7 @@ func builtinTrim(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) er return iter(operands[0]) } - return iter(ast.StringTerm(strings.Trim(string(s), string(c)))) + return iter(ast.InternedStringTerm(strings.Trim(string(s), string(c)))) } func builtinTrimLeft(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -566,7 +570,7 @@ func builtinTrimLeft(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term return iter(operands[0]) } - return iter(ast.StringTerm(trimmed)) + return iter(ast.InternedStringTerm(trimmed)) } func builtinTrimPrefix(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -585,7 +589,7 @@ 
func builtinTrimPrefix(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Te return iter(operands[0]) } - return iter(ast.StringTerm(trimmed)) + return iter(ast.InternedStringTerm(trimmed)) } func builtinTrimRight(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -604,7 +608,7 @@ func builtinTrimRight(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter return iter(operands[0]) } - return iter(ast.StringTerm(trimmed)) + return iter(ast.InternedStringTerm(trimmed)) } func builtinTrimSuffix(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -623,7 +627,7 @@ func builtinTrimSuffix(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Te return iter(operands[0]) } - return iter(ast.StringTerm(trimmed)) + return iter(ast.InternedStringTerm(trimmed)) } func builtinTrimSpace(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -637,7 +641,7 @@ func builtinTrimSpace(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter return iter(operands[0]) } - return iter(ast.StringTerm(trimmed)) + return iter(ast.InternedStringTerm(trimmed)) } func builtinSprintf(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -656,7 +660,10 @@ func builtinSprintf(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) if s == "%d" && astArr.Len() == 1 { if n, ok := astArr.Elem(0).Value.(ast.Number); ok { if i, ok := n.Int(); ok { - return iter(ast.InternedStringTerm(strconv.Itoa(i))) + if interned := ast.InternedIntegerString(i); interned != nil { + return iter(interned) + } + return iter(ast.StringTerm(strconv.Itoa(i))) } } } @@ -682,7 +689,7 @@ func builtinSprintf(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) } } - return iter(ast.StringTerm(fmt.Sprintf(string(s), args...))) + return iter(ast.InternedStringTerm(fmt.Sprintf(string(s), args...))) } func builtinReverse(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { @@ -691,7 
+698,7 @@ func builtinReverse(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) return err } - return iter(ast.StringTerm(reverseString(string(s)))) + return iter(ast.InternedStringTerm(reverseString(string(s)))) } func reverseString(str string) string { diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/template.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/template.go index cf4635559..29038a657 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/template.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/template.go @@ -19,7 +19,7 @@ func renderTemplate(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) return err } - var templateVariables map[string]interface{} + var templateVariables map[string]any if err := ast.As(templateVariablesTerm, &templateVariables); err != nil { return err diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/time.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/time.go index 8d2d9b27a..cfb4c1475 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/time.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/time.go @@ -127,8 +127,8 @@ func builtinDate(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) er return err } year, month, day := t.Date() - result := ast.NewArray(ast.InternedIntNumberTerm(year), ast.InternedIntNumberTerm(int(month)), ast.InternedIntNumberTerm(day)) - return iter(ast.NewTerm(result)) + + return iter(ast.ArrayTerm(ast.InternedIntNumberTerm(year), ast.InternedIntNumberTerm(int(month)), ast.InternedIntNumberTerm(day))) } func builtinClock(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/tokens.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/tokens.go index 2050e82d6..bebffe5df 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/tokens.go +++ 
b/vendor/github.com/open-policy-agent/opa/v1/topdown/tokens.go @@ -29,18 +29,7 @@ import ( "github.com/open-policy-agent/opa/v1/topdown/cache" ) -var ( - jwtEncKey = ast.StringTerm("enc") - jwtCtyKey = ast.StringTerm("cty") - jwtIssKey = ast.StringTerm("iss") - jwtExpKey = ast.StringTerm("exp") - jwtNbfKey = ast.StringTerm("nbf") - jwtAudKey = ast.StringTerm("aud") -) - -const ( - headerJwt = "JWT" -) +const headerJwt = "JWT" // JSONWebToken represent the 3 parts (header, payload & signature) of // @@ -86,7 +75,7 @@ func builtinJWTDecode(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Ter return fmt.Errorf("JWT payload had invalid encoding: %v", err) } - if cty := token.decodedHeader.Get(jwtCtyKey); cty != nil { + if cty := token.decodedHeader.Get(ast.InternedStringTerm("cty")); cty != nil { ctyVal := string(cty.Value.(ast.String)) // It is possible for the contents of a token to be another // token as a result of nested signing or encryption. To handle @@ -139,7 +128,7 @@ func builtinJWTVerifyRS256(bctx BuiltinContext, operands []*ast.Term, iter func( signature) }) if err == nil { - return iter(ast.NewTerm(result)) + return iter(ast.InternedBooleanTerm(result)) } return err } @@ -154,7 +143,7 @@ func builtinJWTVerifyRS384(bctx BuiltinContext, operands []*ast.Term, iter func( signature) }) if err == nil { - return iter(ast.NewTerm(result)) + return iter(ast.InternedBooleanTerm(result)) } return err } @@ -169,7 +158,7 @@ func builtinJWTVerifyRS512(bctx BuiltinContext, operands []*ast.Term, iter func( signature) }) if err == nil { - return iter(ast.NewTerm(result)) + return iter(ast.InternedBooleanTerm(result)) } return err } @@ -185,7 +174,7 @@ func builtinJWTVerifyPS256(bctx BuiltinContext, operands []*ast.Term, iter func( nil) }) if err == nil { - return iter(ast.NewTerm(result)) + return iter(ast.InternedBooleanTerm(result)) } return err } @@ -201,7 +190,7 @@ func builtinJWTVerifyPS384(bctx BuiltinContext, operands []*ast.Term, iter func( nil) }) if err == 
nil { - return iter(ast.NewTerm(result)) + return iter(ast.InternedBooleanTerm(result)) } return err } @@ -217,14 +206,14 @@ func builtinJWTVerifyPS512(bctx BuiltinContext, operands []*ast.Term, iter func( nil) }) if err == nil { - return iter(ast.NewTerm(result)) + return iter(ast.InternedBooleanTerm(result)) } return err } // Implements RSA JWT signature verification. -func builtinJWTVerifyRSA(bctx BuiltinContext, jwt ast.Value, keyStr ast.Value, hasher func() hash.Hash, verify func(publicKey *rsa.PublicKey, digest []byte, signature []byte) error) (ast.Value, error) { - return builtinJWTVerify(bctx, jwt, keyStr, hasher, func(publicKey interface{}, digest []byte, signature []byte) error { +func builtinJWTVerifyRSA(bctx BuiltinContext, jwt ast.Value, keyStr ast.Value, hasher func() hash.Hash, verify func(publicKey *rsa.PublicKey, digest []byte, signature []byte) error) (bool, error) { + return builtinJWTVerify(bctx, jwt, keyStr, hasher, func(publicKey any, digest []byte, signature []byte) error { publicKeyRsa, ok := publicKey.(*rsa.PublicKey) if !ok { return errors.New("incorrect public key type") @@ -237,7 +226,7 @@ func builtinJWTVerifyRSA(bctx BuiltinContext, jwt ast.Value, keyStr ast.Value, h func builtinJWTVerifyES256(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { result, err := builtinJWTVerify(bctx, operands[0].Value, operands[1].Value, sha256.New, verifyES) if err == nil { - return iter(ast.NewTerm(result)) + return iter(ast.InternedBooleanTerm(result)) } return err } @@ -246,7 +235,7 @@ func builtinJWTVerifyES256(bctx BuiltinContext, operands []*ast.Term, iter func( func builtinJWTVerifyES384(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { result, err := builtinJWTVerify(bctx, operands[0].Value, operands[1].Value, sha512.New384, verifyES) if err == nil { - return iter(ast.NewTerm(result)) + return iter(ast.InternedBooleanTerm(result)) } return err } @@ -255,12 +244,12 @@ func 
builtinJWTVerifyES384(bctx BuiltinContext, operands []*ast.Term, iter func( func builtinJWTVerifyES512(bctx BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { result, err := builtinJWTVerify(bctx, operands[0].Value, operands[1].Value, sha512.New, verifyES) if err == nil { - return iter(ast.NewTerm(result)) + return iter(ast.InternedBooleanTerm(result)) } return err } -func verifyES(publicKey interface{}, digest []byte, signature []byte) (err error) { +func verifyES(publicKey any, digest []byte, signature []byte) (err error) { defer func() { if r := recover(); r != nil { err = fmt.Errorf("ECDSA signature verification error: %v", r) @@ -283,7 +272,7 @@ func verifyES(publicKey interface{}, digest []byte, signature []byte) (err error type verificationKey struct { alg string kid string - key interface{} + key any } // getKeysFromCertOrJWK returns the public key found in a X.509 certificate or JWK key(s). @@ -346,43 +335,43 @@ func getKeyByKid(kid string, keys []verificationKey) *verificationKey { } // Implements JWT signature verification. 
-func builtinJWTVerify(bctx BuiltinContext, jwt ast.Value, keyStr ast.Value, hasher func() hash.Hash, verify func(publicKey interface{}, digest []byte, signature []byte) error) (ast.Value, error) { +func builtinJWTVerify(bctx BuiltinContext, jwt ast.Value, keyStr ast.Value, hasher func() hash.Hash, verify func(publicKey any, digest []byte, signature []byte) error) (bool, error) { if found, _, _, valid := getTokenFromCache(bctx, jwt, keyStr); found { - return ast.Boolean(valid), nil + return valid, nil } token, err := decodeJWT(jwt) if err != nil { - return nil, err + return false, err } s, err := builtins.StringOperand(keyStr, 2) if err != nil { - return nil, err + return false, err } keys, err := getKeysFromCertOrJWK(string(s)) if err != nil { - return nil, err + return false, err } signature, err := token.decodeSignature() if err != nil { - return nil, err + return false, err } err = token.decodeHeader() if err != nil { - return nil, err + return false, err } header, err := parseTokenHeader(token) if err != nil { - return nil, err + return false, err } - done := func(valid bool) (ast.Boolean, error) { + done := func(valid bool) (bool, error) { putTokenInCache(bctx, jwt, keyStr, nil, nil, valid) - return ast.Boolean(valid), nil + return valid, nil } // Validate the JWT signature @@ -450,7 +439,7 @@ func builtinJWTVerifyHS(bctx BuiltinContext, operands []*ast.Term, hashF func() } if found, _, _, valid := getTokenFromCache(bctx, jwt, astSecret); found { - return iter(ast.NewTerm(ast.Boolean(valid))) + return iter(ast.InternedBooleanTerm(valid)) } // Decode the JSON Web Token @@ -476,7 +465,7 @@ func builtinJWTVerifyHS(bctx BuiltinContext, operands []*ast.Term, hashF func() putTokenInCache(bctx, jwt, astSecret, nil, nil, valid) - return iter(ast.NewTerm(ast.Boolean(valid))) + return iter(ast.InternedBooleanTerm(valid)) } // -- Full JWT verification and decoding -- @@ -701,8 +690,8 @@ func (constraints *tokenConstraints) validAudience(aud ast.Value) bool { // JWT 
algorithms type ( - tokenVerifyFunction func(key interface{}, hash crypto.Hash, payload []byte, signature []byte) error - tokenVerifyAsymmetricFunction func(key interface{}, hash crypto.Hash, digest []byte, signature []byte) error + tokenVerifyFunction func(key any, hash crypto.Hash, payload []byte, signature []byte) error + tokenVerifyAsymmetricFunction func(key any, hash crypto.Hash, digest []byte, signature []byte) error ) // jwtAlgorithm describes a JWS 'alg' value @@ -730,7 +719,7 @@ var tokenAlgorithms = map[string]tokenAlgorithm{ // errSignatureNotVerified is returned when a signature cannot be verified. var errSignatureNotVerified = errors.New("signature not verified") -func verifyHMAC(key interface{}, hash crypto.Hash, payload []byte, signature []byte) error { +func verifyHMAC(key any, hash crypto.Hash, payload []byte, signature []byte) error { macKey, ok := key.([]byte) if !ok { return errors.New("incorrect symmetric key type") @@ -746,14 +735,14 @@ func verifyHMAC(key interface{}, hash crypto.Hash, payload []byte, signature []b } func verifyAsymmetric(verify tokenVerifyAsymmetricFunction) tokenVerifyFunction { - return func(key interface{}, hash crypto.Hash, payload []byte, signature []byte) error { + return func(key any, hash crypto.Hash, payload []byte, signature []byte) error { h := hash.New() h.Write(payload) return verify(key, hash, h.Sum([]byte{}), signature) } } -func verifyRSAPKCS(key interface{}, hash crypto.Hash, digest []byte, signature []byte) error { +func verifyRSAPKCS(key any, hash crypto.Hash, digest []byte, signature []byte) error { publicKeyRsa, ok := key.(*rsa.PublicKey) if !ok { return errors.New("incorrect public key type") @@ -764,7 +753,7 @@ func verifyRSAPKCS(key interface{}, hash crypto.Hash, digest []byte, signature [ return nil } -func verifyRSAPSS(key interface{}, hash crypto.Hash, digest []byte, signature []byte) error { +func verifyRSAPSS(key any, hash crypto.Hash, digest []byte, signature []byte) error { publicKeyRsa, ok := 
key.(*rsa.PublicKey) if !ok { return errors.New("incorrect public key type") @@ -775,7 +764,7 @@ func verifyRSAPSS(key interface{}, hash crypto.Hash, digest []byte, signature [] return nil } -func verifyECDSA(key interface{}, _ crypto.Hash, digest []byte, signature []byte) (err error) { +func verifyECDSA(key any, _ crypto.Hash, digest []byte, signature []byte) (err error) { defer func() { if r := recover(); r != nil { err = fmt.Errorf("ECDSA signature verification error: %v", r) @@ -1005,8 +994,8 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func unverified := ast.ArrayTerm( ast.InternedBooleanTerm(false), - ast.NewTerm(ast.NewObject()), - ast.NewTerm(ast.NewObject()), + ast.InternedEmptyObject, + ast.InternedEmptyObject, ) constraints, err := parseTokenConstraints(b, bctx.Time) if err != nil { @@ -1021,8 +1010,8 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func // FIXME: optimize k, _ := b.Filter(ast.NewObject( - ast.Item(ast.StringTerm("secret"), ast.ObjectTerm()), - ast.Item(ast.StringTerm("cert"), ast.ObjectTerm()), + ast.Item(ast.InternedStringTerm("secret"), ast.InternedEmptyObject), + ast.Item(ast.InternedStringTerm("cert"), ast.InternedEmptyObject), )) if found, th, tp, validSignature := getTokenFromCache(bctx, a, k); found { @@ -1128,7 +1117,7 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func // Check registered claim names against constraints or environment // RFC7159 4.1.1 iss if constraints.iss != "" { - if iss := payload.Get(jwtIssKey); iss != nil { + if iss := payload.Get(ast.InternedStringTerm("iss")); iss != nil { issVal := string(iss.Value.(ast.String)) if constraints.iss != issVal { return iter(unverified) @@ -1138,7 +1127,7 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func } } // RFC7159 4.1.3 aud - if aud := payload.Get(jwtAudKey); aud != nil { + if aud := payload.Get(ast.InternedStringTerm("aud")); aud != nil { 
if !constraints.validAudience(aud.Value) { return iter(unverified) } @@ -1148,7 +1137,7 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func } } // RFC7159 4.1.4 exp - if exp := payload.Get(jwtExpKey); exp != nil { + if exp := payload.Get(ast.InternedStringTerm("exp")); exp != nil { switch v := exp.Value.(type) { case ast.Number: // constraints.time is in nanoseconds but exp Value is in seconds @@ -1161,7 +1150,7 @@ func builtinJWTDecodeVerify(bctx BuiltinContext, operands []*ast.Term, iter func } } // RFC7159 4.1.5 nbf - if nbf := payload.Get(jwtNbfKey); nbf != nil { + if nbf := payload.Get(ast.InternedStringTerm("nbf")); nbf != nil { switch v := nbf.Value.(type) { case ast.Number: // constraints.time is in nanoseconds but nbf Value is in seconds @@ -1229,7 +1218,7 @@ func validateJWTHeader(h string) (ast.Object, error) { // won't support it for now. // This code checks which kind of JWT we are dealing with according to // RFC 7516 Section 9: https://tools.ietf.org/html/rfc7516#section-9 - if header.Get(jwtEncKey) != nil { + if header.Get(ast.InternedStringTerm("enc")) != nil { return nil, errors.New("JWT is a JWE object, which is not supported") } diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/trace.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/trace.go index 070e254d2..4133935fb 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/trace.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/trace.go @@ -407,7 +407,7 @@ func formatEvent(event *Event, depth int) string { return fmt.Sprintf("%v%v %q", padding, event.Op, event.Message) } - var details interface{} + var details any if node, ok := event.Node.(*ast.Rule); ok { details = node.Path() } else if event.Ref != nil { @@ -417,7 +417,7 @@ func formatEvent(event *Event, depth int) string { } template := "%v%v %v" - opts := []interface{}{padding, event.Op, details} + opts := []any{padding, event.Op, details} if event.Message != "" { 
template += " %v" @@ -640,9 +640,9 @@ type PrettyEventOpts struct { PrettyVars bool } -func walkTestTerms(x interface{}, f func(*ast.Term) bool) { +func walkTestTerms(x any, f func(*ast.Term) bool) { var vis *ast.GenericVisitor - vis = ast.NewGenericVisitor(func(x interface{}) bool { + vis = ast.NewGenericVisitor(func(x any) bool { switch x := x.(type) { case ast.Call: for _, t := range x[1:] { @@ -785,7 +785,7 @@ func PrettyEvent(w io.Writer, e *Event, opts PrettyEventOpts) error { func printPrettyVars(w *bytes.Buffer, exprVars map[string]varInfo) { containsTabs := false - varRows := make(map[int]interface{}) + varRows := make(map[int]any) for _, info := range exprVars { if len(info.exprLoc.Tabs) > 0 { containsTabs = true @@ -865,12 +865,9 @@ func printArrows(w *bytes.Buffer, l []varInfo, printValueAt int) { for j := range spaces { tab := false - for _, t := range info.exprLoc.Tabs { - if t == j+prevCol+1 { - w.WriteString("\t") - tab = true - break - } + if slices.Contains(info.exprLoc.Tabs, j+prevCol+1) { + w.WriteString("\t") + tab = true } if !tab { w.WriteString(" ") diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/type_name.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/type_name.go index a611e8f30..37e82ff77 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/type_name.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/type_name.go @@ -10,32 +10,22 @@ import ( "github.com/open-policy-agent/opa/v1/ast" ) -var ( - nullStringTerm = ast.StringTerm("null") - booleanStringTerm = ast.StringTerm("boolean") - numberStringTerm = ast.StringTerm("number") - stringStringTerm = ast.StringTerm("string") - arrayStringTerm = ast.StringTerm("array") - objectStringTerm = ast.StringTerm("object") - setStringTerm = ast.StringTerm("set") -) - func builtinTypeName(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { switch operands[0].Value.(type) { case ast.Null: - return iter(nullStringTerm) + return 
iter(ast.InternedStringTerm("null")) case ast.Boolean: - return iter(booleanStringTerm) + return iter(ast.InternedStringTerm("boolean")) case ast.Number: - return iter(numberStringTerm) + return iter(ast.InternedStringTerm("number")) case ast.String: - return iter(stringStringTerm) + return iter(ast.InternedStringTerm("string")) case *ast.Array: - return iter(arrayStringTerm) + return iter(ast.InternedStringTerm("array")) case ast.Object: - return iter(objectStringTerm) + return iter(ast.InternedStringTerm("object")) case ast.Set: - return iter(setStringTerm) + return iter(ast.InternedStringTerm("set")) } return errors.New("illegal value") diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/uuid.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/uuid.go index d013df9fe..141fb908b 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/uuid.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/uuid.go @@ -26,7 +26,7 @@ func builtinUUIDRFC4122(bctx BuiltinContext, operands []*ast.Term, iter func(*as return err } - result := ast.NewTerm(ast.String(s)) + result := ast.StringTerm(s) bctx.Cache.Put(key, result) return iter(result) diff --git a/vendor/github.com/open-policy-agent/opa/v1/topdown/walk.go b/vendor/github.com/open-policy-agent/opa/v1/topdown/walk.go index 43aa29c97..b85076ac9 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/topdown/walk.go +++ b/vendor/github.com/open-policy-agent/opa/v1/topdown/walk.go @@ -8,8 +8,6 @@ import ( "github.com/open-policy-agent/opa/v1/ast" ) -var emptyArr = ast.ArrayTerm() - func evalWalk(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error) error { input := operands[0] @@ -18,7 +16,7 @@ func evalWalk(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error // we may skip the path construction entirely, and simply return // same pointer in each iteration. This is a *much* more efficient // path when only the values are needed. 
- return walkNoPath(ast.ArrayTerm(emptyArr, input), iter) + return walkNoPath(ast.ArrayTerm(ast.InternedEmptyArray, input), iter) } filter := getOutputPath(operands) @@ -27,11 +25,20 @@ func evalWalk(_ BuiltinContext, operands []*ast.Term, iter func(*ast.Term) error func walk(filter, path *ast.Array, input *ast.Term, iter func(*ast.Term) error) error { if filter == nil || filter.Len() == 0 { + var pathCopy *ast.Array if path == nil { - path = ast.NewArray() + pathCopy = ast.InternedEmptyArrayValue + } else { + // Shallow copy, as while the array is modified, the elements are not + pathCopy = path.Slice(0, path.Len()) } - if err := iter(ast.ArrayTerm(ast.NewTerm(path.Copy()), input)); err != nil { + // TODO(ae): I'd *really* like these terms to be retrieved from a sync.Pool, and + // returned after iter is called. However, all my atttempts to do this have failed + // as there seems to be something holding on to these references after the call, + // leading to modifications that entirely alter the results. Perhaps this is not + // possible to do, but if it is,it would be a huge performance win. + if err := iter(ast.ArrayTerm(ast.NewTerm(pathCopy), input)); err != nil { return err } } diff --git a/vendor/github.com/open-policy-agent/opa/v1/tracing/tracing.go b/vendor/github.com/open-policy-agent/opa/v1/tracing/tracing.go index 2708b78e2..df2fb434a 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/tracing/tracing.go +++ b/vendor/github.com/open-policy-agent/opa/v1/tracing/tracing.go @@ -11,10 +11,10 @@ package tracing import "net/http" // Options are options for the HTTPTracingService, passed along as-is. 
-type Options []interface{} +type Options []any // NewOptions is a helper method for constructing `tracing.Options` -func NewOptions(opts ...interface{}) Options { +func NewOptions(opts ...any) Options { return opts } diff --git a/vendor/github.com/open-policy-agent/opa/v1/types/decode.go b/vendor/github.com/open-policy-agent/opa/v1/types/decode.go index e3e1e9837..367b64bff 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/types/decode.go +++ b/vendor/github.com/open-policy-agent/opa/v1/types/decode.go @@ -131,7 +131,7 @@ type rawobject struct { } type rawstaticproperty struct { - Key interface{} `json:"key"` + Key any `json:"key"` Value json.RawMessage `json:"value"` } diff --git a/vendor/github.com/open-policy-agent/opa/v1/types/types.go b/vendor/github.com/open-policy-agent/opa/v1/types/types.go index c661e9666..f8d7db1ef 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/types/types.go +++ b/vendor/github.com/open-policy-agent/opa/v1/types/types.go @@ -62,12 +62,12 @@ type NamedType struct { func (n *NamedType) typeMarker() string { return n.Type.typeMarker() } func (n *NamedType) String() string { return n.Name + ": " + n.Type.String() } func (n *NamedType) MarshalJSON() ([]byte, error) { - var obj map[string]interface{} + var obj map[string]any switch x := n.Type.(type) { - case interface{ toMap() map[string]interface{} }: + case interface{ toMap() map[string]any }: obj = x.toMap() default: - obj = map[string]interface{}{ + obj = map[string]any{ "type": n.Type.typeMarker(), } } @@ -95,7 +95,7 @@ func Named(name string, t Type) *NamedType { // MarshalJSON returns the JSON encoding of t. func (t Null) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ + return json.Marshal(map[string]any{ "type": t.typeMarker(), }) } @@ -126,7 +126,7 @@ func NewBoolean() Boolean { // MarshalJSON returns the JSON encoding of t. 
func (t Boolean) MarshalJSON() ([]byte, error) { - repr := map[string]interface{}{ + repr := map[string]any{ "type": t.typeMarker(), } return json.Marshal(repr) @@ -149,7 +149,7 @@ func NewString() String { // MarshalJSON returns the JSON encoding of t. func (t String) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ + return json.Marshal(map[string]any{ "type": t.typeMarker(), }) } @@ -171,7 +171,7 @@ func NewNumber() Number { // MarshalJSON returns the JSON encoding of t. func (t Number) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ + return json.Marshal(map[string]any{ "type": t.typeMarker(), }) } @@ -199,8 +199,8 @@ func (t *Array) MarshalJSON() ([]byte, error) { return json.Marshal(t.toMap()) } -func (t *Array) toMap() map[string]interface{} { - repr := map[string]interface{}{ +func (t *Array) toMap() map[string]any { + repr := map[string]any{ "type": t.typeMarker(), } if len(t.static) != 0 { @@ -279,8 +279,8 @@ func (t *Set) MarshalJSON() ([]byte, error) { return json.Marshal(t.toMap()) } -func (t *Set) toMap() map[string]interface{} { - repr := map[string]interface{}{ +func (t *Set) toMap() map[string]any { + repr := map[string]any{ "type": t.typeMarker(), } if t.of != nil { @@ -296,12 +296,12 @@ func (t *Set) String() string { // StaticProperty represents a static object property. type StaticProperty struct { - Key interface{} + Key any Value Type } // NewStaticProperty returns a new StaticProperty object. -func NewStaticProperty(key interface{}, value Type) *StaticProperty { +func NewStaticProperty(key any, value Type) *StaticProperty { return &StaticProperty{ Key: key, Value: value, @@ -310,7 +310,7 @@ func NewStaticProperty(key interface{}, value Type) *StaticProperty { // MarshalJSON returns the JSON encoding of p. 
func (p *StaticProperty) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ + return json.Marshal(map[string]any{ "key": p.Key, "value": p.Value, }) @@ -332,7 +332,7 @@ func NewDynamicProperty(key, value Type) *DynamicProperty { // MarshalJSON returns the JSON encoding of p. func (p *DynamicProperty) MarshalJSON() ([]byte, error) { - return json.Marshal(map[string]interface{}{ + return json.Marshal(map[string]any{ "key": p.Key, "value": p.Value, }) @@ -394,8 +394,8 @@ func (t *Object) StaticProperties() []*StaticProperty { } // Keys returns the keys of the object's static elements. -func (t *Object) Keys() []interface{} { - sl := make([]interface{}, 0, len(t.static)) +func (t *Object) Keys() []any { + sl := make([]any, 0, len(t.static)) for _, p := range t.static { sl = append(sl, p.Key) } @@ -407,8 +407,8 @@ func (t *Object) MarshalJSON() ([]byte, error) { return json.Marshal(t.toMap()) } -func (t *Object) toMap() map[string]interface{} { - repr := map[string]interface{}{ +func (t *Object) toMap() map[string]any { + repr := map[string]any{ "type": t.typeMarker(), } if len(t.static) != 0 { @@ -421,7 +421,7 @@ func (t *Object) toMap() map[string]interface{} { } // Select returns the type of the named property. 
-func (t *Object) Select(name interface{}) Type { +func (t *Object) Select(name any) Type { pos := sort.Search(len(t.static), func(x int) bool { return util.Compare(t.static[x].Key, name) >= 0 }) @@ -481,7 +481,7 @@ func mergeObjects(a, b *Object) *Object { dynamicProps = b.dynamic } - staticPropsMap := make(map[interface{}]Type) + staticPropsMap := make(map[any]Type) for _, sp := range a.static { staticPropsMap[sp.Key] = sp.Value @@ -546,8 +546,8 @@ func (t Any) MarshalJSON() ([]byte, error) { return json.Marshal(t.toMap()) } -func (t Any) toMap() map[string]interface{} { - repr := map[string]interface{}{ +func (t Any) toMap() map[string]any { + repr := map[string]any{ "type": t.typeMarker(), } if len(t) != 0 { @@ -588,10 +588,7 @@ func (t Any) Union(other Any) Any { return other } // Prealloc the output list. - maxLen := lenT - if lenT < lenOther { - maxLen = lenOther - } + maxLen := max(lenT, lenOther) merged := make(Any, 0, maxLen) // Note(philipc): Create a merged slice, doing the minimum number of // comparisons along the way. We treat this as a problem of merging two @@ -754,7 +751,7 @@ func (t *Function) String() string { // MarshalJSON returns the JSON encoding of t. func (t *Function) MarshalJSON() ([]byte, error) { - repr := map[string]interface{}{ + repr := map[string]any{ "type": t.typeMarker(), } if len(t.args) > 0 { @@ -897,10 +894,7 @@ func Compare(a, b Type) int { lenStaticA := len(objA.static) lenStaticB := len(objB.static) - minLen := lenStaticA - if lenStaticB < minLen { - minLen = lenStaticB - } + minLen := min(lenStaticB, lenStaticA) for i := range minLen { if cmp := util.Compare(objA.static[i].Key, objB.static[i].Key); cmp != 0 { @@ -994,7 +988,7 @@ func Or(a, b Type) Type { } // Select returns a property or item of a. 
-func Select(a Type, x interface{}) Type { +func Select(a Type, x any) Type { switch a := unwrap(a).(type) { case *Array: n, ok := x.(json.Number) @@ -1105,17 +1099,13 @@ func Nil(a Type) bool { case nil: return true case *Function: - for i := range a.args { - if Nil(a.args[i]) { - return true - } + if slices.ContainsFunc(a.args, Nil) { + return true } return Nil(a.result) case *Array: - for i := range a.static { - if Nil(a.static[i]) { - return true - } + if slices.ContainsFunc(a.static, Nil) { + return true } if a.dynamic != nil { return Nil(a.dynamic) @@ -1136,7 +1126,7 @@ func Nil(a Type) bool { } // TypeOf returns the type of the Golang native value. -func TypeOf(x interface{}) Type { +func TypeOf(x any) Type { switch x := x.(type) { case nil: return Nl @@ -1146,22 +1136,22 @@ func TypeOf(x interface{}) Type { return S case json.Number: return N - case map[string]interface{}: - // The ast.ValueToInterface() function returns ast.Object values as map[string]interface{} - // so map[string]interface{} must be handled here because the type checker uses the value + case map[string]any: + // The ast.ValueToInterface() function returns ast.Object values as map[string]any + // so map[string]any must be handled here because the type checker uses the value // to interface conversion when inferring object types. 
static := make([]*StaticProperty, 0, len(x)) for k, v := range x { static = append(static, NewStaticProperty(k, TypeOf(v))) } return NewObject(static, nil) - case map[interface{}]interface{}: + case map[any]any: static := make([]*StaticProperty, 0, len(x)) for k, v := range x { static = append(static, NewStaticProperty(k, TypeOf(v))) } return NewObject(static, nil) - case []interface{}: + case []any: static := make([]Type, len(x)) for i := range x { static[i] = TypeOf(x[i]) @@ -1178,10 +1168,7 @@ func (s typeSlice) Swap(i, j int) { s[i], s[j] = s[j], s[i] } func (s typeSlice) Len() int { return len(s) } func typeSliceCompare(a, b []Type) int { - minLen := len(a) - if len(b) < minLen { - minLen = len(b) - } + minLen := min(len(b), len(a)) for i := range minLen { if cmp := Compare(a[i], b[i]); cmp != 0 { return cmp diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/compare.go b/vendor/github.com/open-policy-agent/opa/v1/util/compare.go index 2569375b1..df78f6475 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/util/compare.go +++ b/vendor/github.com/open-policy-agent/opa/v1/util/compare.go @@ -13,10 +13,10 @@ import ( // Compare returns 0 if a equals b, -1 if a is less than b, and 1 if b is than a. // // For comparison between values of different types, the following ordering is used: -// nil < bool < int, float64 < string < []interface{} < map[string]interface{}. Slices and maps +// nil < bool < int, float64 < string < []any < map[string]any. Slices and maps // are compared recursively. If one slice or map is a subset of the other slice or map // it is considered "less than". Nil is always equal to nil. 
-func Compare(a, b interface{}) int { +func Compare(a, b any) int { aSortOrder := sortOrder(a) bSortOrder := sortOrder(b) if aSortOrder < bSortOrder { @@ -73,15 +73,12 @@ func Compare(a, b interface{}) int { } return 1 } - case []interface{}: + case []any: switch b := b.(type) { - case []interface{}: + case []any: bLen := len(b) aLen := len(a) - minLen := aLen - if bLen < minLen { - minLen = bLen - } + minLen := min(bLen, aLen) for i := range minLen { cmp := Compare(a[i], b[i]) if cmp != 0 { @@ -95,17 +92,14 @@ func Compare(a, b interface{}) int { } return 1 } - case map[string]interface{}: + case map[string]any: switch b := b.(type) { - case map[string]interface{}: + case map[string]any: aKeys := KeysSorted(a) bKeys := KeysSorted(b) aLen := len(aKeys) bLen := len(bKeys) - minLen := aLen - if bLen < minLen { - minLen = bLen - } + minLen := min(bLen, aLen) for i := range minLen { if aKeys[i] < bKeys[i] { return -1 @@ -152,7 +146,7 @@ func compareJSONNumber(a, b json.Number) int { return bigA.Cmp(bigB) } -func sortOrder(v interface{}) int { +func sortOrder(v any) int { switch v.(type) { case nil: return nilSort @@ -166,9 +160,9 @@ func sortOrder(v interface{}) int { return numberSort case string: return stringSort - case []interface{}: + case []any: return arraySort - case map[string]interface{}: + case map[string]any: return objectSort } panic(fmt.Sprintf("illegal argument of type %T", v)) diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/hashmap.go b/vendor/github.com/open-policy-agent/opa/v1/util/hashmap.go index cf6a385f4..69a90cbb5 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/util/hashmap.go +++ b/vendor/github.com/open-policy-agent/opa/v1/util/hashmap.go @@ -10,7 +10,7 @@ import ( ) // T is a concise way to refer to T. -type T interface{} +type T any type Hasher interface { Hash() int @@ -64,7 +64,7 @@ func NewHashMap(eq func(T, T) bool, hash func(T) int) *HashMap { // Copy returns a shallow copy of this HashMap. 
func (h *TypedHashMap[K, V]) Copy() *TypedHashMap[K, V] { - cpy := NewTypedHashMap[K, V](h.keq, h.veq, h.khash, h.vhash, h.def) + cpy := NewTypedHashMap(h.keq, h.veq, h.khash, h.vhash, h.def) h.Iter(func(k K, v V) bool { cpy.Put(k, v) return false diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/json.go b/vendor/github.com/open-policy-agent/opa/v1/util/json.go index 5a4e460b6..fdb2626c7 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/util/json.go +++ b/vendor/github.com/open-policy-agent/opa/v1/util/json.go @@ -21,11 +21,11 @@ import ( // // This function is intended to be used in place of the standard json.Marshal // function when json.Number is required. -func UnmarshalJSON(bs []byte, x interface{}) error { +func UnmarshalJSON(bs []byte, x any) error { return unmarshalJSON(bs, x, true) } -func unmarshalJSON(bs []byte, x interface{}, ext bool) error { +func unmarshalJSON(bs []byte, x any, ext bool) error { buf := bytes.NewBuffer(bs) decoder := NewJSONDecoder(buf) if err := decoder.Decode(x); err != nil { @@ -61,8 +61,8 @@ func NewJSONDecoder(r io.Reader) *json.Decoder { // // If the data cannot be decoded, this function will panic. This function is for // test purposes. -func MustUnmarshalJSON(bs []byte) interface{} { - var x interface{} +func MustUnmarshalJSON(bs []byte) any { + var x any if err := UnmarshalJSON(bs, &x); err != nil { panic(err) } @@ -73,7 +73,7 @@ func MustUnmarshalJSON(bs []byte) interface{} { // // If the data cannot be encoded, this function will panic. This function is for // test purposes. -func MustMarshalJSON(x interface{}) []byte { +func MustMarshalJSON(x any) []byte { bs, err := json.Marshal(x) if err != nil { panic(err) @@ -86,7 +86,7 @@ func MustMarshalJSON(x interface{}) []byte { // Thereby, it is converting its argument to the representation expected by // rego.Input and inmem's Write operations. Works with both references and // values. 
-func RoundTrip(x *interface{}) error { +func RoundTrip(x *any) error { bs, err := json.Marshal(x) if err != nil { return err @@ -99,8 +99,8 @@ func RoundTrip(x *interface{}) error { // // Used for preparing Go types (including pointers to structs) into values to be // put through util.RoundTrip(). -func Reference(x interface{}) *interface{} { - var y interface{} +func Reference(x any) *any { + var y any rv := reflect.ValueOf(x) if rv.Kind() == reflect.Ptr { return Reference(rv.Elem().Interface()) @@ -113,7 +113,7 @@ func Reference(x interface{}) *interface{} { } // Unmarshal decodes a YAML, JSON or JSON extension value into the specified type. -func Unmarshal(bs []byte, v interface{}) error { +func Unmarshal(bs []byte, v any) error { if len(bs) > 2 && bs[0] == 0xef && bs[1] == 0xbb && bs[2] == 0xbf { bs = bs[3:] // Strip UTF-8 BOM, see https://www.rfc-editor.org/rfc/rfc8259#section-8.1 } diff --git a/vendor/github.com/open-policy-agent/opa/v1/util/read_gzip_body.go b/vendor/github.com/open-policy-agent/opa/v1/util/read_gzip_body.go index b979d0bd0..ddffe2a4d 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/util/read_gzip_body.go +++ b/vendor/github.com/open-policy-agent/opa/v1/util/read_gzip_body.go @@ -14,7 +14,7 @@ import ( ) var gzipReaderPool = sync.Pool{ - New: func() interface{} { + New: func() any { reader := new(gzip.Reader) return reader }, diff --git a/vendor/github.com/open-policy-agent/opa/v1/version/version.go b/vendor/github.com/open-policy-agent/opa/v1/version/version.go index f8795429f..c50ecfcb1 100644 --- a/vendor/github.com/open-policy-agent/opa/v1/version/version.go +++ b/vendor/github.com/open-policy-agent/opa/v1/version/version.go @@ -10,8 +10,7 @@ import ( "runtime/debug" ) -// Version is the canonical version of OPA. 
-var Version = "1.4.2" +var Version = "1.5.0" // GoVersion is the version of Go this was built with var GoVersion = runtime.Version() @@ -32,18 +31,28 @@ func init() { if !ok { return } - dirty := false + var dirty bool + var binTimestamp, binVcs string + for _, s := range bi.Settings { switch s.Key { case "vcs.time": - Timestamp = s.Value + binTimestamp = s.Value case "vcs.revision": - Vcs = s.Value + binVcs = s.Value case "vcs.modified": dirty = s.Value == "true" } } - if dirty { - Vcs += "-dirty" + + if Timestamp == "" { + Timestamp = binTimestamp + } + + if Vcs == "" { + Vcs = binVcs + if dirty { + Vcs += "-dirty" + } } } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/LICENSE b/vendor/github.com/vektah/gqlparser/v2/LICENSE similarity index 100% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/LICENSE rename to vendor/github.com/vektah/gqlparser/v2/LICENSE diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/argmap.go b/vendor/github.com/vektah/gqlparser/v2/ast/argmap.go similarity index 100% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/argmap.go rename to vendor/github.com/vektah/gqlparser/v2/ast/argmap.go diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/collections.go b/vendor/github.com/vektah/gqlparser/v2/ast/collections.go similarity index 100% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/collections.go rename to vendor/github.com/vektah/gqlparser/v2/ast/collections.go diff --git a/vendor/github.com/vektah/gqlparser/v2/ast/comment.go b/vendor/github.com/vektah/gqlparser/v2/ast/comment.go new file mode 100644 index 000000000..8fcfda581 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/ast/comment.go @@ -0,0 +1,31 @@ +package ast + +import ( + "strconv" + "strings" +) + +type Comment struct { + Value string + Position *Position +} + +func (c *Comment) Text() string { + return 
strings.TrimPrefix(c.Value, "#") +} + +type CommentGroup struct { + List []*Comment +} + +func (c *CommentGroup) Dump() string { + if len(c.List) == 0 { + return "" + } + var builder strings.Builder + for _, comment := range c.List { + builder.WriteString(comment.Value) + builder.WriteString("\n") + } + return strconv.Quote(builder.String()) +} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/decode.go b/vendor/github.com/vektah/gqlparser/v2/ast/decode.go similarity index 99% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/decode.go rename to vendor/github.com/vektah/gqlparser/v2/ast/decode.go index d00920554..c9966b244 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/decode.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/decode.go @@ -11,7 +11,7 @@ func UnmarshalSelectionSet(b []byte) (SelectionSet, error) { return nil, err } - var result = make([]Selection, 0) + result := make([]Selection, 0) for _, item := range tmp { var field Field if err := json.Unmarshal(item, &field); err == nil { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/definition.go b/vendor/github.com/vektah/gqlparser/v2/ast/definition.go similarity index 84% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/definition.go rename to vendor/github.com/vektah/gqlparser/v2/ast/definition.go index ee3d4df3a..9ceebf1be 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/definition.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/definition.go @@ -31,6 +31,10 @@ type Definition struct { Position *Position `dump:"-" json:"-"` BuiltIn bool `dump:"-"` + + BeforeDescriptionComment *CommentGroup + AfterDescriptionComment *CommentGroup + EndOfDefinitionComment *CommentGroup } func (d *Definition) IsLeafType() bool { @@ -66,6 +70,9 @@ type FieldDefinition struct { Type *Type Directives DirectiveList Position *Position `dump:"-" json:"-"` + + 
BeforeDescriptionComment *CommentGroup + AfterDescriptionComment *CommentGroup } type ArgumentDefinition struct { @@ -75,6 +82,9 @@ type ArgumentDefinition struct { Type *Type Directives DirectiveList Position *Position `dump:"-" json:"-"` + + BeforeDescriptionComment *CommentGroup + AfterDescriptionComment *CommentGroup } type EnumValueDefinition struct { @@ -82,6 +92,9 @@ type EnumValueDefinition struct { Name string Directives DirectiveList Position *Position `dump:"-" json:"-"` + + BeforeDescriptionComment *CommentGroup + AfterDescriptionComment *CommentGroup } type DirectiveDefinition struct { @@ -91,4 +104,7 @@ type DirectiveDefinition struct { Locations []DirectiveLocation IsRepeatable bool Position *Position `dump:"-" json:"-"` + + BeforeDescriptionComment *CommentGroup + AfterDescriptionComment *CommentGroup } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/directive.go b/vendor/github.com/vektah/gqlparser/v2/ast/directive.go similarity index 100% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/directive.go rename to vendor/github.com/vektah/gqlparser/v2/ast/directive.go diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/document.go b/vendor/github.com/vektah/gqlparser/v2/ast/document.go similarity index 86% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/document.go rename to vendor/github.com/vektah/gqlparser/v2/ast/document.go index 4a6654b9a..e2520ffb7 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/document.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/document.go @@ -4,6 +4,7 @@ type QueryDocument struct { Operations OperationList Fragments FragmentDefinitionList Position *Position `dump:"-" json:"-"` + Comment *CommentGroup } type SchemaDocument struct { @@ -13,6 +14,7 @@ type SchemaDocument struct { Definitions DefinitionList Extensions DefinitionList Position *Position `dump:"-" json:"-"` + Comment 
*CommentGroup } func (d *SchemaDocument) Merge(other *SchemaDocument) { @@ -24,9 +26,10 @@ func (d *SchemaDocument) Merge(other *SchemaDocument) { } type Schema struct { - Query *Definition - Mutation *Definition - Subscription *Definition + Query *Definition + Mutation *Definition + Subscription *Definition + SchemaDirectives DirectiveList Types map[string]*Definition Directives map[string]*DirectiveDefinition @@ -35,6 +38,8 @@ type Schema struct { Implements map[string][]*Definition Description string + + Comment *CommentGroup } // AddTypes is the helper to add types definition to the schema @@ -70,10 +75,15 @@ type SchemaDefinition struct { Directives DirectiveList OperationTypes OperationTypeDefinitionList Position *Position `dump:"-" json:"-"` + + BeforeDescriptionComment *CommentGroup + AfterDescriptionComment *CommentGroup + EndOfDefinitionComment *CommentGroup } type OperationTypeDefinition struct { Operation Operation Type string Position *Position `dump:"-" json:"-"` + Comment *CommentGroup } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/dumper.go b/vendor/github.com/vektah/gqlparser/v2/ast/dumper.go similarity index 88% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/dumper.go rename to vendor/github.com/vektah/gqlparser/v2/ast/dumper.go index 84266a618..e9ea88a12 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/dumper.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/dumper.go @@ -40,13 +40,13 @@ func (d *dumper) dump(v reflect.Value) { d.WriteString("false") } case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - d.WriteString(strconv.FormatInt(v.Int(), 10)) + fmt.Fprintf(d, "%d", v.Int()) case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - d.WriteString(strconv.FormatUint(v.Uint(), 10)) + fmt.Fprintf(d, "%d", v.Uint()) case reflect.Float32, reflect.Float64: - d.WriteString(fmt.Sprintf("%.2f", v.Float())) + 
fmt.Fprintf(d, "%.2f", v.Float()) case reflect.String: if v.Type().Name() != "string" { @@ -70,11 +70,11 @@ func (d *dumper) dump(v reflect.Value) { } func (d *dumper) writeIndent() { - d.Buffer.WriteString(strings.Repeat(" ", d.indent)) + d.WriteString(strings.Repeat(" ", d.indent)) } func (d *dumper) nl() { - d.Buffer.WriteByte('\n') + d.WriteByte('\n') d.writeIndent() } @@ -88,7 +88,7 @@ func typeName(t reflect.Type) string { func (d *dumper) dumpArray(v reflect.Value) { d.WriteString("[" + typeName(v.Type().Elem()) + "]") - for i := range v.Len() { + for i := 0; i < v.Len(); i++ { d.nl() d.WriteString("- ") d.indent++ @@ -102,7 +102,7 @@ func (d *dumper) dumpStruct(v reflect.Value) { d.indent++ typ := v.Type() - for i := range v.NumField() { + for i := 0; i < v.NumField(); i++ { f := v.Field(i) if typ.Field(i).Tag.Get("dump") == "-" { continue @@ -132,13 +132,13 @@ func isZero(v reflect.Value) bool { return true } z := true - for i := range v.Len() { + for i := 0; i < v.Len(); i++ { z = z && isZero(v.Index(i)) } return z case reflect.Struct: z := true - for i := range v.NumField() { + for i := 0; i < v.NumField(); i++ { z = z && isZero(v.Field(i)) } return z diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/fragment.go b/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go similarity index 91% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/fragment.go rename to vendor/github.com/vektah/gqlparser/v2/ast/fragment.go index 723d83399..05805e108 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/fragment.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/fragment.go @@ -9,6 +9,7 @@ type FragmentSpread struct { Definition *FragmentDefinition Position *Position `dump:"-" json:"-"` + Comment *CommentGroup } type InlineFragment struct { @@ -20,6 +21,7 @@ type InlineFragment struct { ObjectDefinition *Definition Position *Position `dump:"-" json:"-"` + Comment *CommentGroup } type 
FragmentDefinition struct { @@ -35,4 +37,5 @@ type FragmentDefinition struct { Definition *Definition Position *Position `dump:"-" json:"-"` + Comment *CommentGroup } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/operation.go b/vendor/github.com/vektah/gqlparser/v2/ast/operation.go similarity index 91% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/operation.go rename to vendor/github.com/vektah/gqlparser/v2/ast/operation.go index 5fc2f3b24..2efed025b 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/operation.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/operation.go @@ -15,6 +15,7 @@ type OperationDefinition struct { Directives DirectiveList SelectionSet SelectionSet Position *Position `dump:"-" json:"-"` + Comment *CommentGroup } type VariableDefinition struct { @@ -23,6 +24,7 @@ type VariableDefinition struct { DefaultValue *Value Directives DirectiveList Position *Position `dump:"-" json:"-"` + Comment *CommentGroup // Requires validation Definition *Definition diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/path.go b/vendor/github.com/vektah/gqlparser/v2/ast/path.go similarity index 91% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/path.go rename to vendor/github.com/vektah/gqlparser/v2/ast/path.go index be1a9e4ed..f40aa953d 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/path.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/path.go @@ -14,10 +14,15 @@ type PathElement interface { isPathElement() } -var _ PathElement = PathIndex(0) -var _ PathElement = PathName("") +var ( + _ PathElement = PathIndex(0) + _ PathElement = PathName("") +) func (path Path) String() string { + if path == nil { + return "" + } var str bytes.Buffer for i, v := range path { switch v := v.(type) { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/selection.go 
b/vendor/github.com/vektah/gqlparser/v2/ast/selection.go similarity index 70% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/selection.go rename to vendor/github.com/vektah/gqlparser/v2/ast/selection.go index 677300edd..1858dc213 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/selection.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/selection.go @@ -11,9 +11,9 @@ func (*Field) isSelection() {} func (*FragmentSpread) isSelection() {} func (*InlineFragment) isSelection() {} -func (s *Field) GetPosition() *Position { return s.Position } +func (f *Field) GetPosition() *Position { return f.Position } func (s *FragmentSpread) GetPosition() *Position { return s.Position } -func (s *InlineFragment) GetPosition() *Position { return s.Position } +func (f *InlineFragment) GetPosition() *Position { return f.Position } type Field struct { Alias string @@ -22,6 +22,7 @@ type Field struct { Directives DirectiveList SelectionSet SelectionSet Position *Position `dump:"-" json:"-"` + Comment *CommentGroup // Require validation Definition *FieldDefinition @@ -32,8 +33,9 @@ type Argument struct { Name string Value *Value Position *Position `dump:"-" json:"-"` + Comment *CommentGroup } -func (s *Field) ArgumentMap(vars map[string]interface{}) map[string]interface{} { - return arg2map(s.Definition.Arguments, s.Arguments, vars) +func (f *Field) ArgumentMap(vars map[string]interface{}) map[string]interface{} { + return arg2map(f.Definition.Arguments, f.Arguments, vars) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/source.go b/vendor/github.com/vektah/gqlparser/v2/ast/source.go similarity index 100% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/source.go rename to vendor/github.com/vektah/gqlparser/v2/ast/source.go diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/type.go b/vendor/github.com/vektah/gqlparser/v2/ast/type.go similarity index 100% 
rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/type.go rename to vendor/github.com/vektah/gqlparser/v2/ast/type.go diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/value.go b/vendor/github.com/vektah/gqlparser/v2/ast/value.go similarity index 98% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/value.go rename to vendor/github.com/vektah/gqlparser/v2/ast/value.go index ae23a98d7..45fa8016b 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/ast/value.go +++ b/vendor/github.com/vektah/gqlparser/v2/ast/value.go @@ -26,6 +26,7 @@ type Value struct { Children ChildValueList Kind ValueKind Position *Position `dump:"-" json:"-"` + Comment *CommentGroup // Require validation Definition *Definition @@ -37,6 +38,7 @@ type ChildValue struct { Name string Value *Value Position *Position `dump:"-" json:"-"` + Comment *CommentGroup } func (v *Value) Value(vars map[string]interface{}) (interface{}, error) { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/gqlerror/error.go b/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go similarity index 75% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/gqlerror/error.go rename to vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go index 58d1c1bd6..ca9036ca7 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/gqlerror/error.go +++ b/vendor/github.com/vektah/gqlparser/v2/gqlerror/error.go @@ -1,17 +1,17 @@ package gqlerror import ( - "bytes" "errors" "fmt" "strconv" + "strings" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" ) -// Error is the standard graphql error type described in https://facebook.github.io/graphql/draft/#sec-Errors +// Error is the standard graphql error type described in https://spec.graphql.org/draft/#sec-Errors type Error struct { - err error `json:"-"` + Err error `json:"-"` Message string 
`json:"message"` Path ast.Path `json:"path,omitempty"` Locations []Location `json:"locations,omitempty"` @@ -38,7 +38,7 @@ type Location struct { type List []*Error func (err *Error) Error() string { - var res bytes.Buffer + var res strings.Builder if err == nil { return "" } @@ -66,16 +66,23 @@ func (err *Error) Error() string { return res.String() } -func (err Error) pathString() string { +func (err *Error) pathString() string { return err.Path.String() } -func (err Error) Unwrap() error { - return err.err +func (err *Error) Unwrap() error { + return err.Err +} + +func (err *Error) AsError() error { + if err == nil { + return nil + } + return err } func (errs List) Error() string { - var buf bytes.Buffer + var buf strings.Builder for _, err := range errs { buf.WriteString(err.Error()) buf.WriteByte('\n') @@ -101,14 +108,48 @@ func (errs List) As(target interface{}) bool { return false } +func (errs List) Unwrap() []error { + l := make([]error, len(errs)) + for i, err := range errs { + l[i] = err + } + return l +} + func WrapPath(path ast.Path, err error) *Error { + if err == nil { + return nil + } return &Error{ - err: err, + Err: err, Message: err.Error(), Path: path, } } +func Wrap(err error) *Error { + if err == nil { + return nil + } + return &Error{ + Err: err, + Message: err.Error(), + } +} + +func WrapIfUnwrapped(err error) *Error { + if err == nil { + return nil + } + if gqlErr, ok := err.(*Error); ok { + return gqlErr + } + return &Error{ + Err: err, + Message: err.Error(), + } +} + func Errorf(message string, args ...interface{}) *Error { return &Error{ Message: fmt.Sprintf(message, args...), diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/blockstring.go b/vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go similarity index 100% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/blockstring.go rename to vendor/github.com/vektah/gqlparser/v2/lexer/blockstring.go diff --git 
a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/lexer.go b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go similarity index 91% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/lexer.go rename to vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go index f25555e65..1cbb4a030 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/lexer.go +++ b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer.go @@ -4,8 +4,8 @@ import ( "bytes" "unicode/utf8" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - "github.com/open-policy-agent/opa/internal/gqlparser/gqlerror" + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" ) // Lexer turns graphql request and schema strings into tokens @@ -55,7 +55,7 @@ func (s *Lexer) makeValueToken(kind Type, value string) (Token, error) { }, nil } -func (s *Lexer) makeError(format string, args ...interface{}) (Token, error) { +func (s *Lexer) makeError(format string, args ...interface{}) (Token, *gqlerror.Error) { column := s.endRunes - s.lineStartRunes + 1 return Token{ Kind: Invalid, @@ -66,7 +66,7 @@ func (s *Lexer) makeError(format string, args ...interface{}) (Token, error) { Column: column, Src: s.Source, }, - }, gqlerror.ErrorLocf(s.Source.Name, s.line, column, format, args...) + }, gqlerror.ErrorLocf(s.Name, s.line, column, format, args...) } // ReadToken gets the next token from the source starting at the given position. @@ -74,8 +74,7 @@ func (s *Lexer) makeError(format string, args ...interface{}) (Token, error) { // This skips over whitespace and comments until it finds the next lexable // token, then lexes punctuators immediately or calls the appropriate helper // function for more complicated tokens. 
-func (s *Lexer) ReadToken() (token Token, err error) { - +func (s *Lexer) ReadToken() (Token, error) { s.ws() s.start = s.end s.startRunes = s.endRunes @@ -121,10 +120,7 @@ func (s *Lexer) ReadToken() (token Token, err error) { case '|': return s.makeValueToken(Pipe, "") case '#': - if comment, err := s.readComment(); err != nil { - return comment, err - } - return s.ReadToken() + return s.readComment() case '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z': return s.readName() @@ -258,7 +254,6 @@ func (s *Lexer) readNumber() (Token, error) { return s.makeToken(Float) } return s.makeToken(Int) - } // acceptByte if it matches any of given bytes, returning true if it found anything @@ -321,8 +316,8 @@ func (s *Lexer) readString() (Token, error) { } switch r { default: - var char = rune(r) - var w = 1 + char := rune(r) + w := 1 // skip unicode overhead if we are in the ascii range if r >= 127 { @@ -426,17 +421,29 @@ func (s *Lexer) readBlockString() (Token, error) { r := s.Input[s.end] // Closing triple quote (""") - if r == '"' && s.end+3 <= inputLen && s.Input[s.end:s.end+3] == `"""` { - t, err := s.makeValueToken(BlockString, blockStringValue(buf.String())) + if r == '"' { + // Count consecutive quotes + quoteCount := 1 + i := s.end + 1 + for i < inputLen && s.Input[i] == '"' { + quoteCount++ + i++ + } - // the token should not include the quotes in its value, but should cover them in its position - t.Pos.Start -= 3 - t.Pos.End += 3 + // If we have at least 3 quotes, use the last 3 as the closing quote + if quoteCount >= 3 { + // Add any extra quotes to the buffer (except the last 3) + for j := 0; j < quoteCount-3; j++ { + buf.WriteByte('"') + } - // skip the close quote - s.end += 3 - s.endRunes += 3 - return t, err + t, err := 
s.makeValueToken(BlockString, blockStringValue(buf.String())) + t.Pos.Start -= 3 + t.Pos.End += 3 + s.end += quoteCount + s.endRunes += quoteCount + return t, err + } } // SourceCharacter @@ -444,11 +451,12 @@ func (s *Lexer) readBlockString() (Token, error) { return s.makeError(`Invalid character within String: "\u%04d".`, r) } - if r == '\\' && s.end+4 <= inputLen && s.Input[s.end:s.end+4] == `\"""` { + switch { + case r == '\\' && s.end+4 <= inputLen && s.Input[s.end:s.end+4] == `\"""`: buf.WriteString(`"""`) s.end += 4 s.endRunes += 4 - } else if r == '\r' { + case r == '\r': if s.end+1 < inputLen && s.Input[s.end+1] == '\n' { s.end++ s.endRunes++ @@ -459,9 +467,9 @@ func (s *Lexer) readBlockString() (Token, error) { s.endRunes++ s.line++ s.lineStartRunes = s.endRunes - } else { - var char = rune(r) - var w = 1 + default: + char := rune(r) + w := 1 // skip unicode overhead if we are in the ascii range if r >= 127 { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/lexer_test.yml b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml similarity index 91% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/lexer_test.yml rename to vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml index 5c4d5f0ff..0899f4ca9 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/lexer_test.yml +++ b/vendor/github.com/vektah/gqlparser/v2/lexer/lexer_test.yml @@ -26,6 +26,31 @@ simple tokens: column: 3 value: 'foo' + - name: records line and column with comments + input: "\n\n\n#foo\n #bar\n foo\n" + tokens: + - + kind: COMMENT + start: 3 + end: 7 + line: 4 + column: 0 + value: '#foo' + - + kind: COMMENT + start: 10 + end: 14 + line: 5 + column: 3 + value: '#bar' + - + kind: NAME + start: 17 + end: 20 + line: 6 + column: 3 + value: 'foo' + - name: skips whitespace input: "\n\n foo\n\n\n" tokens: @@ -35,15 +60,6 @@ simple tokens: end: 9 value: 'foo' - - name: skips comments - input: "\n 
#comment\n foo#comment\n" - tokens: - - - kind: NAME - start: 18 - end: 21 - value: 'foo' - - name: skips commas input: ",,,foo,,," tokens: @@ -78,6 +94,57 @@ simple tokens: end: 1 value: a +lexes comments: + - name: basic + input: '#simple' + tokens: + - + kind: COMMENT + start: 0 + end: 7 + value: '#simple' + + - name: two lines + input: "#first\n#second" + tokens: + - + kind: COMMENT + start: 0 + end: 6 + value: "#first" + - + kind: COMMENT + start: 7 + end: 14 + value: "#second" + + - name: whitespace + input: '# white space ' + tokens: + - + kind: COMMENT + start: 0 + end: 14 + value: '# white space ' + + - name: not escaped + input: '#not escaped \n\r\b\t\f' + tokens: + - + kind: COMMENT + start: 0 + end: 23 + value: '#not escaped \n\r\b\t\f' + + - name: slashes + input: '#slashes \\ \/' + tokens: + - + kind: COMMENT + start: 0 + end: 14 + value: '#slashes \\ \/' + lexes strings: - name: basic input: '"simple"' @@ -674,7 +741,6 @@ lex reports useful unknown character error: - name: question mark input: "?" error: - message: 'Cannot parse the unexpected character "?".' message: 'Cannot parse the unexpected character "?".' 
locations: [{ line: 1, column: 1 }] diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/token.go b/vendor/github.com/vektah/gqlparser/v2/lexer/token.go similarity index 97% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/token.go rename to vendor/github.com/vektah/gqlparser/v2/lexer/token.go index 79eefd0f4..8985a7efb 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/lexer/token.go +++ b/vendor/github.com/vektah/gqlparser/v2/lexer/token.go @@ -3,7 +3,7 @@ package lexer import ( "strconv" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" ) const ( diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/parser.go b/vendor/github.com/vektah/gqlparser/v2/parser/parser.go similarity index 54% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/parser.go rename to vendor/github.com/vektah/gqlparser/v2/parser/parser.go index c0d2b4a3b..bfcf7ea49 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/parser.go +++ b/vendor/github.com/vektah/gqlparser/v2/parser/parser.go @@ -1,11 +1,12 @@ package parser import ( + "fmt" "strconv" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - "github.com/open-policy-agent/opa/internal/gqlparser/gqlerror" - "github.com/open-policy-agent/opa/internal/gqlparser/lexer" + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" + "github.com/vektah/gqlparser/v2/lexer" ) type parser struct { @@ -17,6 +18,53 @@ type parser struct { peekError error prev lexer.Token + + comment *ast.CommentGroup + commentConsuming bool + + tokenCount int + maxTokenLimit int +} + +func (p *parser) SetMaxTokenLimit(maxToken int) { + p.maxTokenLimit = maxToken +} + +func (p *parser) consumeComment() (*ast.Comment, bool) { + if p.err != nil { + return nil, false + } + tok := p.peek() + if tok.Kind != lexer.Comment { + return nil, false 
+ } + p.next() + return &ast.Comment{ + Value: tok.Value, + Position: &tok.Pos, + }, true +} + +func (p *parser) consumeCommentGroup() { + if p.err != nil { + return + } + if p.commentConsuming { + return + } + p.commentConsuming = true + + var comments []*ast.Comment + for { + comment, ok := p.consumeComment() + if !ok { + break + } + comments = append(comments, comment) + } + + p.comment = &ast.CommentGroup{List: comments} + p.commentConsuming = false } func (p *parser) peekPos() *ast.Position { @@ -36,6 +84,9 @@ func (p *parser) peek() lexer.Token { if !p.peeked { p.peekToken, p.peekError = p.lexer.ReadToken() p.peeked = true + if p.peekToken.Kind == lexer.Comment { + p.consumeCommentGroup() + } } return p.peekToken @@ -52,33 +103,45 @@ func (p *parser) next() lexer.Token { if p.err != nil { return p.prev } + // Increment the token count before reading the next token + p.tokenCount++ + if p.maxTokenLimit != 0 && p.tokenCount > p.maxTokenLimit { + p.err = fmt.Errorf("exceeded token limit of %d", p.maxTokenLimit) + return p.prev + } if p.peeked { p.peeked = false + p.comment = nil p.prev, p.err = p.peekToken, p.peekError } else { p.prev, p.err = p.lexer.ReadToken() + if p.prev.Kind == lexer.Comment { + p.consumeCommentGroup() + } } return p.prev } -func (p *parser) expectKeyword(value string) lexer.Token { +func (p *parser) expectKeyword(value string) (lexer.Token, *ast.CommentGroup) { tok := p.peek() + comment := p.comment if tok.Kind == lexer.Name && tok.Value == value { - return p.next() + return p.next(), comment } p.error(tok, "Expected %s, found %s", strconv.Quote(value), tok.String()) - return tok + return tok, comment } -func (p *parser) expect(kind lexer.Type) lexer.Token { +func (p *parser) expect(kind lexer.Type) (lexer.Token, *ast.CommentGroup) { tok := p.peek() + comment := p.comment if tok.Kind == kind { - return p.next() + return p.next(), comment } p.error(tok, "Expected %s, found %s", kind, tok.Kind.String()) - return tok + return tok, comment } 
func (p *parser) skip(kind lexer.Type) bool { @@ -115,10 +178,10 @@ func (p *parser) many(start lexer.Type, end lexer.Type, cb func()) { p.next() } -func (p *parser) some(start lexer.Type, end lexer.Type, cb func()) { +func (p *parser) some(start lexer.Type, end lexer.Type, cb func()) *ast.CommentGroup { hasDef := p.skip(start) if !hasDef { - return + return nil } called := false @@ -129,8 +192,10 @@ func (p *parser) some(start lexer.Type, end lexer.Type, cb func()) { if !called { p.error(p.peek(), "expected at least one definition, found %s", p.peek().Kind.String()) - return + return nil } + comment := p.comment p.next() + return comment } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/query.go b/vendor/github.com/vektah/gqlparser/v2/parser/query.go similarity index 85% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/query.go rename to vendor/github.com/vektah/gqlparser/v2/parser/query.go index 319425f58..47ac214a9 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/query.go +++ b/vendor/github.com/vektah/gqlparser/v2/parser/query.go @@ -1,15 +1,23 @@ package parser import ( - "github.com/open-policy-agent/opa/internal/gqlparser/lexer" + "github.com/vektah/gqlparser/v2/lexer" - //nolint:revive - . "github.com/open-policy-agent/opa/internal/gqlparser/ast" + . 
"github.com/vektah/gqlparser/v2/ast" //nolint:staticcheck // bad, yeah ) func ParseQuery(source *Source) (*QueryDocument, error) { p := parser{ - lexer: lexer.New(source), + lexer: lexer.New(source), + maxTokenLimit: 0, // 0 means unlimited + } + return p.parseQueryDocument(), p.err +} + +func ParseQueryWithTokenLimit(source *Source, maxTokenLimit int) (*QueryDocument, error) { + p := parser{ + lexer: lexer.New(source), + maxTokenLimit: maxTokenLimit, } return p.parseQueryDocument(), p.err } @@ -45,6 +53,7 @@ func (p *parser) parseOperationDefinition() *OperationDefinition { if p.peek().Kind == lexer.BraceL { return &OperationDefinition{ Position: p.peekPos(), + Comment: p.comment, Operation: Query, SelectionSet: p.parseRequiredSelectionSet(), } @@ -52,6 +61,7 @@ func (p *parser) parseOperationDefinition() *OperationDefinition { var od OperationDefinition od.Position = p.peekPos() + od.Comment = p.comment od.Operation = p.parseOperationType() if p.peek().Kind == lexer.Name { @@ -81,7 +91,7 @@ func (p *parser) parseOperationType() Operation { func (p *parser) parseVariableDefinitions() VariableDefinitionList { var defs []*VariableDefinition - p.many(lexer.ParenL, lexer.ParenR, func() { + p.some(lexer.ParenL, lexer.ParenR, func() { defs = append(defs, p.parseVariableDefinition()) }) @@ -91,6 +101,7 @@ func (p *parser) parseVariableDefinitions() VariableDefinitionList { func (p *parser) parseVariableDefinition() *VariableDefinition { var def VariableDefinition def.Position = p.peekPos() + def.Comment = p.comment def.Variable = p.parseVariable() p.expect(lexer.Colon) @@ -117,7 +128,7 @@ func (p *parser) parseOptionalSelectionSet() SelectionSet { selections = append(selections, p.parseSelection()) }) - return SelectionSet(selections) + return selections } func (p *parser) parseRequiredSelectionSet() SelectionSet { @@ -131,7 +142,7 @@ func (p *parser) parseRequiredSelectionSet() SelectionSet { selections = append(selections, p.parseSelection()) }) - return 
SelectionSet(selections) + return selections } func (p *parser) parseSelection() Selection { @@ -144,6 +155,7 @@ func (p *parser) parseSelection() Selection { func (p *parser) parseField() *Field { var field Field field.Position = p.peekPos() + field.Comment = p.comment field.Alias = p.parseName() if p.skip(lexer.Colon) { @@ -163,7 +175,7 @@ func (p *parser) parseField() *Field { func (p *parser) parseArguments(isConst bool) ArgumentList { var arguments ArgumentList - p.many(lexer.ParenL, lexer.ParenR, func() { + p.some(lexer.ParenL, lexer.ParenR, func() { arguments = append(arguments, p.parseArgument(isConst)) }) @@ -173,6 +185,7 @@ func (p *parser) parseArguments(isConst bool) ArgumentList { func (p *parser) parseArgument(isConst bool) *Argument { arg := Argument{} arg.Position = p.peekPos() + arg.Comment = p.comment arg.Name = p.parseName() p.expect(lexer.Colon) @@ -181,11 +194,12 @@ func (p *parser) parseArgument(isConst bool) *Argument { } func (p *parser) parseFragment() Selection { - p.expect(lexer.Spread) + _, comment := p.expect(lexer.Spread) if peek := p.peek(); peek.Kind == lexer.Name && peek.Value != "on" { return &FragmentSpread{ Position: p.peekPos(), + Comment: comment, Name: p.parseFragmentName(), Directives: p.parseDirectives(false), } @@ -193,6 +207,7 @@ func (p *parser) parseFragment() Selection { var def InlineFragment def.Position = p.peekPos() + def.Comment = comment if p.peek().Value == "on" { p.next() // "on" @@ -207,6 +222,7 @@ func (p *parser) parseFragment() Selection { func (p *parser) parseFragmentDefinition() *FragmentDefinition { var def FragmentDefinition def.Position = p.peekPos() + def.Comment = p.comment p.expectKeyword("fragment") def.Name = p.parseFragmentName() @@ -243,7 +259,7 @@ func (p *parser) parseValueLiteral(isConst bool) *Value { p.unexpectedError() return nil } - return &Value{Position: &token.Pos, Raw: p.parseVariable(), Kind: Variable} + return &Value{Position: &token.Pos, Comment: p.comment, Raw: p.parseVariable(), 
Kind: Variable} case lexer.Int: kind = IntValue case lexer.Float: @@ -268,32 +284,35 @@ func (p *parser) parseValueLiteral(isConst bool) *Value { p.next() - return &Value{Position: &token.Pos, Raw: token.Value, Kind: kind} + return &Value{Position: &token.Pos, Comment: p.comment, Raw: token.Value, Kind: kind} } func (p *parser) parseList(isConst bool) *Value { var values ChildValueList pos := p.peekPos() + comment := p.comment p.many(lexer.BracketL, lexer.BracketR, func() { values = append(values, &ChildValue{Value: p.parseValueLiteral(isConst)}) }) - return &Value{Children: values, Kind: ListValue, Position: pos} + return &Value{Children: values, Kind: ListValue, Position: pos, Comment: comment} } func (p *parser) parseObject(isConst bool) *Value { var fields ChildValueList pos := p.peekPos() + comment := p.comment p.many(lexer.BraceL, lexer.BraceR, func() { fields = append(fields, p.parseObjectField(isConst)) }) - return &Value{Children: fields, Kind: ObjectValue, Position: pos} + return &Value{Children: fields, Kind: ObjectValue, Position: pos, Comment: comment} } func (p *parser) parseObjectField(isConst bool) *ChildValue { field := ChildValue{} field.Position = p.peekPos() + field.Comment = p.comment field.Name = p.parseName() p.expect(lexer.Colon) @@ -343,7 +362,7 @@ func (p *parser) parseTypeReference() *Type { } func (p *parser) parseName() string { - token := p.expect(lexer.Name) + token, _ := p.expect(lexer.Name) return token.Value } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/query_test.yml b/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml similarity index 98% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/query_test.yml rename to vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml index a46a01e71..ec0580f5f 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/query_test.yml +++ b/vendor/github.com/vektah/gqlparser/v2/parser/query_test.yml @@ 
-436,6 +436,7 @@ large queries: - Alias: "id" Name: "id" + Comment: "# Copyright (c) 2015-present, Facebook, Inc.\n#\n# This source code is licensed under the MIT license found in the\n# LICENSE file in the root directory of this source tree.\n" - Operation: Operation("mutation") Name: "likeStory" diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/schema.go b/vendor/github.com/vektah/gqlparser/v2/parser/schema.go similarity index 58% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/schema.go rename to vendor/github.com/vektah/gqlparser/v2/parser/schema.go index 32c293399..804f02c9f 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/schema.go +++ b/vendor/github.com/vektah/gqlparser/v2/parser/schema.go @@ -1,40 +1,72 @@ package parser import ( - //nolint:revive - . "github.com/open-policy-agent/opa/internal/gqlparser/ast" - "github.com/open-policy-agent/opa/internal/gqlparser/lexer" + . "github.com/vektah/gqlparser/v2/ast" //nolint:staticcheck // bad, yeah + "github.com/vektah/gqlparser/v2/lexer" ) -func ParseSchema(source *Source) (*SchemaDocument, error) { - p := parser{ - lexer: lexer.New(source), - } - ast, err := p.parseSchemaDocument(), p.err - if err != nil { - return nil, err - } - - for _, def := range ast.Definitions { - def.BuiltIn = source.BuiltIn - } - for _, def := range ast.Extensions { - def.BuiltIn = source.BuiltIn - } - - return ast, nil -} - func ParseSchemas(inputs ...*Source) (*SchemaDocument, error) { - ast := &SchemaDocument{} + sd := &SchemaDocument{} for _, input := range inputs { inputAst, err := ParseSchema(input) if err != nil { return nil, err } - ast.Merge(inputAst) + sd.Merge(inputAst) } - return ast, nil + return sd, nil +} + +func ParseSchema(source *Source) (*SchemaDocument, error) { + p := parser{ + lexer: lexer.New(source), + maxTokenLimit: 0, // default value is unlimited + } + sd, err := p.parseSchemaDocument(), p.err + if err != nil { + return 
nil, err + } + + for _, def := range sd.Definitions { + def.BuiltIn = source.BuiltIn + } + for _, def := range sd.Extensions { + def.BuiltIn = source.BuiltIn + } + + return sd, nil +} + +func ParseSchemasWithLimit(maxTokenLimit int, inputs ...*Source) (*SchemaDocument, error) { + sd := &SchemaDocument{} + for _, input := range inputs { + inputAst, err := ParseSchemaWithLimit(input, maxTokenLimit) + if err != nil { + return nil, err + } + sd.Merge(inputAst) + } + return sd, nil +} + +func ParseSchemaWithLimit(source *Source, maxTokenLimit int) (*SchemaDocument, error) { + p := parser{ + lexer: lexer.New(source), + maxTokenLimit: maxTokenLimit, // 0 is unlimited + } + sd, err := p.parseSchemaDocument(), p.err + if err != nil { + return nil, err + } + + for _, def := range sd.Definitions { + def.BuiltIn = source.BuiltIn + } + for _, def := range sd.Extensions { + def.BuiltIn = source.BuiltIn + } + + return sd, nil } func (p *parser) parseSchemaDocument() *SchemaDocument { @@ -45,7 +77,7 @@ func (p *parser) parseSchemaDocument() *SchemaDocument { return nil } - var description string + var description descriptionWithComment if p.peek().Kind == lexer.BlockString || p.peek().Kind == lexer.String { description = p.parseDescription() } @@ -63,7 +95,7 @@ func (p *parser) parseSchemaDocument() *SchemaDocument { case "directive": doc.Directives = append(doc.Directives, p.parseDirectiveDefinition(description)) case "extend": - if description != "" { + if description.text != "" { p.unexpectedToken(p.prev) } p.parseTypeSystemExtension(&doc) @@ -73,20 +105,26 @@ func (p *parser) parseSchemaDocument() *SchemaDocument { } } + // treat end of file comments + doc.Comment = p.comment + return &doc } -func (p *parser) parseDescription() string { +func (p *parser) parseDescription() descriptionWithComment { token := p.peek() + var desc descriptionWithComment if token.Kind != lexer.BlockString && token.Kind != lexer.String { - return "" + return desc } - return p.next().Value + 
desc.comment = p.comment + desc.text = p.next().Value + return desc } -func (p *parser) parseTypeSystemDefinition(description string) *Definition { +func (p *parser) parseTypeSystemDefinition(description descriptionWithComment) *Definition { tok := p.peek() if tok.Kind != lexer.Name { p.unexpectedError() @@ -112,15 +150,17 @@ func (p *parser) parseTypeSystemDefinition(description string) *Definition { } } -func (p *parser) parseSchemaDefinition(description string) *SchemaDefinition { - p.expectKeyword("schema") +func (p *parser) parseSchemaDefinition(description descriptionWithComment) *SchemaDefinition { + _, comment := p.expectKeyword("schema") - def := SchemaDefinition{Description: description} + def := SchemaDefinition{} def.Position = p.peekPos() - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Directives = p.parseDirectives(true) - p.some(lexer.BraceL, lexer.BraceR, func() { + def.EndOfDefinitionComment = p.some(lexer.BraceL, lexer.BraceR, func() { def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition()) }) return &def @@ -129,35 +169,40 @@ func (p *parser) parseSchemaDefinition(description string) *SchemaDefinition { func (p *parser) parseOperationTypeDefinition() *OperationTypeDefinition { var op OperationTypeDefinition op.Position = p.peekPos() + op.Comment = p.comment op.Operation = p.parseOperationType() p.expect(lexer.Colon) op.Type = p.parseName() return &op } -func (p *parser) parseScalarTypeDefinition(description string) *Definition { - p.expectKeyword("scalar") +func (p *parser) parseScalarTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("scalar") var def Definition def.Position = p.peekPos() + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Kind = Scalar - def.Description = 
description def.Name = p.parseName() def.Directives = p.parseDirectives(true) return &def } -func (p *parser) parseObjectTypeDefinition(description string) *Definition { - p.expectKeyword("type") +func (p *parser) parseObjectTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("type") var def Definition def.Position = p.peekPos() def.Kind = Object - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Interfaces = p.parseImplementsInterfaces() def.Directives = p.parseDirectives(true) - def.Fields = p.parseFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseFieldsDefinition() return &def } @@ -176,18 +221,26 @@ func (p *parser) parseImplementsInterfaces() []string { return types } -func (p *parser) parseFieldsDefinition() FieldList { +func (p *parser) parseFieldsDefinition() (FieldList, *CommentGroup) { var defs FieldList - p.some(lexer.BraceL, lexer.BraceR, func() { + comment := p.some(lexer.BraceL, lexer.BraceR, func() { defs = append(defs, p.parseFieldDefinition()) }) - return defs + return defs, comment } func (p *parser) parseFieldDefinition() *FieldDefinition { var def FieldDefinition def.Position = p.peekPos() - def.Description = p.parseDescription() + + desc := p.parseDescription() + if desc.text != "" { + def.BeforeDescriptionComment = desc.comment + def.Description = desc.text + } + + p.peek() // peek to set p.comment + def.AfterDescriptionComment = p.comment def.Name = p.parseName() def.Arguments = p.parseArgumentDefs() p.expect(lexer.Colon) @@ -208,7 +261,15 @@ func (p *parser) parseArgumentDefs() ArgumentDefinitionList { func (p *parser) parseArgumentDef() *ArgumentDefinition { var def ArgumentDefinition def.Position = p.peekPos() - def.Description = p.parseDescription() + + desc := p.parseDescription() + if desc.text != "" { + def.BeforeDescriptionComment = 
desc.comment + def.Description = desc.text + } + + p.peek() // peek to set p.comment + def.AfterDescriptionComment = p.comment def.Name = p.parseName() p.expect(lexer.Colon) def.Type = p.parseTypeReference() @@ -222,7 +283,15 @@ func (p *parser) parseArgumentDef() *ArgumentDefinition { func (p *parser) parseInputValueDef() *FieldDefinition { var def FieldDefinition def.Position = p.peekPos() - def.Description = p.parseDescription() + + desc := p.parseDescription() + if desc.text != "" { + def.BeforeDescriptionComment = desc.comment + def.Description = desc.text + } + + p.peek() // peek to set p.comment + def.AfterDescriptionComment = p.comment def.Name = p.parseName() p.expect(lexer.Colon) def.Type = p.parseTypeReference() @@ -233,27 +302,31 @@ func (p *parser) parseInputValueDef() *FieldDefinition { return &def } -func (p *parser) parseInterfaceTypeDefinition(description string) *Definition { - p.expectKeyword("interface") +func (p *parser) parseInterfaceTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("interface") var def Definition def.Position = p.peekPos() def.Kind = Interface - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Interfaces = p.parseImplementsInterfaces() def.Directives = p.parseDirectives(true) - def.Fields = p.parseFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseFieldsDefinition() return &def } -func (p *parser) parseUnionTypeDefinition(description string) *Definition { - p.expectKeyword("union") +func (p *parser) parseUnionTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("union") var def Definition def.Position = p.peekPos() def.Kind = Union - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment 
= comment def.Name = p.parseName() def.Directives = p.parseDirectives(true) def.Types = p.parseUnionMemberTypes() @@ -274,87 +347,101 @@ func (p *parser) parseUnionMemberTypes() []string { return types } -func (p *parser) parseEnumTypeDefinition(description string) *Definition { - p.expectKeyword("enum") +func (p *parser) parseEnumTypeDefinition(description descriptionWithComment) *Definition { + _, comment := p.expectKeyword("enum") var def Definition def.Position = p.peekPos() def.Kind = Enum - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Directives = p.parseDirectives(true) - def.EnumValues = p.parseEnumValuesDefinition() + def.EnumValues, def.EndOfDefinitionComment = p.parseEnumValuesDefinition() return &def } -func (p *parser) parseEnumValuesDefinition() EnumValueList { +func (p *parser) parseEnumValuesDefinition() (EnumValueList, *CommentGroup) { var values EnumValueList - p.some(lexer.BraceL, lexer.BraceR, func() { + comment := p.some(lexer.BraceL, lexer.BraceR, func() { values = append(values, p.parseEnumValueDefinition()) }) - return values + return values, comment } func (p *parser) parseEnumValueDefinition() *EnumValueDefinition { - return &EnumValueDefinition{ - Position: p.peekPos(), - Description: p.parseDescription(), - Name: p.parseName(), - Directives: p.parseDirectives(true), + var def EnumValueDefinition + def.Position = p.peekPos() + desc := p.parseDescription() + if desc.text != "" { + def.BeforeDescriptionComment = desc.comment + def.Description = desc.text } + + p.peek() // peek to set p.comment + def.AfterDescriptionComment = p.comment + + def.Name = p.parseName() + def.Directives = p.parseDirectives(true) + + return &def } -func (p *parser) parseInputObjectTypeDefinition(description string) *Definition { - p.expectKeyword("input") +func (p *parser) parseInputObjectTypeDefinition(description 
descriptionWithComment) *Definition { + _, comment := p.expectKeyword("input") var def Definition def.Position = p.peekPos() def.Kind = InputObject - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Directives = p.parseDirectives(true) - def.Fields = p.parseInputFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseInputFieldsDefinition() return &def } -func (p *parser) parseInputFieldsDefinition() FieldList { +func (p *parser) parseInputFieldsDefinition() (FieldList, *CommentGroup) { var values FieldList - p.some(lexer.BraceL, lexer.BraceR, func() { + comment := p.some(lexer.BraceL, lexer.BraceR, func() { values = append(values, p.parseInputValueDef()) }) - return values + return values, comment } func (p *parser) parseTypeSystemExtension(doc *SchemaDocument) { - p.expectKeyword("extend") + _, comment := p.expectKeyword("extend") switch p.peek().Value { case "schema": - doc.SchemaExtension = append(doc.SchemaExtension, p.parseSchemaExtension()) + doc.SchemaExtension = append(doc.SchemaExtension, p.parseSchemaExtension(comment)) case "scalar": - doc.Extensions = append(doc.Extensions, p.parseScalarTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseScalarTypeExtension(comment)) case "type": - doc.Extensions = append(doc.Extensions, p.parseObjectTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseObjectTypeExtension(comment)) case "interface": - doc.Extensions = append(doc.Extensions, p.parseInterfaceTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseInterfaceTypeExtension(comment)) case "union": - doc.Extensions = append(doc.Extensions, p.parseUnionTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseUnionTypeExtension(comment)) case "enum": - doc.Extensions = append(doc.Extensions, p.parseEnumTypeExtension()) + doc.Extensions = append(doc.Extensions, 
p.parseEnumTypeExtension(comment)) case "input": - doc.Extensions = append(doc.Extensions, p.parseInputObjectTypeExtension()) + doc.Extensions = append(doc.Extensions, p.parseInputObjectTypeExtension(comment)) default: p.unexpectedError() } } -func (p *parser) parseSchemaExtension() *SchemaDefinition { +func (p *parser) parseSchemaExtension(comment *CommentGroup) *SchemaDefinition { p.expectKeyword("schema") var def SchemaDefinition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Directives = p.parseDirectives(true) - p.some(lexer.BraceL, lexer.BraceR, func() { + def.EndOfDefinitionComment = p.some(lexer.BraceL, lexer.BraceR, func() { def.OperationTypes = append(def.OperationTypes, p.parseOperationTypeDefinition()) }) if len(def.Directives) == 0 && len(def.OperationTypes) == 0 { @@ -363,11 +450,12 @@ func (p *parser) parseSchemaExtension() *SchemaDefinition { return &def } -func (p *parser) parseScalarTypeExtension() *Definition { +func (p *parser) parseScalarTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("scalar") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Scalar def.Name = p.parseName() def.Directives = p.parseDirectives(true) @@ -377,42 +465,45 @@ func (p *parser) parseScalarTypeExtension() *Definition { return &def } -func (p *parser) parseObjectTypeExtension() *Definition { +func (p *parser) parseObjectTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("type") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Object def.Name = p.parseName() def.Interfaces = p.parseImplementsInterfaces() def.Directives = p.parseDirectives(true) - def.Fields = p.parseFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseFieldsDefinition() if len(def.Interfaces) == 0 && len(def.Directives) == 0 && len(def.Fields) == 0 { p.unexpectedError() } return &def } -func (p *parser) parseInterfaceTypeExtension() 
*Definition { +func (p *parser) parseInterfaceTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("interface") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Interface def.Name = p.parseName() def.Directives = p.parseDirectives(true) - def.Fields = p.parseFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseFieldsDefinition() if len(def.Directives) == 0 && len(def.Fields) == 0 { p.unexpectedError() } return &def } -func (p *parser) parseUnionTypeExtension() *Definition { +func (p *parser) parseUnionTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("union") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Union def.Name = p.parseName() def.Directives = p.parseDirectives(true) @@ -424,43 +515,47 @@ func (p *parser) parseUnionTypeExtension() *Definition { return &def } -func (p *parser) parseEnumTypeExtension() *Definition { +func (p *parser) parseEnumTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("enum") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = Enum def.Name = p.parseName() def.Directives = p.parseDirectives(true) - def.EnumValues = p.parseEnumValuesDefinition() + def.EnumValues, def.EndOfDefinitionComment = p.parseEnumValuesDefinition() if len(def.Directives) == 0 && len(def.EnumValues) == 0 { p.unexpectedError() } return &def } -func (p *parser) parseInputObjectTypeExtension() *Definition { +func (p *parser) parseInputObjectTypeExtension(comment *CommentGroup) *Definition { p.expectKeyword("input") var def Definition def.Position = p.peekPos() + def.AfterDescriptionComment = comment def.Kind = InputObject def.Name = p.parseName() def.Directives = p.parseDirectives(false) - def.Fields = p.parseInputFieldsDefinition() + def.Fields, def.EndOfDefinitionComment = p.parseInputFieldsDefinition() if len(def.Directives) == 0 && len(def.Fields) == 0 { 
p.unexpectedError() } return &def } -func (p *parser) parseDirectiveDefinition(description string) *DirectiveDefinition { - p.expectKeyword("directive") +func (p *parser) parseDirectiveDefinition(description descriptionWithComment) *DirectiveDefinition { + _, comment := p.expectKeyword("directive") p.expect(lexer.At) var def DirectiveDefinition def.Position = p.peekPos() - def.Description = description + def.BeforeDescriptionComment = description.comment + def.Description = description.text + def.AfterDescriptionComment = comment def.Name = p.parseName() def.Arguments = p.parseArgumentDefs() @@ -487,7 +582,7 @@ func (p *parser) parseDirectiveLocations() []DirectiveLocation { } func (p *parser) parseDirectiveLocation() DirectiveLocation { - name := p.expect(lexer.Name) + name, _ := p.expect(lexer.Name) switch name.Value { case `QUERY`: @@ -533,3 +628,8 @@ func (p *parser) parseDirectiveLocation() DirectiveLocation { p.unexpectedToken(name) return "" } + +type descriptionWithComment struct { + text string + comment *CommentGroup +} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/schema_test.yml b/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml similarity index 81% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/schema_test.yml rename to vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml index 8b6a5d0ca..705514a99 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/parser/schema_test.yml +++ b/vendor/github.com/vektah/gqlparser/v2/parser/schema_test.yml @@ -15,6 +15,67 @@ object types: Name: "world" Type: String + - name: with comments + input: | + # Hello + # Hello another + type Hello { + # World + # World another + world: String + # end of type comments + } + # end of file comments + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Name: "Hello" + Fields: [FieldDefinition] + - + Name: "world" + Type: String + AfterDescriptionComment: "# 
World\n# World another\n" + AfterDescriptionComment: "# Hello\n# Hello another\n" + EndOfDefinitionComment: "# end of type comments\n" + Comment: "# end of file comments\n" + + - name: with comments and description + input: | + # Hello + # Hello another + "type description" + # Hello after description + # Hello after description another + type Hello { + # World + # World another + "field description" + # World after description + # World after description another + world: String + # end of definition coments + # end of definition comments another + } + ast: | + + Definitions: [Definition] + - + Kind: DefinitionKind("OBJECT") + Description: "type description" + Name: "Hello" + Fields: [FieldDefinition] + - + Description: "field description" + Name: "world" + Type: String + BeforeDescriptionComment: "# World\n# World another\n" + AfterDescriptionComment: "# World after description\n# World after description another\n" + BeforeDescriptionComment: "# Hello\n# Hello another\n" + AfterDescriptionComment: "# Hello after description\n# Hello after description another\n" + EndOfDefinitionComment: "# end of definition coments\n# end of definition comments another\n" + - name: with description input: | "Description" @@ -35,6 +96,7 @@ object types: - name: with block description input: | + # Before description comment """ Description """ @@ -53,6 +115,8 @@ object types: - Name: "world" Type: String + BeforeDescriptionComment: "# Before description comment\n" + AfterDescriptionComment: "# Even with comments between them\n" - name: with field arg input: | type Hello { @@ -146,8 +210,11 @@ object types: type extensions: - name: Object extension input: | + # comment extend type Hello { + # comment world world: String + # end of definition comment } ast: | @@ -159,6 +226,9 @@ type extensions: - Name: "world" Type: String + AfterDescriptionComment: "# comment world\n" + AfterDescriptionComment: "# comment\n" + EndOfDefinitionComment: "# end of definition comment\n" - name: without 
any fields input: "extend type Hello implements Greeting" @@ -277,6 +347,30 @@ schema definition: Operation: Operation("query") Type: "Query" + - name: with comments and description + input: | + # before description comment + "description" + # after description comment + schema { + # before field comment + query: Query + # after field comment + } + ast: | + + Schema: [SchemaDefinition] + - + Description: "description" + OperationTypes: [OperationTypeDefinition] + - + Operation: Operation("query") + Type: "Query" + Comment: "# before field comment\n" + BeforeDescriptionComment: "# before description comment\n" + AfterDescriptionComment: "# after description comment\n" + EndOfDefinitionComment: "# after field comment\n" + schema extensions: - name: simple input: | @@ -292,6 +386,26 @@ schema extensions: Operation: Operation("mutation") Type: "Mutation" + - name: with comment and description + input: | + # before extend comment + extend schema { + # before field comment + mutation: Mutation + # after field comment + } + ast: | + + SchemaExtension: [SchemaDefinition] + - + OperationTypes: [OperationTypeDefinition] + - + Operation: Operation("mutation") + Type: "Mutation" + Comment: "# before field comment\n" + AfterDescriptionComment: "# before extend comment\n" + EndOfDefinitionComment: "# after field comment\n" + - name: directive only input: "extend schema @directive" ast: | diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/error.go b/vendor/github.com/vektah/gqlparser/v2/validator/error.go similarity index 91% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/error.go rename to vendor/github.com/vektah/gqlparser/v2/validator/error.go index f31f180a2..f8f76055a 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/error.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/error.go @@ -3,8 +3,8 @@ package validator import ( "fmt" - 
"github.com/open-policy-agent/opa/internal/gqlparser/ast" - "github.com/open-policy-agent/opa/internal/gqlparser/gqlerror" + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" ) type ErrorOption func(err *gqlerror.Error) diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/imported/prelude.graphql b/vendor/github.com/vektah/gqlparser/v2/validator/imported/prelude.graphql new file mode 100644 index 000000000..8be3d2f5b --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/imported/prelude.graphql @@ -0,0 +1,250 @@ +# This file defines all the implicitly declared types that are required by the graphql spec. It is implicitly included by calls to LoadSchema + +"The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1." +scalar Int + +"The `Float` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point)." +scalar Float + +"The `String`scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text." +scalar String + +"The `Boolean` scalar type represents `true` or `false`." +scalar Boolean + +"""The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as "4") or integer (such as 4) input value will be accepted as an ID.""" +scalar ID + +"Directs the executor to defer this fragment when the `if` argument is true or undefined." +directive @defer( + "Deferred when true or undefined." 
+ if: Boolean = true, + "Unique name" + label: String +) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +""" +Directs the executor to include this field or fragment only when the `if` argument is true. +""" +directive @include( + """Included when true.""" + if: Boolean! +) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +""" +Directs the executor to skip this field or fragment when the `if` argument is true. +""" +directive @skip( + """Skipped when true.""" + if: Boolean! +) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + +"""Marks an element of a GraphQL schema as no longer supported.""" +directive @deprecated( + """ + Explains why this element was deprecated, usually also including a suggestion for how to access supported similar data. Formatted using the Markdown syntax, as specified by [CommonMark](https://commonmark.org/). + """ + reason: String = "No longer supported" +) on FIELD_DEFINITION | ARGUMENT_DEFINITION | INPUT_FIELD_DEFINITION | ENUM_VALUE + +"""Exposes a URL that specifies the behavior of this scalar.""" +directive @specifiedBy( + """The URL that specifies the behavior of this scalar.""" + url: String! +) on SCALAR + +""" +Indicates exactly one field must be supplied and this field must not be `null`. +""" +directive @oneOf on INPUT_OBJECT + +""" +A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations. +""" +type __Schema { + description: String + + """A list of all types supported by this server.""" + types: [__Type!]! + + """The type that query operations will be rooted at.""" + queryType: __Type! + + """ + If this server supports mutation, the type that mutation operations will be rooted at. + """ + mutationType: __Type + + """ + If this server support subscription, the type that subscription operations will be rooted at. 
+ """ + subscriptionType: __Type + + """A list of all directives supported by this server.""" + directives: [__Directive!]! +} + +""" +The fundamental unit of any GraphQL Schema is the type. There are many kinds of types in GraphQL as represented by the `__TypeKind` enum. + +Depending on the kind of a type, certain fields describe information about that type. Scalar types provide no information beyond a name, description and optional `specifiedByURL`, while Enum types provide their values. Object and Interface types provide the fields they describe. Abstract types, Union and Interface, provide the Object types possible at runtime. List and NonNull types compose other types. +""" +type __Type { + kind: __TypeKind! + name: String + description: String + specifiedByURL: String + fields(includeDeprecated: Boolean = false): [__Field!] + interfaces: [__Type!] + possibleTypes: [__Type!] + enumValues(includeDeprecated: Boolean = false): [__EnumValue!] + inputFields(includeDeprecated: Boolean = false): [__InputValue!] + ofType: __Type + isOneOf: Boolean +} + +"""An enum describing what kind of type a given `__Type` is.""" +enum __TypeKind { + """Indicates this type is a scalar.""" + SCALAR + + """ + Indicates this type is an object. `fields` and `interfaces` are valid fields. + """ + OBJECT + + """ + Indicates this type is an interface. `fields`, `interfaces`, and `possibleTypes` are valid fields. + """ + INTERFACE + + """Indicates this type is a union. `possibleTypes` is a valid field.""" + UNION + + """Indicates this type is an enum. `enumValues` is a valid field.""" + ENUM + + """ + Indicates this type is an input object. `inputFields` is a valid field. + """ + INPUT_OBJECT + + """Indicates this type is a list. `ofType` is a valid field.""" + LIST + + """Indicates this type is a non-null. 
`ofType` is a valid field.""" + NON_NULL +} + +""" +Object and Interface types are described by a list of Fields, each of which has a name, potentially a list of arguments, and a return type. +""" +type __Field { + name: String! + description: String + args(includeDeprecated: Boolean = false): [__InputValue!]! + type: __Type! + isDeprecated: Boolean! + deprecationReason: String +} + +""" +Arguments provided to Fields or Directives and the input fields of an InputObject are represented as Input Values which describe their type and optionally a default value. +""" +type __InputValue { + name: String! + description: String + type: __Type! + + """ + A GraphQL-formatted string representing the default value for this input value. + """ + defaultValue: String + isDeprecated: Boolean! + deprecationReason: String +} + +""" +One possible value for a given Enum. Enum values are unique values, not a placeholder for a string or numeric value. However an Enum value is returned in a JSON response as a string. +""" +type __EnumValue { + name: String! + description: String + isDeprecated: Boolean! + deprecationReason: String +} + +""" +A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document. + +In some cases, you need to provide options to alter GraphQL's execution behavior in ways field arguments will not suffice, such as conditionally including or skipping a field. Directives provide this by describing additional information to the executor. +""" +type __Directive { + name: String! + description: String + isRepeatable: Boolean! + locations: [__DirectiveLocation!]! + args(includeDeprecated: Boolean = false): [__InputValue!]! +} + +""" +A Directive can be adjacent to many parts of the GraphQL language, a __DirectiveLocation describes one such possible adjacencies. 
+""" +enum __DirectiveLocation { + """Location adjacent to a query operation.""" + QUERY + + """Location adjacent to a mutation operation.""" + MUTATION + + """Location adjacent to a subscription operation.""" + SUBSCRIPTION + + """Location adjacent to a field.""" + FIELD + + """Location adjacent to a fragment definition.""" + FRAGMENT_DEFINITION + + """Location adjacent to a fragment spread.""" + FRAGMENT_SPREAD + + """Location adjacent to an inline fragment.""" + INLINE_FRAGMENT + + """Location adjacent to a variable definition.""" + VARIABLE_DEFINITION + + """Location adjacent to a schema definition.""" + SCHEMA + + """Location adjacent to a scalar definition.""" + SCALAR + + """Location adjacent to an object type definition.""" + OBJECT + + """Location adjacent to a field definition.""" + FIELD_DEFINITION + + """Location adjacent to an argument definition.""" + ARGUMENT_DEFINITION + + """Location adjacent to an interface definition.""" + INTERFACE + + """Location adjacent to a union definition.""" + UNION + + """Location adjacent to an enum definition.""" + ENUM + + """Location adjacent to an enum value definition.""" + ENUM_VALUE + + """Location adjacent to an input object type definition.""" + INPUT_OBJECT + + """Location adjacent to an input object field definition.""" + INPUT_FIELD_DEFINITION +} \ No newline at end of file diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/messaging.go b/vendor/github.com/vektah/gqlparser/v2/validator/messaging.go similarity index 100% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/messaging.go rename to vendor/github.com/vektah/gqlparser/v2/validator/messaging.go diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/prelude.go b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go similarity index 66% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/prelude.go rename to 
vendor/github.com/vektah/gqlparser/v2/validator/prelude.go index 86796fab6..5c88e93b3 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/prelude.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/prelude.go @@ -3,10 +3,10 @@ package validator import ( _ "embed" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" ) -//go:embed prelude.graphql +//go:embed imported/prelude.graphql var preludeGraphql string var Prelude = &ast.Source{ diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fields_on_correct_type.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go similarity index 56% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fields_on_correct_type.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go index f68176747..b57d2a901 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fields_on_correct_type.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fields_on_correct_type.go @@ -1,40 +1,58 @@ -package validator +package rules import ( "fmt" "sort" "strings" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("FieldsOnCorrectType", func(observers *Events, addError AddErrFunc) { - observers.OnField(func(walker *Walker, field *ast.Field) { - if field.ObjectDefinition == nil || field.Definition != nil { - return - } +func ruleFuncFieldsOnCorrectType(observers *Events, addError AddErrFunc, disableSuggestion bool) { + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.ObjectDefinition == nil || field.Definition != nil { + return + } - message := fmt.Sprintf(`Cannot query field "%s" on type "%s".`, field.Name, field.ObjectDefinition.Name) + message := fmt.Sprintf(`Cannot query field "%s" on type "%s".`, field.Name, field.ObjectDefinition.Name) + if !disableSuggestion { if suggestedTypeNames := getSuggestedTypeNames(walker, field.ObjectDefinition, field.Name); suggestedTypeNames != nil { message += " Did you mean to use an inline fragment on " + QuotedOrList(suggestedTypeNames...) + "?" } else if suggestedFieldNames := getSuggestedFieldNames(field.ObjectDefinition, field.Name); suggestedFieldNames != nil { message += " Did you mean " + QuotedOrList(suggestedFieldNames...) + "?" 
} + } - addError( - Message(message), //nolint:govet - At(field.Position), - ) - }) + addError( + Message("%s", message), + At(field.Position), + ) }) } -// Go through all of the implementations of type, as well as the interfaces +var FieldsOnCorrectTypeRule = Rule{ + Name: "FieldsOnCorrectType", + RuleFunc: func(observers *Events, addError AddErrFunc) { + ruleFuncFieldsOnCorrectType(observers, addError, false) + }, +} + +var FieldsOnCorrectTypeRuleWithoutSuggestions = Rule{ + Name: "FieldsOnCorrectTypeWithoutSuggestions", + RuleFunc: func(observers *Events, addError AddErrFunc) { + ruleFuncFieldsOnCorrectType(observers, addError, true) + }, +} + +func init() { + AddRule(FieldsOnCorrectTypeRule.Name, FieldsOnCorrectTypeRule.RuleFunc) +} + +// Go through all the implementations of type, as well as the interfaces // that they implement. If any of those types include the provided field, // suggest them, sorted by how often the type is referenced, starting // with Interfaces. @@ -44,7 +62,7 @@ func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) } possibleTypes := walker.Schema.GetPossibleTypes(parent) - var suggestedObjectTypes = make([]string, 0, len(possibleTypes)) + suggestedObjectTypes := make([]string, 0, len(possibleTypes)) var suggestedInterfaceTypes []string interfaceUsageCount := map[string]int{} @@ -67,7 +85,7 @@ func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) } } - suggestedTypes := append(suggestedInterfaceTypes, suggestedObjectTypes...) 
+ suggestedTypes := concatSlice(suggestedInterfaceTypes, suggestedObjectTypes) sort.SliceStable(suggestedTypes, func(i, j int) bool { typeA, typeB := suggestedTypes[i], suggestedTypes[j] @@ -81,6 +99,16 @@ func getSuggestedTypeNames(walker *Walker, parent *ast.Definition, name string) return suggestedTypes } +// By employing a full slice expression (slice[low:high:max]), +// where max is set to the slice’s length, +// we ensure that appending elements results +// in a slice backed by a distinct array. +// This method prevents the shared array issue +func concatSlice(first []string, second []string) []string { + n := len(first) + return append(first[:n:n], second...) +} + // For the field name provided, determine if there are any similar field names // that may be the result of a typo. func getSuggestedFieldNames(parent *ast.Definition, name string) []string { @@ -88,7 +116,7 @@ func getSuggestedFieldNames(parent *ast.Definition, name string) []string { return nil } - var possibleFieldNames = make([]string, 0, len(parent.Fields)) + possibleFieldNames := make([]string, 0, len(parent.Fields)) for _, field := range parent.Fields { possibleFieldNames = append(possibleFieldNames, field.Name) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fragments_on_composite_types.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go similarity index 58% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fragments_on_composite_types.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go index 861e3b16c..a88e3f1cf 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/fragments_on_composite_types.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/fragments_on_composite_types.go @@ -1,16 +1,17 @@ -package validator +package rules import ( "fmt" - 
"github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . "github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("FragmentsOnCompositeTypes", func(observers *Events, addError AddErrFunc) { +var FragmentsOnCompositeTypesRule = Rule{ + Name: "FragmentsOnCompositeTypes", + RuleFunc: func(observers *Events, addError AddErrFunc) { observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { fragmentType := walker.Schema.Types[inlineFragment.TypeCondition] if fragmentType == nil || fragmentType.IsCompositeType() { @@ -20,12 +21,12 @@ func init() { message := fmt.Sprintf(`Fragment cannot condition on non composite type "%s".`, inlineFragment.TypeCondition) addError( - Message(message), //nolint:govet + Message("%s", message), At(inlineFragment.Position), ) }) - observers.OnFragment(func(_ *Walker, fragment *ast.FragmentDefinition) { + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { if fragment.Definition == nil || fragment.TypeCondition == "" || fragment.Definition.IsCompositeType() { return } @@ -33,9 +34,13 @@ func init() { message := fmt.Sprintf(`Fragment "%s" cannot condition on non composite type "%s".`, fragment.Name, fragment.TypeCondition) addError( - Message(message), //nolint:govet + Message("%s", message), At(fragment.Position), ) }) - }) + }, +} + +func init() { + AddRule(FragmentsOnCompositeTypesRule.Name, FragmentsOnCompositeTypesRule.RuleFunc) } diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go new file mode 100644 index 000000000..83b4e0575 --- /dev/null +++ 
b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_argument_names.go @@ -0,0 +1,88 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . "github.com/vektah/gqlparser/v2/validator" +) + +func ruleFuncKnownArgumentNames(observers *Events, addError AddErrFunc, disableSuggestion bool) { + // A GraphQL field is only valid if all supplied arguments are defined by that field. + observers.OnField(func(walker *Walker, field *ast.Field) { + if field.Definition == nil || field.ObjectDefinition == nil { + return + } + for _, arg := range field.Arguments { + def := field.Definition.Arguments.ForName(arg.Name) + if def != nil { + continue + } + + if disableSuggestion { + addError( + Message(`Unknown argument "%s" on field "%s.%s".`, arg.Name, field.ObjectDefinition.Name, field.Name), + At(field.Position), + ) + } else { + var suggestions []string + for _, argDef := range field.Definition.Arguments { + suggestions = append(suggestions, argDef.Name) + } + addError( + Message(`Unknown argument "%s" on field "%s.%s".`, arg.Name, field.ObjectDefinition.Name, field.Name), + SuggestListQuoted("Did you mean", arg.Name, suggestions), + At(field.Position), + ) + } + } + }) + + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + if directive.Definition == nil { + return + } + for _, arg := range directive.Arguments { + def := directive.Definition.Arguments.ForName(arg.Name) + if def != nil { + continue + } + + if disableSuggestion { + addError( + Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name), + At(directive.Position), + ) + } else { + var suggestions []string + for _, argDef := range directive.Definition.Arguments { + suggestions = append(suggestions, argDef.Name) + } + + addError( + Message(`Unknown argument "%s" on directive "@%s".`, arg.Name, directive.Name), + SuggestListQuoted("Did you mean", arg.Name, suggestions), + 
At(directive.Position), + ) + } + } + }) +} + +var KnownArgumentNamesRule = Rule{ + Name: "KnownArgumentNames", + RuleFunc: func(observers *Events, addError AddErrFunc) { + ruleFuncKnownArgumentNames(observers, addError, false) + }, +} + +var KnownArgumentNamesRuleWithoutSuggestions = Rule{ + Name: "KnownArgumentNamesWithoutSuggestions", + RuleFunc: func(observers *Events, addError AddErrFunc) { + ruleFuncKnownArgumentNames(observers, addError, true) + }, +} + +func init() { + AddRule(KnownArgumentNamesRule.Name, KnownArgumentNamesRule.RuleFunc) +} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_directives.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go similarity index 62% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_directives.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go index 9855291e3..ccb5efeb9 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_directives.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_directives.go @@ -1,21 +1,22 @@ -package validator +package rules import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("KnownDirectives", func(observers *Events, addError AddErrFunc) { +var KnownDirectivesRule = Rule{ + Name: "KnownDirectives", + RuleFunc: func(observers *Events, addError AddErrFunc) { type mayNotBeUsedDirective struct { Name string Line int Column int } - var seen = map[mayNotBeUsedDirective]bool{} - observers.OnDirective(func(_ *Walker, directive *ast.Directive) { + seen := map[mayNotBeUsedDirective]bool{} + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { if directive.Definition == nil { addError( Message(`Unknown directive "@%s".`, directive.Name), @@ -45,5 +46,9 @@ func init() { seen[tmp] = true } }) - }) + }, +} + +func init() { + AddRule(KnownDirectivesRule.Name, KnownDirectivesRule.RuleFunc) } diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go new file mode 100644 index 000000000..525698fb9 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_fragment_names.go @@ -0,0 +1,26 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" +) + +var KnownFragmentNamesRule = Rule{ + Name: "KnownFragmentNames", + RuleFunc: func(observers *Events, addError AddErrFunc) { + observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + if fragmentSpread.Definition == nil { + addError( + Message(`Unknown fragment "%s".`, fragmentSpread.Name), + At(fragmentSpread.Position), + ) + } + }) + }, +} + +func init() { + AddRule(KnownFragmentNamesRule.Name, KnownFragmentNamesRule.RuleFunc) +} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_root_type.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go similarity index 69% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_root_type.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go index ab97cd901..aa66d16c2 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/known_root_type.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_root_type.go @@ -1,16 +1,17 @@ -package validator +package rules import ( "fmt" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . "github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("KnownRootType", func(observers *Events, addError AddErrFunc) { +var KnownRootTypeRule = Rule{ + Name: "KnownRootType", + RuleFunc: func(observers *Events, addError AddErrFunc) { // A query's root must be a valid type. Surprisingly, this isn't // checked anywhere else! 
observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { @@ -33,5 +34,9 @@ func init() { At(operation.Position)) } }) - }) + }, +} + +func init() { + AddRule(KnownRootTypeRule.Name, KnownRootTypeRule.RuleFunc) } diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go new file mode 100644 index 000000000..ef85c58e6 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/known_type_names.go @@ -0,0 +1,84 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . "github.com/vektah/gqlparser/v2/validator" +) + +func ruleFuncKnownTypeNames(observers *Events, addError AddErrFunc, disableSuggestion bool) { + observers.OnVariable(func(walker *Walker, variable *ast.VariableDefinition) { + typeName := variable.Type.Name() + typdef := walker.Schema.Types[typeName] + if typdef != nil { + return + } + + addError( + Message(`Unknown type "%s".`, typeName), + At(variable.Position), + ) + }) + + observers.OnInlineFragment(func(walker *Walker, inlineFragment *ast.InlineFragment) { + typedName := inlineFragment.TypeCondition + if typedName == "" { + return + } + + def := walker.Schema.Types[typedName] + if def != nil { + return + } + + addError( + Message(`Unknown type "%s".`, typedName), + At(inlineFragment.Position), + ) + }) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + typeName := fragment.TypeCondition + def := walker.Schema.Types[typeName] + if def != nil { + return + } + + if disableSuggestion { + addError( + Message(`Unknown type "%s".`, typeName), + At(fragment.Position), + ) + } else { + var possibleTypes []string + for _, t := range walker.Schema.Types { + possibleTypes = append(possibleTypes, t.Name) + } + + addError( + Message(`Unknown type "%s".`, typeName), + SuggestListQuoted("Did you 
mean", typeName, possibleTypes), + At(fragment.Position), + ) + } + }) +} + +var KnownTypeNamesRule = Rule{ + Name: "KnownTypeNames", + RuleFunc: func(observers *Events, addError AddErrFunc) { + ruleFuncKnownTypeNames(observers, addError, false) + }, +} + +var KnownTypeNamesRuleWithoutSuggestions = Rule{ + Name: "KnownTypeNamesWithoutSuggestions", + RuleFunc: func(observers *Events, addError AddErrFunc) { + ruleFuncKnownTypeNames(observers, addError, true) + }, +} + +func init() { + AddRule(KnownTypeNamesRule.Name, KnownTypeNamesRule.RuleFunc) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go new file mode 100644 index 000000000..6e246f715 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/lone_anonymous_operation.go @@ -0,0 +1,26 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" +) + +var LoneAnonymousOperationRule = Rule{ + Name: "LoneAnonymousOperation", + RuleFunc: func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + if operation.Name == "" && len(walker.Document.Operations) > 1 { + addError( + Message(`This anonymous operation must be the only defined operation.`), + At(operation.Position), + ) + } + }) + }, +} + +func init() { + AddRule(LoneAnonymousOperationRule.Name, LoneAnonymousOperationRule.RuleFunc) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/max_introspection_depth.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/max_introspection_depth.go new file mode 100644 index 000000000..57a68b32b --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/max_introspection_depth.go @@ -0,0 +1,90 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . "github.com/vektah/gqlparser/v2/validator" +) + +const maxListsDepth = 3 + +var MaxIntrospectionDepth = Rule{ + Name: "MaxIntrospectionDepth", + RuleFunc: func(observers *Events, addError AddErrFunc) { + // Counts the depth of list fields in "__Type" recursively and + // returns `true` if the limit has been reached. 
+ observers.OnField(func(walker *Walker, field *ast.Field) { + if field.Name == "__schema" || field.Name == "__type" { + visitedFragments := make(map[string]bool) + if checkDepthField(field, visitedFragments, 0) { + addError( + Message(`Maximum introspection depth exceeded`), + At(field.Position), + ) + } + return + } + }) + }, +} + +func checkDepthSelectionSet(selectionSet ast.SelectionSet, visitedFragments map[string]bool, depth int) bool { + for _, child := range selectionSet { + if field, ok := child.(*ast.Field); ok { + if checkDepthField(field, visitedFragments, depth) { + return true + } + } + if fragmentSpread, ok := child.(*ast.FragmentSpread); ok { + if checkDepthFragmentSpread(fragmentSpread, visitedFragments, depth) { + return true + } + } + if inlineFragment, ok := child.(*ast.InlineFragment); ok { + if checkDepthSelectionSet(inlineFragment.SelectionSet, visitedFragments, depth) { + return true + } + } + } + return false +} + +func checkDepthField(field *ast.Field, visitedFragments map[string]bool, depth int) bool { + if field.Name == "fields" || + field.Name == "interfaces" || + field.Name == "possibleTypes" || + field.Name == "inputFields" { + depth++ + if depth >= maxListsDepth { + return true + } + } + return checkDepthSelectionSet(field.SelectionSet, visitedFragments, depth) +} + +func checkDepthFragmentSpread(fragmentSpread *ast.FragmentSpread, visitedFragments map[string]bool, depth int) bool { + fragmentName := fragmentSpread.Name + if visited, ok := visitedFragments[fragmentName]; ok && visited { + // Fragment cycles are handled by `NoFragmentCyclesRule`. + return false + } + fragment := fragmentSpread.Definition + if fragment == nil { + // Missing fragments checks are handled by `KnownFragmentNamesRule`. + return false + } + + // Rather than following an immutable programming pattern which has + // significant memory and garbage collection overhead, we've opted to + // take a mutable approach for efficiency's sake. 
Importantly visiting a + // fragment twice is fine, so long as you don't do one visit inside the + // other. + visitedFragments[fragmentName] = true + defer delete(visitedFragments, fragmentName) + return checkDepthSelectionSet(fragment.SelectionSet, visitedFragments, depth) +} + +func init() { + AddRule(MaxIntrospectionDepth.Name, MaxIntrospectionDepth.RuleFunc) +} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_fragment_cycles.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go similarity index 83% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_fragment_cycles.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go index edc562ddd..4e7907e24 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_fragment_cycles.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_fragment_cycles.go @@ -1,17 +1,18 @@ -package validator +package rules import ( "fmt" "strings" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("NoFragmentCycles", func(observers *Events, addError AddErrFunc) { +var NoFragmentCyclesRule = Rule{ + Name: "NoFragmentCycles", + RuleFunc: func(observers *Events, addError AddErrFunc) { visitedFrags := make(map[string]bool) observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { @@ -51,7 +52,7 @@ func init() { } var via string if len(fragmentNames) != 0 { - via = " via " + strings.Join(fragmentNames, ", ") + via = fmt.Sprintf(" via %s", strings.Join(fragmentNames, ", ")) } addError( Message(`Cannot spread fragment "%s" within itself%s.`, spreadName, via), @@ -67,7 +68,11 @@ func init() { recursive(fragment) }) - }) + }, +} + +func init() { + AddRule(NoFragmentCyclesRule.Name, NoFragmentCyclesRule.RuleFunc) } func getFragmentSpreads(node ast.SelectionSet) []*ast.FragmentSpread { diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_undefined_variables.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go similarity index 57% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_undefined_variables.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go index e45a5e3d5..64f2dc776 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/no_undefined_variables.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_undefined_variables.go @@ -1,14 +1,15 @@ -package validator +package rules import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("NoUndefinedVariables", func(observers *Events, addError AddErrFunc) { +var NoUndefinedVariablesRule = Rule{ + Name: "NoUndefinedVariables", + RuleFunc: func(observers *Events, addError AddErrFunc) { observers.OnValue(func(walker *Walker, value *ast.Value) { if walker.CurrentOperation == nil || value.Kind != ast.Variable || value.VariableDefinition != nil { return @@ -26,5 +27,9 @@ func init() { ) } }) - }) + }, +} + +func init() { + AddRule(NoUndefinedVariablesRule.Name, NoUndefinedVariablesRule.RuleFunc) } diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go new file mode 100644 index 000000000..a914ee6d3 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_fragments.go @@ -0,0 +1,36 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" +) + +var NoUnusedFragmentsRule = Rule{ + Name: "NoUnusedFragments", + RuleFunc: func(observers *Events, addError AddErrFunc) { + inFragmentDefinition := false + fragmentNameUsed := make(map[string]bool) + + observers.OnFragmentSpread(func(walker *Walker, fragmentSpread *ast.FragmentSpread) { + if !inFragmentDefinition { + fragmentNameUsed[fragmentSpread.Name] = true + } + }) + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + inFragmentDefinition = true + if !fragmentNameUsed[fragment.Name] { + addError( + Message(`Fragment "%s" is never used.`, fragment.Name), + At(fragment.Position), + ) + } + }) + }, +} + +func init() { + AddRule(NoUnusedFragmentsRule.Name, NoUnusedFragmentsRule.RuleFunc) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go new file mode 100644 index 000000000..daed80ebb --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/no_unused_variables.go @@ -0,0 +1,37 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" +) + +var NoUnusedVariablesRule = Rule{ + Name: "NoUnusedVariables", + RuleFunc: func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + for _, varDef := range operation.VariableDefinitions { + if varDef.Used { + continue + } + + if operation.Name != "" { + addError( + Message(`Variable "$%s" is never used in operation "%s".`, varDef.Variable, operation.Name), + At(varDef.Position), + ) + } else { + addError( + Message(`Variable "$%s" is never used.`, varDef.Variable), + At(varDef.Position), + ) + } + } + }) + }, +} + +func init() { + AddRule(NoUnusedVariablesRule.Name, NoUnusedVariablesRule.RuleFunc) +} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/overlapping_fields_can_be_merged.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go similarity index 97% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/overlapping_fields_can_be_merged.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go index 1e207a43e..129568220 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/overlapping_fields_can_be_merged.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/overlapping_fields_can_be_merged.go @@ -1,19 +1,19 @@ -package validator +package rules import ( "bytes" "fmt" "reflect" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" ) -func init() { - - AddRule("OverlappingFieldsCanBeMerged", func(observers *Events, addError AddErrFunc) { +var OverlappingFieldsCanBeMergedRule = Rule{ + Name: "OverlappingFieldsCanBeMerged", + RuleFunc: func(observers *Events, addError AddErrFunc) { /** * Algorithm: * @@ -105,7 +105,11 @@ func init() { conflict.addFieldsConflictMessage(addError) } }) - }) + }, +} + +func init() { + AddRule(OverlappingFieldsCanBeMergedRule.Name, OverlappingFieldsCanBeMergedRule.RuleFunc) } type pairSet struct { @@ -304,10 +308,8 @@ func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFieldsAndFr } func (m *overlappingFieldsCanBeMergedManager) collectConflictsBetweenFragments(conflicts *conflictMessageContainer, areMutuallyExclusive bool, fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) { - var check func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) check = func(fragmentSpreadA *ast.FragmentSpread, fragmentSpreadB *ast.FragmentSpread) { - if fragmentSpreadA.Name == fragmentSpreadB.Name { return } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/possible_fragment_spreads.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go similarity index 82% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/possible_fragment_spreads.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go index 79cb20c49..b81f37565 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/possible_fragment_spreads.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/possible_fragment_spreads.go @@ -1,15 +1,15 @@ -package validator +package rules import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for 
convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . "github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("PossibleFragmentSpreads", func(observers *Events, addError AddErrFunc) { - +var PossibleFragmentSpreadsRule = Rule{ + Name: "PossibleFragmentSpreads", + RuleFunc: func(observers *Events, addError AddErrFunc) { validate := func(walker *Walker, parentDef *ast.Definition, fragmentName string, emitError func()) { if parentDef == nil { return @@ -66,5 +66,9 @@ func init() { ) }) }) - }) + }, +} + +func init() { + AddRule(PossibleFragmentSpreadsRule.Name, PossibleFragmentSpreadsRule.RuleFunc) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/provided_required_arguments.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go similarity index 67% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/provided_required_arguments.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go index d6d12c4fd..90667af23 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/provided_required_arguments.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/provided_required_arguments.go @@ -1,15 +1,15 @@ -package validator +package rules import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + "github.com/vektah/gqlparser/v2/ast" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("ProvidedRequiredArguments", func(observers *Events, addError AddErrFunc) { - observers.OnField(func(_ *Walker, field *ast.Field) { +var ProvidedRequiredArgumentsRule = Rule{ + Name: "ProvidedRequiredArguments", + RuleFunc: func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { if field.Definition == nil { return } @@ -35,7 +35,7 @@ func init() { } }) - observers.OnDirective(func(_ *Walker, directive *ast.Directive) { + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { if directive.Definition == nil { return } @@ -60,5 +60,9 @@ func init() { ) } }) - }) + }, +} + +func init() { + AddRule(ProvidedRequiredArgumentsRule.Name, ProvidedRequiredArgumentsRule.RuleFunc) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/scalar_leafs.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go similarity index 68% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/scalar_leafs.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go index cd17b47c8..73a1e8967 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/scalar_leafs.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/scalar_leafs.go @@ -1,14 +1,15 @@ -package validator +package rules import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("ScalarLeafs", func(observers *Events, addError AddErrFunc) { +var ScalarLeafsRule = Rule{ + Name: "ScalarLeafs", + RuleFunc: func(observers *Events, addError AddErrFunc) { observers.OnField(func(walker *Walker, field *ast.Field) { if field.Definition == nil { return @@ -34,5 +35,9 @@ func init() { ) } }) - }) + }, +} + +func init() { + AddRule(ScalarLeafsRule.Name, ScalarLeafsRule.RuleFunc) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/single_field_subscriptions.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go similarity index 82% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/single_field_subscriptions.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go index 98cb984b4..1498d8298 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/single_field_subscriptions.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/single_field_subscriptions.go @@ -1,17 +1,18 @@ -package validator +package rules import ( "strconv" "strings" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("SingleFieldSubscriptions", func(observers *Events, addError AddErrFunc) { +var SingleFieldSubscriptionsRule = Rule{ + Name: "SingleFieldSubscriptions", + RuleFunc: func(observers *Events, addError AddErrFunc) { observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { if walker.Schema.Subscription == nil || operation.Operation != ast.Subscription { return @@ -40,7 +41,11 @@ func init() { } } }) - }) + }, +} + +func init() { + AddRule(SingleFieldSubscriptionsRule.Name, SingleFieldSubscriptionsRule.RuleFunc) } type topField struct { diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go new file mode 100644 index 000000000..b90cc6510 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_argument_names.go @@ -0,0 +1,40 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" +) + +var UniqueArgumentNamesRule = Rule{ + Name: "UniqueArgumentNames", + RuleFunc: func(observers *Events, addError AddErrFunc) { + observers.OnField(func(walker *Walker, field *ast.Field) { + checkUniqueArgs(field.Arguments, addError) + }) + + observers.OnDirective(func(walker *Walker, directive *ast.Directive) { + checkUniqueArgs(directive.Arguments, addError) + }) + }, +} + +func init() { + AddRule(UniqueArgumentNamesRule.Name, UniqueArgumentNamesRule.RuleFunc) +} + +func checkUniqueArgs(args ast.ArgumentList, addError AddErrFunc) { + knownArgNames := map[string]int{} + + for _, arg := range args { + if knownArgNames[arg.Name] == 1 { + addError( + Message(`There can be only one argument named "%s".`, arg.Name), + At(arg.Position), + ) + } + + knownArgNames[arg.Name]++ + } +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go new file mode 100644 index 000000000..4222f36ae --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_directives_per_location.go @@ -0,0 +1,31 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" +) + +var UniqueDirectivesPerLocationRule = Rule{ + Name: "UniqueDirectivesPerLocation", + RuleFunc: func(observers *Events, addError AddErrFunc) { + observers.OnDirectiveList(func(walker *Walker, directives []*ast.Directive) { + seen := map[string]bool{} + + for _, dir := range directives { + if dir.Name != "repeatable" && seen[dir.Name] { + addError( + Message(`The directive "@%s" can only be used once at this location.`, dir.Name), + At(dir.Position), + ) + } + seen[dir.Name] = true + } + }) + }, +} + +func init() { + AddRule(UniqueDirectivesPerLocationRule.Name, UniqueDirectivesPerLocationRule.RuleFunc) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go new file mode 100644 index 000000000..aab8eeb4e --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_fragment_names.go @@ -0,0 +1,29 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" +) + +var UniqueFragmentNamesRule = Rule{ + Name: "UniqueFragmentNames", + RuleFunc: func(observers *Events, addError AddErrFunc) { + seenFragments := map[string]bool{} + + observers.OnFragment(func(walker *Walker, fragment *ast.FragmentDefinition) { + if seenFragments[fragment.Name] { + addError( + Message(`There can be only one fragment named "%s".`, fragment.Name), + At(fragment.Position), + ) + } + seenFragments[fragment.Name] = true + }) + }, +} + +func init() { + AddRule(UniqueFragmentNamesRule.Name, UniqueFragmentNamesRule.RuleFunc) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go new file mode 100644 index 000000000..250849344 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_input_field_names.go @@ -0,0 +1,34 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" +) + +var UniqueInputFieldNamesRule = Rule{ + Name: "UniqueInputFieldNames", + RuleFunc: func(observers *Events, addError AddErrFunc) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if value.Kind != ast.ObjectValue { + return + } + + seen := map[string]bool{} + for _, field := range value.Children { + if seen[field.Name] { + addError( + Message(`There can be only one input field named "%s".`, field.Name), + At(field.Position), + ) + } + seen[field.Name] = true + } + }) + }, +} + +func init() { + AddRule(UniqueInputFieldNamesRule.Name, UniqueInputFieldNamesRule.RuleFunc) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go new file mode 100644 index 000000000..6f1ec26ab --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_operation_names.go @@ -0,0 +1,29 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" +) + +var UniqueOperationNamesRule = Rule{ + Name: "UniqueOperationNames", + RuleFunc: func(observers *Events, addError AddErrFunc) { + seen := map[string]bool{} + + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + if seen[operation.Name] { + addError( + Message(`There can be only one operation named "%s".`, operation.Name), + At(operation.Position), + ) + } + seen[operation.Name] = true + }) + }, +} + +func init() { + AddRule(UniqueOperationNamesRule.Name, UniqueOperationNamesRule.RuleFunc) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go new file mode 100644 index 000000000..6b037ed52 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/unique_variable_names.go @@ -0,0 +1,31 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . "github.com/vektah/gqlparser/v2/validator" +) + +var UniqueVariableNamesRule = Rule{ + Name: "UniqueVariableNames", + RuleFunc: func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + seen := map[string]int{} + for _, def := range operation.VariableDefinitions { + // add the same error only once per a variable. 
+ if seen[def.Variable] == 1 { + addError( + Message(`There can be only one variable named "$%s".`, def.Variable), + At(def.Position), + ) + } + seen[def.Variable]++ + } + }) + }, +} + +func init() { + AddRule(UniqueVariableNamesRule.Name, UniqueVariableNamesRule.RuleFunc) +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go new file mode 100644 index 000000000..01510b7b5 --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/values_of_correct_type.go @@ -0,0 +1,250 @@ +package rules + +import ( + "errors" + "fmt" + "strconv" + + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . "github.com/vektah/gqlparser/v2/validator" +) + +func ruleFuncValuesOfCorrectType(observers *Events, addError AddErrFunc, disableSuggestion bool) { + observers.OnValue(func(walker *Walker, value *ast.Value) { + if value.Definition == nil || value.ExpectedType == nil { + return + } + + if value.Kind == ast.NullValue && value.ExpectedType.NonNull { + addError( + Message(`Expected value of type "%s", found %s.`, value.ExpectedType.String(), value.String()), + At(value.Position), + ) + } + + if value.Definition.Kind == ast.Scalar { + // Skip custom validating scalars + if !value.Definition.OneOf("Int", "Float", "String", "Boolean", "ID") { + return + } + } + + var possibleEnums []string + if value.Definition.Kind == ast.Enum { + for _, val := range value.Definition.EnumValues { + possibleEnums = append(possibleEnums, val.Name) + } + } + + rawVal, err := value.Value(nil) + if err != nil { + unexpectedTypeMessage(addError, value) + } + + switch value.Kind { + case ast.NullValue: + return + case ast.ListValue: + if value.ExpectedType.Elem == nil { + unexpectedTypeMessage(addError, value) + return + } + + case ast.IntValue: + if !value.Definition.OneOf("Int", "Float", "ID") { + 
unexpectedTypeMessage(addError, value) + } + + case ast.FloatValue: + if !value.Definition.OneOf("Float") { + unexpectedTypeMessage(addError, value) + } + + case ast.StringValue, ast.BlockValue: + if value.Definition.Kind == ast.Enum { + if disableSuggestion { + addError( + Message(`Enum "%s" cannot represent non-enum value: %s.`, value.ExpectedType.String(), value.String()), + At(value.Position), + ) + } else { + rawValStr := fmt.Sprint(rawVal) + addError( + Message(`Enum "%s" cannot represent non-enum value: %s.`, value.ExpectedType.String(), value.String()), + SuggestListQuoted("Did you mean the enum value", rawValStr, possibleEnums), + At(value.Position), + ) + } + } else if !value.Definition.OneOf("String", "ID") { + unexpectedTypeMessage(addError, value) + } + + case ast.EnumValue: + if value.Definition.Kind != ast.Enum { + if disableSuggestion { + addError( + unexpectedTypeMessageOnly(value), + At(value.Position), + ) + } else { + rawValStr := fmt.Sprint(rawVal) + addError( + unexpectedTypeMessageOnly(value), + SuggestListUnquoted("Did you mean the enum value", rawValStr, possibleEnums), + At(value.Position), + ) + } + } else if value.Definition.EnumValues.ForName(value.Raw) == nil { + if disableSuggestion { + addError( + Message(`Value "%s" does not exist in "%s" enum.`, value.String(), value.ExpectedType.String()), + At(value.Position), + ) + } else { + rawValStr := fmt.Sprint(rawVal) + addError( + Message(`Value "%s" does not exist in "%s" enum.`, value.String(), value.ExpectedType.String()), + SuggestListQuoted("Did you mean the enum value", rawValStr, possibleEnums), + At(value.Position), + ) + } + } + + case ast.BooleanValue: + if !value.Definition.OneOf("Boolean") { + unexpectedTypeMessage(addError, value) + } + + case ast.ObjectValue: + + for _, field := range value.Definition.Fields { + if field.Type.NonNull { + fieldValue := value.Children.ForName(field.Name) + if fieldValue == nil && field.DefaultValue == nil { + addError( + Message(`Field "%s.%s" 
of required type "%s" was not provided.`, value.Definition.Name, field.Name, field.Type.String()), + At(value.Position), + ) + continue + } + } + } + + for _, directive := range value.Definition.Directives { + if directive.Name == "oneOf" { + func() { + if len(value.Children) != 1 { + addError( + Message(`OneOf Input Object "%s" must specify exactly one key.`, value.Definition.Name), + At(value.Position), + ) + return + } + + fieldValue := value.Children[0].Value + isNullLiteral := fieldValue == nil || fieldValue.Kind == ast.NullValue + if isNullLiteral { + addError( + Message(`Field "%s.%s" must be non-null.`, value.Definition.Name, value.Definition.Fields[0].Name), + At(fieldValue.Position), + ) + return + } + + isVariable := fieldValue.Kind == ast.Variable + if isVariable { + variableName := fieldValue.VariableDefinition.Variable + isNullableVariable := !fieldValue.VariableDefinition.Type.NonNull + if isNullableVariable { + addError( + Message(`Variable "%s" must be non-nullable to be used for OneOf Input Object "%s".`, variableName, value.Definition.Name), + At(fieldValue.Position), + ) + } + } + }() + } + } + + for _, fieldValue := range value.Children { + if value.Definition.Fields.ForName(fieldValue.Name) == nil { + if disableSuggestion { + addError( + Message(`Field "%s" is not defined by type "%s".`, fieldValue.Name, value.Definition.Name), + At(fieldValue.Position), + ) + } else { + var suggestions []string + for _, fieldValue := range value.Definition.Fields { + suggestions = append(suggestions, fieldValue.Name) + } + + addError( + Message(`Field "%s" is not defined by type "%s".`, fieldValue.Name, value.Definition.Name), + SuggestListQuoted("Did you mean", fieldValue.Name, suggestions), + At(fieldValue.Position), + ) + } + } + } + + case ast.Variable: + return + + default: + panic(fmt.Errorf("unhandled %T", value)) + } + }) +} + +var ValuesOfCorrectTypeRule = Rule{ + Name: "ValuesOfCorrectType", + RuleFunc: func(observers *Events, addError AddErrFunc) { 
+ ruleFuncValuesOfCorrectType(observers, addError, false) + }, +} + +var ValuesOfCorrectTypeRuleWithoutSuggestions = Rule{ + Name: "ValuesOfCorrectTypeWithoutSuggestions", + RuleFunc: func(observers *Events, addError AddErrFunc) { + ruleFuncValuesOfCorrectType(observers, addError, true) + }, +} + +func init() { + AddRule(ValuesOfCorrectTypeRule.Name, ValuesOfCorrectTypeRule.RuleFunc) +} + +func unexpectedTypeMessage(addError AddErrFunc, v *ast.Value) { + addError( + unexpectedTypeMessageOnly(v), + At(v.Position), + ) +} + +func unexpectedTypeMessageOnly(v *ast.Value) ErrorOption { + switch v.ExpectedType.String() { + case "Int", "Int!": + if _, err := strconv.ParseInt(v.Raw, 10, 32); err != nil && errors.Is(err, strconv.ErrRange) { + return Message(`Int cannot represent non 32-bit signed integer value: %s`, v.String()) + } + return Message(`Int cannot represent non-integer value: %s`, v.String()) + case "String", "String!", "[String]": + return Message(`String cannot represent a non string value: %s`, v.String()) + case "Boolean", "Boolean!": + return Message(`Boolean cannot represent a non boolean value: %s`, v.String()) + case "Float", "Float!": + return Message(`Float cannot represent non numeric value: %s`, v.String()) + case "ID", "ID!": + return Message(`ID cannot represent a non-string and non-integer value: %s`, v.String()) + // case "Enum": + // return Message(`Enum "%s" cannot represent non-enum value: %s`, v.ExpectedType.String(), v.String()) + default: + if v.Definition.Kind == ast.Enum { + return Message(`Enum "%s" cannot represent non-enum value: %s.`, v.ExpectedType.String(), v.String()) + } + return Message(`Expected value of type "%s", found %s.`, v.ExpectedType.String(), v.String()) + } +} diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go new file mode 100644 index 000000000..e1bf2b1f5 --- /dev/null +++ 
b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_are_input_types.go @@ -0,0 +1,35 @@ +package rules + +import ( + "github.com/vektah/gqlparser/v2/ast" + + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . "github.com/vektah/gqlparser/v2/validator" +) + +var VariablesAreInputTypesRule = Rule{ + Name: "VariablesAreInputTypes", + RuleFunc: func(observers *Events, addError AddErrFunc) { + observers.OnOperation(func(walker *Walker, operation *ast.OperationDefinition) { + for _, def := range operation.VariableDefinitions { + if def.Definition == nil { + continue + } + if !def.Definition.IsInputType() { + addError( + Message( + `Variable "$%s" cannot be non-input type "%s".`, + def.Variable, + def.Type.String(), + ), + At(def.Position), + ) + } + } + }) + }, +} + +func init() { + AddRule(VariablesAreInputTypesRule.Name, VariablesAreInputTypesRule.RuleFunc) +} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_in_allowed_position.go b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go similarity index 67% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_in_allowed_position.go rename to vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go index 08a8e18c0..f05ee687a 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/rules/variables_in_allowed_position.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/rules/variables_in_allowed_position.go @@ -1,14 +1,15 @@ -package validator +package rules import ( - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" - //nolint:revive // Validator rules each use dot imports for convenience. - . "github.com/open-policy-agent/opa/internal/gqlparser/validator" + //nolint:staticcheck // Validator rules each use dot imports for convenience. + . 
"github.com/vektah/gqlparser/v2/validator" ) -func init() { - AddRule("VariablesInAllowedPosition", func(observers *Events, addError AddErrFunc) { +var VariablesInAllowedPositionRule = Rule{ + Name: "VariablesInAllowedPosition", + RuleFunc: func(observers *Events, addError AddErrFunc) { observers.OnValue(func(walker *Walker, value *ast.Value) { if value.Kind != ast.Variable || value.ExpectedType == nil || value.VariableDefinition == nil || walker.CurrentOperation == nil { return @@ -36,5 +37,9 @@ func init() { ) } }) - }) + }, +} + +func init() { + AddRule(VariablesInAllowedPositionRule.Name, VariablesInAllowedPositionRule.RuleFunc) } diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/schema.go b/vendor/github.com/vektah/gqlparser/v2/validator/schema.go similarity index 86% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/schema.go rename to vendor/github.com/vektah/gqlparser/v2/validator/schema.go index c9c542195..a8754afc2 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/schema.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/schema.go @@ -5,21 +5,20 @@ import ( "strconv" "strings" - //nolint:revive - . "github.com/open-policy-agent/opa/internal/gqlparser/ast" - "github.com/open-policy-agent/opa/internal/gqlparser/gqlerror" - "github.com/open-policy-agent/opa/internal/gqlparser/parser" + . "github.com/vektah/gqlparser/v2/ast" //nolint:staticcheck // bad, yeah + "github.com/vektah/gqlparser/v2/gqlerror" + "github.com/vektah/gqlparser/v2/parser" ) func LoadSchema(inputs ...*Source) (*Schema, error) { - ast, err := parser.ParseSchemas(inputs...) + sd, err := parser.ParseSchemas(inputs...) 
if err != nil { - return nil, err + return nil, gqlerror.WrapIfUnwrapped(err) } - return ValidateSchemaDocument(ast) + return ValidateSchemaDocument(sd) } -func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { +func ValidateSchemaDocument(sd *SchemaDocument) (*Schema, error) { schema := Schema{ Types: map[string]*Definition{}, Directives: map[string]*DirectiveDefinition{}, @@ -27,16 +26,16 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { Implements: map[string][]*Definition{}, } - for i, def := range ast.Definitions { + for i, def := range sd.Definitions { if schema.Types[def.Name] != nil { return nil, gqlerror.ErrorPosf(def.Position, "Cannot redeclare type %s.", def.Name) } - schema.Types[def.Name] = ast.Definitions[i] + schema.Types[def.Name] = sd.Definitions[i] } - defs := append(DefinitionList{}, ast.Definitions...) + defs := append(DefinitionList{}, sd.Definitions...) - for _, ext := range ast.Extensions { + for _, ext := range sd.Extensions { def := schema.Types[ext.Name] if def == nil { schema.Types[ext.Name] = &Definition{ @@ -80,13 +79,13 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { } } - for i, dir := range ast.Directives { + for i, dir := range sd.Directives { if schema.Directives[dir.Name] != nil { // While the spec says SDL must not (§3.5) explicitly define builtin // scalars, it may (§3.13) define builtin directives. Here we check for // that, and reject doubly-defined directives otherwise. switch dir.Name { - case "include", "skip", "deprecated", "specifiedBy": // the builtins + case "include", "skip", "deprecated", "specifiedBy", "defer", "oneOf": // the builtins // In principle here we might want to validate that the // directives are the same. But they might not be, if the // server has an older spec than we do. 
(Plus, validating this @@ -99,16 +98,16 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { return nil, gqlerror.ErrorPosf(dir.Position, "Cannot redeclare directive %s.", dir.Name) } } - schema.Directives[dir.Name] = ast.Directives[i] + schema.Directives[dir.Name] = sd.Directives[i] } - if len(ast.Schema) > 1 { - return nil, gqlerror.ErrorPosf(ast.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.") + if len(sd.Schema) > 1 { + return nil, gqlerror.ErrorPosf(sd.Schema[1].Position, "Cannot have multiple schema entry points, consider schema extensions instead.") } - if len(ast.Schema) == 1 { - schema.Description = ast.Schema[0].Description - for _, entrypoint := range ast.Schema[0].OperationTypes { + if len(sd.Schema) == 1 { + schema.Description = sd.Schema[0].Description + for _, entrypoint := range sd.Schema[0].OperationTypes { def := schema.Types[entrypoint.Type] if def == nil { return nil, gqlerror.ErrorPosf(entrypoint.Position, "Schema root %s refers to a type %s that does not exist.", entrypoint.Operation, entrypoint.Type) @@ -122,9 +121,13 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { schema.Subscription = def } } + if err := validateDirectives(&schema, sd.Schema[0].Directives, LocationSchema, nil); err != nil { + return nil, err + } + schema.SchemaDirectives = append(schema.SchemaDirectives, sd.Schema[0].Directives...) } - for _, ext := range ast.SchemaExtension { + for _, ext := range sd.SchemaExtension { for _, entrypoint := range ext.OperationTypes { def := schema.Types[entrypoint.Type] if def == nil { @@ -139,6 +142,10 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { schema.Subscription = def } } + if err := validateDirectives(&schema, ext.Directives, LocationSchema, nil); err != nil { + return nil, err + } + schema.SchemaDirectives = append(schema.SchemaDirectives, ext.Directives...) 
} if err := validateTypeDefinitions(&schema); err != nil { @@ -152,7 +159,7 @@ func ValidateSchemaDocument(ast *SchemaDocument) (*Schema, error) { // Inferred root operation type names should be performed only when a `schema` directive is // **not** provided, when it is, `Mutation` and `Subscription` becomes valid types and are not // assigned as a root operation on the schema. - if len(ast.Schema) == 0 { + if len(sd.Schema) == 0 { if schema.Query == nil && schema.Types["Query"] != nil { schema.Query = schema.Types["Query"] } @@ -284,6 +291,9 @@ func validateDefinition(schema *Schema, def *Definition) *gqlerror.Error { return gqlerror.ErrorPosf(def.Position, "%s %s: non-enum value %s.", def.Kind, def.Name, value.Name) } } + if err := validateDirectives(schema, value.Directives, LocationEnumValue, nil); err != nil { + return err + } } case InputObject: if len(def.Fields) == 0 { @@ -359,11 +369,12 @@ func validateDirectives(schema *Schema, dirs DirectiveList, location DirectiveLo if currentDirective != nil && dir.Name == currentDirective.Name { return gqlerror.ErrorPosf(dir.Position, "Directive %s cannot refer to itself.", currentDirective.Name) } - if schema.Directives[dir.Name] == nil { + dirDefinition := schema.Directives[dir.Name] + if dirDefinition == nil { return gqlerror.ErrorPosf(dir.Position, "Undefined directive %s.", dir.Name) } validKind := false - for _, dirLocation := range schema.Directives[dir.Name].Locations { + for _, dirLocation := range dirDefinition.Locations { if dirLocation == location { validKind = true break @@ -372,6 +383,18 @@ func validateDirectives(schema *Schema, dirs DirectiveList, location DirectiveLo if !validKind { return gqlerror.ErrorPosf(dir.Position, "Directive %s is not applicable on %s.", dir.Name, location) } + for _, arg := range dir.Arguments { + if dirDefinition.Arguments.ForName(arg.Name) == nil { + return gqlerror.ErrorPosf(arg.Position, "Undefined argument %s for directive %s.", arg.Name, dir.Name) + } + } + for _, 
schemaArg := range dirDefinition.Arguments { + if schemaArg.Type.NonNull && schemaArg.DefaultValue == nil { + if arg := dir.Arguments.ForName(schemaArg.Name); arg == nil || arg.Value.Kind == NullValue { + return gqlerror.ErrorPosf(dir.Position, "Argument %s for directive %s cannot be null.", schemaArg.Name, dir.Name) + } + } + } dir.Definition = schema.Directives[dir.Name] } return nil @@ -379,7 +402,7 @@ func validateDirectives(schema *Schema, dirs DirectiveList, location DirectiveLo func validateImplements(schema *Schema, def *Definition, intfName string) *gqlerror.Error { // see validation rules at the bottom of - // https://facebook.github.io/graphql/October2021/#sec-Objects + // https://spec.graphql.org/October2021/#sec-Objects intf := schema.Types[intfName] if intf == nil { return gqlerror.ErrorPosf(def.Position, "Undefined type %s.", strconv.Quote(intfName)) diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/schema_test.yml b/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml similarity index 92% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/schema_test.yml rename to vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml index 7034a4697..22f125bec 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/schema_test.yml +++ b/vendor/github.com/vektah/gqlparser/v2/validator/schema_test.yml @@ -80,6 +80,15 @@ object types: message: 'Name "__id" must not begin with "__", which is reserved by GraphQL introspection.' locations: [{line: 2, column: 3}] + - name: field argument list must not be empty + input: | + type FooBar { + foo(): ID + } + error: + message: 'expected at least one definition, found )' + locations: [{line: 2, column: 7}] + - name: check reserved names on type field argument input: | type FooBar { @@ -528,7 +537,16 @@ directives: directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT directive @skip(if: Boolean!) 
on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT - - name: must be declared + - name: must be declared (type) + input: | + type User @foo { + name: String + } + error: + message: "Undefined directive foo." + locations: [{line: 1, column: 12}] + + - name: must be declared (field) input: | type User { name: String @foo @@ -537,6 +555,15 @@ directives: message: "Undefined directive foo." locations: [{line: 2, column: 17}] + - name: must be declared (enum) + input: | + enum Unit { + METER @foo + } + error: + message: "Undefined directive foo." + locations: [{line: 2, column: 10}] + - name: cannot be self-referential input: | directive @A(foo: Int! @A) on FIELD_DEFINITION @@ -604,6 +631,32 @@ directives: type P { name: String @testField } interface I { id: ID @testField } + - name: Invalid directive argument not allowed + input: | + directive @foo(bla: Int!) on FIELD_DEFINITION + type P {f: Int @foo(foobla: 11)} + + error: + message: 'Undefined argument foobla for directive foo.' + locations: [{line: 2, column: 21}] + + - name: non-null argument must be provided + input: | + directive @foo(bla: Int!) on FIELD_DEFINITION + type P {f: Int @foo } + + error: + message: 'Argument bla for directive foo cannot be null.' + locations: [{line: 2, column: 17}] + + - name: non-null argument must not be null + input: | + directive @foo(bla: Int!) on FIELD_DEFINITION + type P {f: Int @foo(bla: null) } + + error: + message: 'Argument bla for directive foo cannot be null.' 
+ locations: [{line: 2, column: 17}] entry points: - name: multiple schema entry points diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/suggestionList.go b/vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go similarity index 100% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/suggestionList.go rename to vendor/github.com/vektah/gqlparser/v2/validator/suggestionList.go diff --git a/vendor/github.com/vektah/gqlparser/v2/validator/validator.go b/vendor/github.com/vektah/gqlparser/v2/validator/validator.go new file mode 100644 index 000000000..1b4040c2c --- /dev/null +++ b/vendor/github.com/vektah/gqlparser/v2/validator/validator.go @@ -0,0 +1,93 @@ +package validator + +import ( + //nolint:staticcheck // bad, yeah + . "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" +) + +type AddErrFunc func(options ...ErrorOption) + +type RuleFunc func(observers *Events, addError AddErrFunc) + +type Rule struct { + Name string + RuleFunc RuleFunc +} + +var specifiedRules []Rule + +// AddRule adds a rule to the rule set. +// ruleFunc is called once each time `Validate` is executed. +func AddRule(name string, ruleFunc RuleFunc) { + specifiedRules = append(specifiedRules, Rule{Name: name, RuleFunc: ruleFunc}) +} + +// RemoveRule removes an existing rule from the rule set +// if one of the same name exists. +// The rule set is global, so it is not safe for concurrent changes +func RemoveRule(name string) { + var result []Rule // nolint:prealloc // using initialized with len(rules) produces a race condition + for _, r := range specifiedRules { + if r.Name == name { + continue + } + result = append(result, r) + } + specifiedRules = result +} + +// ReplaceRule replaces an existing rule from the rule set +// if one of the same name exists. +// If no match is found, it will add a new rule to the rule set. 
+// The rule set is global, so it is not safe for concurrent changes +func ReplaceRule(name string, ruleFunc RuleFunc) { + var found bool + var result []Rule // nolint:prealloc // using initialized with len(rules) produces a race condition + for _, r := range specifiedRules { + if r.Name == name { + found = true + result = append(result, Rule{Name: name, RuleFunc: ruleFunc}) + continue + } + result = append(result, r) + } + if !found { + specifiedRules = append(specifiedRules, Rule{Name: name, RuleFunc: ruleFunc}) + return + } + specifiedRules = result +} + +func Validate(schema *Schema, doc *QueryDocument, rules ...Rule) gqlerror.List { + if rules == nil { + rules = specifiedRules + } + + var errs gqlerror.List + if schema == nil { + errs = append(errs, gqlerror.Errorf("cannot validate as Schema is nil")) + } + if doc == nil { + errs = append(errs, gqlerror.Errorf("cannot validate as QueryDocument is nil")) + } + if len(errs) > 0 { + return errs + } + observers := &Events{} + for i := range rules { + rule := rules[i] + rule.RuleFunc(observers, func(options ...ErrorOption) { + err := &gqlerror.Error{ + Rule: rule.Name, + } + for _, o := range options { + o(err) + } + errs = append(errs, err) + }) + } + + Walk(schema, doc, observers) + return errs +} diff --git a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/vars.go b/vendor/github.com/vektah/gqlparser/v2/validator/vars.go similarity index 94% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/vars.go rename to vendor/github.com/vektah/gqlparser/v2/validator/vars.go index 66924148b..205a7fb51 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/vars.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/vars.go @@ -2,17 +2,17 @@ package validator import ( "encoding/json" - "errors" "fmt" "reflect" "strconv" "strings" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" - 
"github.com/open-policy-agent/opa/internal/gqlparser/gqlerror" + "github.com/vektah/gqlparser/v2/ast" + "github.com/vektah/gqlparser/v2/gqlerror" ) -var ErrUnexpectedType = errors.New("Unexpected Type") +//nolint:staticcheck // We do not care about capitalized error strings +var ErrUnexpectedType = fmt.Errorf("Unexpected Type") // VariableValues coerces and validates variable values func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables map[string]interface{}) (map[string]interface{}, error) { @@ -56,19 +56,19 @@ func VariableValues(schema *ast.Schema, op *ast.OperationDefinition, variables m jsonNumber, isJSONNumber := val.(json.Number) if isJSONNumber { - if v.Type.NamedType == "Int" { + switch v.Type.NamedType { + case "Int": n, err := jsonNumber.Int64() if err != nil { return nil, gqlerror.ErrorPathf(validator.path, "cannot use value %d as %s", n, v.Type.NamedType) } rv = reflect.ValueOf(n) - } else if v.Type.NamedType == "Float" { + case "Float": f, err := jsonNumber.Float64() if err != nil { return nil, gqlerror.ErrorPathf(validator.path, "cannot use value %f as %s", f, v.Type.NamedType) } rv = reflect.ValueOf(f) - } } if rv.Kind() == reflect.Ptr || rv.Kind() == reflect.Interface { @@ -107,7 +107,7 @@ func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) (reflec slc = reflect.Append(slc, val) val = slc } - for i := range val.Len() { + for i := 0; i < val.Len(); i++ { resetPath() v.path = append(v.path, ast.PathIndex(i)) field := val.Index(i) @@ -182,7 +182,7 @@ func (v *varValidator) validateVarType(typ *ast.Type, val reflect.Value) (reflec return val, gqlerror.ErrorPathf(v.path, "cannot use %s as %s", kind.String(), typ.NamedType) case ast.InputObject: if val.Kind() != reflect.Map { - return val, gqlerror.ErrorPathf(v.path, "must be a %s", def.Name) + return val, gqlerror.ErrorPathf(v.path, "must be a %s, not a %s", def.Name, val.Kind()) } // check for unknown fields diff --git 
a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/walk.go b/vendor/github.com/vektah/gqlparser/v2/validator/walk.go similarity index 98% rename from vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/walk.go rename to vendor/github.com/vektah/gqlparser/v2/validator/walk.go index f72287186..d3140746f 100644 --- a/vendor/github.com/open-policy-agent/opa/internal/gqlparser/validator/walk.go +++ b/vendor/github.com/vektah/gqlparser/v2/validator/walk.go @@ -4,7 +4,7 @@ import ( "context" "fmt" - "github.com/open-policy-agent/opa/internal/gqlparser/ast" + "github.com/vektah/gqlparser/v2/ast" ) type Events struct { @@ -22,27 +22,35 @@ type Events struct { func (o *Events) OnOperation(f func(walker *Walker, operation *ast.OperationDefinition)) { o.operationVisitor = append(o.operationVisitor, f) } + func (o *Events) OnField(f func(walker *Walker, field *ast.Field)) { o.field = append(o.field, f) } + func (o *Events) OnFragment(f func(walker *Walker, fragment *ast.FragmentDefinition)) { o.fragment = append(o.fragment, f) } + func (o *Events) OnInlineFragment(f func(walker *Walker, inlineFragment *ast.InlineFragment)) { o.inlineFragment = append(o.inlineFragment, f) } + func (o *Events) OnFragmentSpread(f func(walker *Walker, fragmentSpread *ast.FragmentSpread)) { o.fragmentSpread = append(o.fragmentSpread, f) } + func (o *Events) OnDirective(f func(walker *Walker, directive *ast.Directive)) { o.directive = append(o.directive, f) } + func (o *Events) OnDirectiveList(f func(walker *Walker, directives []*ast.Directive)) { o.directiveList = append(o.directiveList, f) } + func (o *Events) OnValue(f func(walker *Walker, value *ast.Value)) { o.value = append(o.value, f) } + func (o *Events) OnVariable(f func(walker *Walker, variable *ast.VariableDefinition)) { o.variable = append(o.variable, f) } @@ -277,7 +285,7 @@ func (w *Walker) walkSelection(parentDef *ast.Definition, it ast.Selection) { w.walkDirectives(nextParentDef, it.Directives, 
ast.LocationFragmentSpread) if def != nil && !w.validatedFragmentSpreads[def.Name] { - // prevent inifinite recursion + // prevent infinite recursion w.validatedFragmentSpreads[def.Name] = true w.walkSelectionSet(nextParentDef, def.SelectionSet) } diff --git a/vendor/modules.txt b/vendor/modules.txt index 34c553b32..f8a9778e9 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -419,7 +419,7 @@ github.com/fatih/color # github.com/felixge/httpsnoop v1.0.4 ## explicit; go 1.13 github.com/felixge/httpsnoop -# github.com/fsnotify/fsnotify v1.8.0 +# github.com/fsnotify/fsnotify v1.9.0 ## explicit; go 1.17 github.com/fsnotify/fsnotify github.com/fsnotify/fsnotify/internal @@ -1110,7 +1110,7 @@ github.com/onsi/gomega/matchers/support/goraph/edge github.com/onsi/gomega/matchers/support/goraph/node github.com/onsi/gomega/matchers/support/goraph/util github.com/onsi/gomega/types -# github.com/open-policy-agent/opa v1.4.2 +# github.com/open-policy-agent/opa v1.5.0 ## explicit; go 1.23.8 github.com/open-policy-agent/opa/ast github.com/open-policy-agent/opa/ast/json @@ -1130,12 +1130,6 @@ github.com/open-policy-agent/opa/internal/file/archive github.com/open-policy-agent/opa/internal/file/url github.com/open-policy-agent/opa/internal/future github.com/open-policy-agent/opa/internal/gojsonschema -github.com/open-policy-agent/opa/internal/gqlparser/ast -github.com/open-policy-agent/opa/internal/gqlparser/gqlerror -github.com/open-policy-agent/opa/internal/gqlparser/lexer -github.com/open-policy-agent/opa/internal/gqlparser/parser -github.com/open-policy-agent/opa/internal/gqlparser/validator -github.com/open-policy-agent/opa/internal/gqlparser/validator/rules github.com/open-policy-agent/opa/internal/json/patch github.com/open-policy-agent/opa/internal/jwx/buffer github.com/open-policy-agent/opa/internal/jwx/jwa @@ -1663,8 +1657,8 @@ github.com/prometheus/client_golang/prometheus/internal github.com/prometheus/client_golang/prometheus/promauto 
github.com/prometheus/client_golang/prometheus/promhttp github.com/prometheus/client_golang/prometheus/promhttp/internal -# github.com/prometheus/client_model v0.6.1 -## explicit; go 1.19 +# github.com/prometheus/client_model v0.6.2 +## explicit; go 1.22.0 github.com/prometheus/client_model/go # github.com/prometheus/common v0.62.0 ## explicit; go 1.21 @@ -1880,6 +1874,14 @@ github.com/unrolled/secure/cspbuilder # github.com/urfave/cli/v2 v2.27.6 ## explicit; go 1.18 github.com/urfave/cli/v2 +# github.com/vektah/gqlparser/v2 v2.5.26 +## explicit; go 1.22 +github.com/vektah/gqlparser/v2/ast +github.com/vektah/gqlparser/v2/gqlerror +github.com/vektah/gqlparser/v2/lexer +github.com/vektah/gqlparser/v2/parser +github.com/vektah/gqlparser/v2/validator +github.com/vektah/gqlparser/v2/validator/rules # github.com/vmihailenco/msgpack/v5 v5.4.1 ## explicit; go 1.19 github.com/vmihailenco/msgpack/v5