From 0205843faaefee3ecb41bae4b8c8c348885fac14 Mon Sep 17 00:00:00 2001
From: Nikita
Date: Fri, 9 Aug 2024 12:14:49 -0400
Subject: [PATCH] bump: eco-goinfra v0.0.0-20240809150049-0634a7a9fb27 (#146)
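
Judging from the rename and deletion lists below, the packagemanifest API types now appear to ship under eco-goinfra's own schemes package, so the operator-framework client stack and its transitive dependencies (cel-go, grpc, antlr4, etc.) drop out of vendor/. A minimal sketch of how a bump like this is typically reproduced with the standard Go module commands; the exact invocation is not recorded in this patch and the steps below are an assumption:

    # hypothetical reproduction of this dependency bump
    go get github.com/openshift-kni/eco-goinfra@v0.0.0-20240809150049-0634a7a9fb27
    go mod tidy    # drops requirements no longer referenced after the bump
    go mod vendor  # regenerates vendor/ and vendor/modules.txt, yielding the deletions listed below
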
Co-authored-by: nkononov
---
go.mod | 12 +-
go.sum | 168 +-
.../github.com/antlr4-go/antlr/v4/.gitignore | 18 -
vendor/github.com/antlr4-go/antlr/v4/LICENSE | 28 -
.../github.com/antlr4-go/antlr/v4/README.md | 54 -
.../github.com/antlr4-go/antlr/v4/antlrdoc.go | 102 -
vendor/github.com/antlr4-go/antlr/v4/atn.go | 179 -
.../antlr4-go/antlr/v4/atn_config.go | 335 -
.../antlr4-go/antlr/v4/atn_config_set.go | 301 -
.../antlr/v4/atn_deserialization_options.go | 62 -
.../antlr4-go/antlr/v4/atn_deserializer.go | 684 --
.../antlr4-go/antlr/v4/atn_simulator.go | 41 -
.../antlr4-go/antlr/v4/atn_state.go | 461 --
.../github.com/antlr4-go/antlr/v4/atn_type.go | 11 -
.../antlr4-go/antlr/v4/char_stream.go | 12 -
.../antlr/v4/common_token_factory.go | 56 -
.../antlr4-go/antlr/v4/common_token_stream.go | 450 --
.../antlr4-go/antlr/v4/comparators.go | 150 -
.../antlr4-go/antlr/v4/configuration.go | 214 -
vendor/github.com/antlr4-go/antlr/v4/dfa.go | 175 -
.../antlr4-go/antlr/v4/dfa_serializer.go | 158 -
.../antlr4-go/antlr/v4/dfa_state.go | 170 -
.../antlr/v4/diagnostic_error_listener.go | 110 -
.../antlr4-go/antlr/v4/error_listener.go | 100 -
.../antlr4-go/antlr/v4/error_strategy.go | 702 --
.../github.com/antlr4-go/antlr/v4/errors.go | 259 -
.../antlr4-go/antlr/v4/file_stream.go | 67 -
.../antlr4-go/antlr/v4/input_stream.go | 157 -
.../antlr4-go/antlr/v4/int_stream.go | 16 -
.../antlr4-go/antlr/v4/interval_set.go | 330 -
.../github.com/antlr4-go/antlr/v4/jcollect.go | 685 --
vendor/github.com/antlr4-go/antlr/v4/lexer.go | 426 --
.../antlr4-go/antlr/v4/lexer_action.go | 452 --
.../antlr/v4/lexer_action_executor.go | 173 -
.../antlr4-go/antlr/v4/lexer_atn_simulator.go | 677 --
.../antlr4-go/antlr/v4/ll1_analyzer.go | 218 -
.../antlr4-go/antlr/v4/nostatistics.go | 47 -
.../github.com/antlr4-go/antlr/v4/parser.go | 700 --
.../antlr/v4/parser_atn_simulator.go | 1668 -----
.../antlr4-go/antlr/v4/parser_rule_context.go | 421 --
.../antlr4-go/antlr/v4/prediction_context.go | 727 --
.../antlr/v4/prediction_context_cache.go | 48 -
.../antlr4-go/antlr/v4/prediction_mode.go | 536 --
.../antlr4-go/antlr/v4/recognizer.go | 241 -
.../antlr4-go/antlr/v4/rule_context.go | 40 -
.../antlr4-go/antlr/v4/semantic_context.go | 464 --
.../antlr4-go/antlr/v4/statistics.go | 281 -
.../antlr4-go/antlr/v4/stats_data.go | 23 -
vendor/github.com/antlr4-go/antlr/v4/token.go | 213 -
.../antlr4-go/antlr/v4/token_source.go | 17 -
.../antlr4-go/antlr/v4/token_stream.go | 21 -
.../antlr/v4/tokenstream_rewriter.go | 662 --
.../antlr4-go/antlr/v4/trace_listener.go | 32 -
.../antlr4-go/antlr/v4/transition.go | 439 --
vendor/github.com/antlr4-go/antlr/v4/tree.go | 304 -
vendor/github.com/antlr4-go/antlr/v4/trees.go | 142 -
vendor/github.com/antlr4-go/antlr/v4/utils.go | 328 -
vendor/github.com/google/cel-go/LICENSE | 233 -
.../github.com/google/cel-go/cel/BUILD.bazel | 90 -
vendor/github.com/google/cel-go/cel/cel.go | 19 -
vendor/github.com/google/cel-go/cel/decls.go | 355 -
vendor/github.com/google/cel-go/cel/env.go | 881 ---
.../github.com/google/cel-go/cel/folding.go | 559 --
.../github.com/google/cel-go/cel/inlining.go | 240 -
vendor/github.com/google/cel-go/cel/io.go | 252 -
.../github.com/google/cel-go/cel/library.go | 784 --
vendor/github.com/google/cel-go/cel/macro.go | 576 --
.../github.com/google/cel-go/cel/optimizer.go | 482 --
.../github.com/google/cel-go/cel/options.go | 661 --
.../github.com/google/cel-go/cel/program.go | 542 --
.../github.com/google/cel-go/cel/validator.go | 375 -
.../google/cel-go/checker/BUILD.bazel | 65 -
.../google/cel-go/checker/checker.go | 696 --
.../github.com/google/cel-go/checker/cost.go | 702 --
.../google/cel-go/checker/decls/BUILD.bazel | 19 -
.../google/cel-go/checker/decls/decls.go | 237 -
.../github.com/google/cel-go/checker/env.go | 276 -
.../google/cel-go/checker/errors.go | 88 -
.../google/cel-go/checker/format.go | 216 -
.../google/cel-go/checker/mapping.go | 49 -
.../google/cel-go/checker/options.go | 42 -
.../google/cel-go/checker/printer.go | 74 -
.../google/cel-go/checker/scopes.go | 147 -
.../google/cel-go/checker/standard.go | 35 -
.../github.com/google/cel-go/checker/types.go | 309 -
.../google/cel-go/common/BUILD.bazel | 35 -
.../google/cel-go/common/ast/BUILD.bazel | 61 -
.../google/cel-go/common/ast/ast.go | 450 --
.../google/cel-go/common/ast/conversion.go | 632 --
.../google/cel-go/common/ast/expr.go | 860 ---
.../google/cel-go/common/ast/factory.go | 303 -
.../google/cel-go/common/ast/navigable.go | 652 --
.../cel-go/common/containers/BUILD.bazel | 31 -
.../cel-go/common/containers/container.go | 316 -
.../github.com/google/cel-go/common/cost.go | 40 -
.../google/cel-go/common/debug/BUILD.bazel | 20 -
.../google/cel-go/common/debug/debug.go | 309 -
.../google/cel-go/common/decls/BUILD.bazel | 39 -
.../google/cel-go/common/decls/decls.go | 844 ---
vendor/github.com/google/cel-go/common/doc.go | 17 -
.../github.com/google/cel-go/common/error.go | 79 -
.../github.com/google/cel-go/common/errors.go | 103 -
.../cel-go/common/functions/BUILD.bazel | 17 -
.../cel-go/common/functions/functions.go | 61 -
.../google/cel-go/common/location.go | 51 -
.../cel-go/common/operators/BUILD.bazel | 14 -
.../cel-go/common/operators/operators.go | 157 -
.../cel-go/common/overloads/BUILD.bazel | 14 -
.../cel-go/common/overloads/overloads.go | 327 -
.../google/cel-go/common/runes/BUILD.bazel | 25 -
.../google/cel-go/common/runes/buffer.go | 194 -
.../github.com/google/cel-go/common/source.go | 183 -
.../google/cel-go/common/stdlib/BUILD.bazel | 25 -
.../google/cel-go/common/stdlib/standard.go | 661 --
.../google/cel-go/common/types/BUILD.bazel | 90 -
.../google/cel-go/common/types/any_value.go | 24 -
.../google/cel-go/common/types/bool.go | 141 -
.../google/cel-go/common/types/bytes.go | 130 -
.../google/cel-go/common/types/compare.go | 97 -
.../google/cel-go/common/types/doc.go | 17 -
.../google/cel-go/common/types/double.go | 211 -
.../google/cel-go/common/types/duration.go | 222 -
.../google/cel-go/common/types/err.go | 146 -
.../google/cel-go/common/types/int.go | 291 -
.../google/cel-go/common/types/iterator.go | 55 -
.../google/cel-go/common/types/json_value.go | 29 -
.../google/cel-go/common/types/list.go | 523 --
.../google/cel-go/common/types/map.go | 854 ---
.../google/cel-go/common/types/null.go | 111 -
.../google/cel-go/common/types/object.go | 165 -
.../google/cel-go/common/types/optional.go | 108 -
.../google/cel-go/common/types/overflow.go | 389 -
.../google/cel-go/common/types/pb/BUILD.bazel | 53 -
.../google/cel-go/common/types/pb/checked.go | 93 -
.../google/cel-go/common/types/pb/enum.go | 44 -
.../google/cel-go/common/types/pb/equal.go | 206 -
.../google/cel-go/common/types/pb/file.go | 202 -
.../google/cel-go/common/types/pb/pb.go | 258 -
.../google/cel-go/common/types/pb/type.go | 587 --
.../google/cel-go/common/types/provider.go | 734 --
.../cel-go/common/types/ref/BUILD.bazel | 20 -
.../cel-go/common/types/ref/provider.go | 102 -
.../cel-go/common/types/ref/reference.go | 63 -
.../google/cel-go/common/types/string.go | 229 -
.../google/cel-go/common/types/timestamp.go | 311 -
.../cel-go/common/types/traits/BUILD.bazel | 29 -
.../cel-go/common/types/traits/comparer.go | 33 -
.../cel-go/common/types/traits/container.go | 23 -
.../common/types/traits/field_tester.go | 30 -
.../cel-go/common/types/traits/indexer.go | 25 -
.../cel-go/common/types/traits/iterator.go | 36 -
.../cel-go/common/types/traits/lister.go | 33 -
.../cel-go/common/types/traits/mapper.go | 33 -
.../cel-go/common/types/traits/matcher.go | 23 -
.../google/cel-go/common/types/traits/math.go | 62 -
.../cel-go/common/types/traits/receiver.go | 24 -
.../cel-go/common/types/traits/sizer.go | 25 -
.../cel-go/common/types/traits/traits.go | 64 -
.../cel-go/common/types/traits/zeroer.go | 21 -
.../google/cel-go/common/types/types.go | 806 ---
.../google/cel-go/common/types/uint.go | 244 -
.../google/cel-go/common/types/unknown.go | 326 -
.../google/cel-go/common/types/util.go | 48 -
.../google/cel-go/interpreter/BUILD.bazel | 74 -
.../google/cel-go/interpreter/activation.go | 201 -
.../cel-go/interpreter/attribute_patterns.go | 399 --
.../google/cel-go/interpreter/attributes.go | 1337 ----
.../google/cel-go/interpreter/decorators.go | 272 -
.../google/cel-go/interpreter/dispatcher.go | 100 -
.../google/cel-go/interpreter/evalstate.go | 79 -
.../cel-go/interpreter/functions/BUILD.bazel | 17 -
.../cel-go/interpreter/functions/functions.go | 39 -
.../cel-go/interpreter/interpretable.go | 1262 ----
.../google/cel-go/interpreter/interpreter.go | 185 -
.../cel-go/interpreter/optimizations.go | 46 -
.../google/cel-go/interpreter/planner.go | 756 --
.../google/cel-go/interpreter/prune.go | 543 --
.../google/cel-go/interpreter/runtimecost.go | 316 -
.../google/cel-go/parser/BUILD.bazel | 58 -
.../github.com/google/cel-go/parser/errors.go | 43 -
.../google/cel-go/parser/gen/BUILD.bazel | 26 -
.../google/cel-go/parser/gen/CEL.g4 | 200 -
.../google/cel-go/parser/gen/CEL.interp | 99 -
.../google/cel-go/parser/gen/CEL.tokens | 64 -
.../google/cel-go/parser/gen/CELLexer.interp | 136 -
.../google/cel-go/parser/gen/CELLexer.tokens | 64 -
.../cel-go/parser/gen/cel_base_listener.go | 219 -
.../cel-go/parser/gen/cel_base_visitor.go | 141 -
.../google/cel-go/parser/gen/cel_lexer.go | 344 -
.../google/cel-go/parser/gen/cel_listener.go | 208 -
.../google/cel-go/parser/gen/cel_parser.go | 6274 -----------------
.../google/cel-go/parser/gen/cel_visitor.go | 110 -
.../google/cel-go/parser/gen/doc.go | 16 -
.../google/cel-go/parser/gen/generate.sh | 35 -
.../github.com/google/cel-go/parser/helper.go | 474 --
.../github.com/google/cel-go/parser/input.go | 129 -
.../github.com/google/cel-go/parser/macro.go | 406 --
.../google/cel-go/parser/options.go | 140 -
.../github.com/google/cel-go/parser/parser.go | 1008 ---
.../google/cel-go/parser/unescape.go | 237 -
.../google/cel-go/parser/unparser.go | 629 --
.../github.com/h2non/filetype/.editorconfig | 12 -
vendor/github.com/h2non/filetype/.gitignore | 2 -
vendor/github.com/h2non/filetype/.travis.yml | 16 -
vendor/github.com/h2non/filetype/History.md | 163 -
vendor/github.com/h2non/filetype/LICENSE | 24 -
vendor/github.com/h2non/filetype/README.md | 294 -
vendor/github.com/h2non/filetype/filetype.go | 102 -
vendor/github.com/h2non/filetype/kind.go | 91 -
vendor/github.com/h2non/filetype/match.go | 90 -
.../h2non/filetype/matchers/application.go | 43 -
.../h2non/filetype/matchers/archive.go | 211 -
.../h2non/filetype/matchers/audio.go | 85 -
.../h2non/filetype/matchers/document.go | 197 -
.../h2non/filetype/matchers/font.go | 45 -
.../h2non/filetype/matchers/image.go | 143 -
.../filetype/matchers/isobmff/isobmff.go | 37 -
.../h2non/filetype/matchers/matchers.go | 51 -
.../h2non/filetype/matchers/video.go | 145 -
.../h2non/filetype/types/defaults.go | 4 -
.../github.com/h2non/filetype/types/mime.go | 14 -
.../github.com/h2non/filetype/types/split.go | 11 -
.../github.com/h2non/filetype/types/type.go | 16 -
.../github.com/h2non/filetype/types/types.go | 23 -
vendor/github.com/h2non/filetype/version.go | 4 -
.../github.com/h2non/go-is-svg/.editorconfig | 12 -
vendor/github.com/h2non/go-is-svg/.gitignore | 7 -
vendor/github.com/h2non/go-is-svg/.travis.yml | 23 -
vendor/github.com/h2non/go-is-svg/LICENSE | 24 -
vendor/github.com/h2non/go-is-svg/README.md | 47 -
vendor/github.com/h2non/go-is-svg/svg.go | 36 -
.../gomega/gstruct/errors/nested_types.go | 72 -
.../eco-goinfra/pkg/clients/clients.go | 16 -
.../eco-goinfra/pkg/olm/packagemanifest.go | 102 +-
.../pkg/olm/packagemanifestlist.go | 28 +-
.../metallb/mlboperator/metallb_types.go | 9 +-
.../metallb/mlboperator/metallb_webhook.go | 21 +-
.../olm/package-server}/operators/doc.go | 0
.../operators/packagemanifest_types.go | 19 +-
.../olm/package-server}/operators/register.go | 0
.../olm/package-server}/operators/v1/doc.go | 0
.../operators/v1/packagemanifest_types.go | 19 +-
.../package-server}/operators/v1/register.go | 2 +-
.../operators/v1/zz_generated.conversion.go | 40 +-
.../operators/v1/zz_generated.deepcopy.go | 37 +-
.../operators/v1/zz_generated.defaults.go | 0
.../operators/zz_generated.deepcopy.go | 37 +-
.../api/pkg/constraints/cel.go | 135 -
.../api/pkg/constraints/constraint.go | 87 -
.../api/pkg/operators/v1/doc.go | 4 -
.../api/pkg/operators/v1/groupversion_info.go | 28 -
.../api/pkg/operators/v1/olmconfig_types.go | 90 -
.../api/pkg/operators/v1/operator_types.go | 88 -
.../operators/v1/operatorcondition_types.go | 49 -
.../pkg/operators/v1/operatorgroup_types.go | 214 -
.../pkg/operators/v1/zz_generated.deepcopy.go | 556 --
.../api/pkg/operators/v1alpha2/doc.go | 6 -
.../operators/v1alpha2/groupversion_info.go | 42 -
.../operators/v1alpha2/operatorgroup_types.go | 99 -
.../v1alpha2/zz_generated.deepcopy.go | 139 -
.../api/pkg/operators/v2/doc.go | 4 -
.../api/pkg/operators/v2/groupversion_info.go | 28 -
.../operators/v2/operatorcondition_types.go | 54 -
.../pkg/operators/v2/zz_generated.deepcopy.go | 145 -
.../operator-lifecycle-manager/LICENSE | 201 -
.../client/clientset/versioned/scheme/doc.go | 20 -
.../clientset/versioned/scheme/register.go | 62 -
.../versioned/typed/operators/v1/doc.go | 20 -
.../typed/operators/v1/generated_expansion.go | 27 -
.../versioned/typed/operators/v1/olmconfig.go | 184 -
.../versioned/typed/operators/v1/operator.go | 184 -
.../typed/operators/v1/operatorcondition.go | 195 -
.../typed/operators/v1/operatorgroup.go | 195 -
.../typed/operators/v1/operators_client.go | 122 -
.../typed/operators/v1alpha1/catalogsource.go | 195 -
.../v1alpha1/clusterserviceversion.go | 195 -
.../versioned/typed/operators/v1alpha1/doc.go | 20 -
.../operators/v1alpha1/generated_expansion.go | 27 -
.../typed/operators/v1alpha1/installplan.go | 195 -
.../operators/v1alpha1/operators_client.go | 122 -
.../typed/operators/v1alpha1/subscription.go | 195 -
.../apis/operators/packagemanifest.go | 134 -
.../apis/operators/v1/packagemanifest.go | 97 -
.../client/clientset/versioned/scheme/doc.go | 20 -
.../clientset/versioned/scheme/register.go | 56 -
.../versioned/typed/operators/v1/doc.go | 20 -
.../typed/operators/v1/generated_expansion.go | 21 -
.../typed/operators/v1/operators_client.go | 107 -
.../typed/operators/v1/packagemanifest.go | 195 -
.../operator-registry/LICENSE | 201 -
.../operator-registry/alpha/model/error.go | 66 -
.../operator-registry/alpha/model/model.go | 407 --
.../alpha/property/errors.go | 25 -
.../alpha/property/property.go | 286 -
.../alpha/property/scheme.go | 34 -
.../operator-registry/pkg/api/api_to_model.go | 155 -
.../operator-registry/pkg/api/model_to_api.go | 207 -
.../operator-registry/pkg/api/registry.pb.go | 1734 -----
.../operator-registry/pkg/api/registry.proto | 133 -
.../pkg/api/registry_grpc.pb.go | 582 --
.../operator-registry/pkg/image/mock.go | 85 -
.../operator-registry/pkg/image/reference.go | 16 -
.../operator-registry/pkg/image/registry.go | 31 -
.../pkg/lib/semver/semver.go | 54 -
.../prettyunmarshaler/prettyunmarshaler.go | 93 -
.../operator-registry/pkg/registry/bundle.go | 395 --
.../pkg/registry/bundlegraphloader.go | 160 -
.../pkg/registry/channelupdateoptions.go | 27 -
.../pkg/registry/conversion.go | 97 -
.../operator-registry/pkg/registry/csv.go | 469 --
.../operator-registry/pkg/registry/decode.go | 69 -
.../pkg/registry/directoryGraphLoader.go | 207 -
.../operator-registry/pkg/registry/empty.go | 123 -
.../operator-registry/pkg/registry/graph.go | 79 -
.../pkg/registry/imageinput.go | 30 -
.../pkg/registry/interface.go | 110 -
.../operator-registry/pkg/registry/parse.go | 251 -
.../pkg/registry/populator.go | 464 --
.../pkg/registry/registry_to_model.go | 147 -
.../operator-registry/pkg/registry/types.go | 418 --
.../github.com/stoewer/go-strcase/.gitignore | 17 -
.../stoewer/go-strcase/.golangci.yml | 26 -
vendor/github.com/stoewer/go-strcase/LICENSE | 21 -
.../github.com/stoewer/go-strcase/README.md | 50 -
vendor/github.com/stoewer/go-strcase/camel.go | 40 -
vendor/github.com/stoewer/go-strcase/doc.go | 8 -
.../github.com/stoewer/go-strcase/helper.go | 71 -
vendor/github.com/stoewer/go-strcase/kebab.go | 14 -
vendor/github.com/stoewer/go-strcase/snake.go | 58 -
.../x/net/internal/timeseries/timeseries.go | 525 --
vendor/golang.org/x/net/trace/events.go | 532 --
vendor/golang.org/x/net/trace/histogram.go | 365 -
vendor/golang.org/x/net/trace/trace.go | 1130 ---
vendor/golang.org/x/text/width/kind_string.go | 28 -
.../golang.org/x/text/width/tables10.0.0.go | 1328 ----
.../golang.org/x/text/width/tables11.0.0.go | 1340 ----
.../golang.org/x/text/width/tables12.0.0.go | 1360 ----
.../golang.org/x/text/width/tables13.0.0.go | 1361 ----
.../golang.org/x/text/width/tables15.0.0.go | 1367 ----
vendor/golang.org/x/text/width/tables9.0.0.go | 1296 ----
vendor/golang.org/x/text/width/transform.go | 239 -
vendor/golang.org/x/text/width/trieval.go | 30 -
vendor/golang.org/x/text/width/width.go | 206 -
.../genproto/googleapis/api/LICENSE | 202 -
.../api/expr/v1alpha1/checked.pb.go | 1664 -----
.../googleapis/api/expr/v1alpha1/eval.pb.go | 580 --
.../api/expr/v1alpha1/explain.pb.go | 275 -
.../googleapis/api/expr/v1alpha1/syntax.pb.go | 1990 ------
.../googleapis/api/expr/v1alpha1/value.pb.go | 721 --
.../genproto/googleapis/rpc/LICENSE | 202 -
.../googleapis/rpc/status/status.pb.go | 203 -
vendor/google.golang.org/grpc/AUTHORS | 1 -
.../google.golang.org/grpc/CODE-OF-CONDUCT.md | 3 -
vendor/google.golang.org/grpc/CONTRIBUTING.md | 73 -
vendor/google.golang.org/grpc/GOVERNANCE.md | 1 -
vendor/google.golang.org/grpc/LICENSE | 202 -
vendor/google.golang.org/grpc/MAINTAINERS.md | 28 -
vendor/google.golang.org/grpc/Makefile | 46 -
vendor/google.golang.org/grpc/NOTICE.txt | 13 -
vendor/google.golang.org/grpc/README.md | 107 -
vendor/google.golang.org/grpc/SECURITY.md | 3 -
.../grpc/attributes/attributes.go | 141 -
vendor/google.golang.org/grpc/backoff.go | 61 -
.../google.golang.org/grpc/backoff/backoff.go | 52 -
.../grpc/balancer/balancer.go | 443 --
.../grpc/balancer/base/balancer.go | 264 -
.../grpc/balancer/base/base.go | 71 -
.../grpc/balancer/conn_state_evaluator.go | 74 -
.../grpc/balancer/grpclb/state/state.go | 51 -
.../grpc/balancer/roundrobin/roundrobin.go | 81 -
.../grpc/balancer_wrapper.go | 337 -
.../grpc_binarylog_v1/binarylog.pb.go | 1183 ----
vendor/google.golang.org/grpc/call.go | 74 -
.../grpc/channelz/channelz.go | 36 -
vendor/google.golang.org/grpc/clientconn.go | 1827 -----
vendor/google.golang.org/grpc/codec.go | 50 -
vendor/google.golang.org/grpc/codegen.sh | 17 -
.../grpc/codes/code_string.go | 111 -
vendor/google.golang.org/grpc/codes/codes.go | 250 -
.../grpc/connectivity/connectivity.go | 94 -
.../grpc/credentials/credentials.go | 291 -
.../grpc/credentials/insecure/insecure.go | 98 -
.../google.golang.org/grpc/credentials/tls.go | 251 -
vendor/google.golang.org/grpc/dialoptions.go | 726 --
vendor/google.golang.org/grpc/doc.go | 26 -
.../grpc/encoding/encoding.go | 130 -
.../grpc/encoding/proto/proto.go | 72 -
.../grpc/grpclog/component.go | 117 -
.../google.golang.org/grpc/grpclog/grpclog.go | 132 -
.../google.golang.org/grpc/grpclog/logger.go | 87 -
.../grpc/grpclog/loggerv2.go | 258 -
vendor/google.golang.org/grpc/interceptor.go | 104 -
.../grpc/internal/backoff/backoff.go | 109 -
.../balancer/gracefulswitch/config.go | 83 -
.../balancer/gracefulswitch/gracefulswitch.go | 420 --
.../grpc/internal/balancerload/load.go | 46 -
.../grpc/internal/binarylog/binarylog.go | 192 -
.../internal/binarylog/binarylog_testutil.go | 42 -
.../grpc/internal/binarylog/env_config.go | 208 -
.../grpc/internal/binarylog/method_logger.go | 446 --
.../grpc/internal/binarylog/sink.go | 170 -
.../grpc/internal/buffer/unbounded.go | 116 -
.../grpc/internal/channelz/channel.go | 255 -
.../grpc/internal/channelz/channelmap.go | 402 --
.../grpc/internal/channelz/funcs.go | 230 -
.../grpc/internal/channelz/logging.go | 75 -
.../grpc/internal/channelz/server.go | 119 -
.../grpc/internal/channelz/socket.go | 130 -
.../grpc/internal/channelz/subchannel.go | 151 -
.../grpc/internal/channelz/syscall_linux.go | 65 -
.../internal/channelz/syscall_nonlinux.go | 47 -
.../grpc/internal/channelz/trace.go | 204 -
.../grpc/internal/credentials/credentials.go | 49 -
.../grpc/internal/credentials/spiffe.go | 75 -
.../grpc/internal/credentials/syscallconn.go | 58 -
.../grpc/internal/credentials/util.go | 52 -
.../grpc/internal/envconfig/envconfig.go | 69 -
.../grpc/internal/envconfig/observability.go | 42 -
.../grpc/internal/envconfig/xds.go | 56 -
.../grpc/internal/experimental.go | 28 -
.../grpc/internal/grpclog/grpclog.go | 126 -
.../grpc/internal/grpclog/prefixLogger.go | 93 -
.../grpc/internal/grpcrand/grpcrand.go | 100 -
.../grpc/internal/grpcrand/grpcrand_go1.21.go | 73 -
.../internal/grpcsync/callback_serializer.go | 100 -
.../grpc/internal/grpcsync/event.go | 61 -
.../grpc/internal/grpcsync/oncefunc.go | 32 -
.../grpc/internal/grpcsync/pubsub.go | 121 -
.../grpc/internal/grpcutil/compressor.go | 47 -
.../grpc/internal/grpcutil/encode_duration.go | 63 -
.../grpc/internal/grpcutil/grpcutil.go | 20 -
.../grpc/internal/grpcutil/metadata.go | 40 -
.../grpc/internal/grpcutil/method.go | 88 -
.../grpc/internal/grpcutil/regex.go | 31 -
.../grpc/internal/idle/idle.go | 278 -
.../grpc/internal/internal.go | 230 -
.../grpc/internal/metadata/metadata.go | 132 -
.../grpc/internal/pretty/pretty.go | 73 -
.../grpc/internal/resolver/config_selector.go | 167 -
.../internal/resolver/dns/dns_resolver.go | 450 --
.../resolver/dns/internal/internal.go | 70 -
.../resolver/passthrough/passthrough.go | 64 -
.../grpc/internal/resolver/unix/unix.go | 78 -
.../grpc/internal/serviceconfig/duration.go | 130 -
.../internal/serviceconfig/serviceconfig.go | 180 -
.../grpc/internal/status/status.go | 205 -
.../grpc/internal/syscall/syscall_linux.go | 112 -
.../grpc/internal/syscall/syscall_nonlinux.go | 77 -
.../grpc/internal/tcp_keepalive_others.go | 29 -
.../grpc/internal/tcp_keepalive_unix.go | 54 -
.../grpc/internal/tcp_keepalive_windows.go | 54 -
.../grpc/internal/transport/bdp_estimator.go | 141 -
.../grpc/internal/transport/controlbuf.go | 1006 ---
.../grpc/internal/transport/defaults.go | 55 -
.../grpc/internal/transport/flowcontrol.go | 215 -
.../grpc/internal/transport/handler_server.go | 489 --
.../grpc/internal/transport/http2_client.go | 1788 -----
.../grpc/internal/transport/http2_server.go | 1460 ----
.../grpc/internal/transport/http_util.go | 464 --
.../grpc/internal/transport/logging.go | 40 -
.../transport/networktype/networktype.go | 46 -
.../grpc/internal/transport/proxy.go | 144 -
.../grpc/internal/transport/transport.go | 832 ---
.../grpc/keepalive/keepalive.go | 85 -
.../grpc/metadata/metadata.go | 300 -
vendor/google.golang.org/grpc/peer/peer.go | 53 -
.../google.golang.org/grpc/picker_wrapper.go | 223 -
vendor/google.golang.org/grpc/pickfirst.go | 241 -
vendor/google.golang.org/grpc/preloader.go | 67 -
vendor/google.golang.org/grpc/regenerate.sh | 123 -
.../grpc/resolver/dns/dns_resolver.go | 54 -
vendor/google.golang.org/grpc/resolver/map.go | 251 -
.../grpc/resolver/resolver.go | 332 -
.../grpc/resolver_wrapper.go | 198 -
vendor/google.golang.org/grpc/rpc_util.go | 981 ---
vendor/google.golang.org/grpc/server.go | 2183 ------
.../google.golang.org/grpc/service_config.go | 354 -
.../grpc/serviceconfig/serviceconfig.go | 44 -
.../grpc/shared_buffer_pool.go | 154 -
.../google.golang.org/grpc/stats/handlers.go | 63 -
vendor/google.golang.org/grpc/stats/stats.go | 343 -
.../google.golang.org/grpc/status/status.go | 162 -
vendor/google.golang.org/grpc/stream.go | 1781 -----
vendor/google.golang.org/grpc/tap/tap.go | 62 -
vendor/google.golang.org/grpc/trace.go | 143 -
.../google.golang.org/grpc/trace_notrace.go | 52 -
.../google.golang.org/grpc/trace_withtrace.go | 39 -
vendor/google.golang.org/grpc/version.go | 22 -
vendor/google.golang.org/grpc/vet.sh | 195 -
.../protobuf/encoding/protojson/decode.go | 685 --
.../protobuf/encoding/protojson/doc.go | 11 -
.../protobuf/encoding/protojson/encode.go | 382 -
.../encoding/protojson/well_known_types.go | 876 ---
.../protobuf/internal/encoding/json/decode.go | 340 -
.../internal/encoding/json/decode_number.go | 254 -
.../internal/encoding/json/decode_string.go | 91 -
.../internal/encoding/json/decode_token.go | 192 -
.../protobuf/internal/encoding/json/encode.go | 278 -
.../protobuf/protoadapt/convert.go | 31 -
.../protobuf/types/dynamicpb/dynamic.go | 718 --
.../protobuf/types/dynamicpb/types.go | 184 -
.../types/known/durationpb/duration.pb.go | 374 -
.../protobuf/types/known/emptypb/empty.pb.go | 166 -
.../types/known/structpb/struct.pb.go | 810 ---
.../types/known/wrapperspb/wrappers.pb.go | 760 --
vendor/modules.txt | 135 +-
506 files changed, 144 insertions(+), 124153 deletions(-)
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/.gitignore
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/LICENSE
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/README.md
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn_config.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn_state.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn_type.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/char_stream.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/common_token_factory.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/comparators.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/configuration.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/dfa.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/dfa_state.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/error_listener.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/error_strategy.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/errors.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/file_stream.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/input_stream.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/int_stream.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/interval_set.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/jcollect.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/lexer.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/lexer_action.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/nostatistics.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/parser.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/prediction_context.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/recognizer.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/rule_context.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/semantic_context.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/statistics.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/stats_data.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/token.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/token_source.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/token_stream.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/trace_listener.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/transition.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/tree.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/trees.go
delete mode 100644 vendor/github.com/antlr4-go/antlr/v4/utils.go
delete mode 100644 vendor/github.com/google/cel-go/LICENSE
delete mode 100644 vendor/github.com/google/cel-go/cel/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/cel/cel.go
delete mode 100644 vendor/github.com/google/cel-go/cel/decls.go
delete mode 100644 vendor/github.com/google/cel-go/cel/env.go
delete mode 100644 vendor/github.com/google/cel-go/cel/folding.go
delete mode 100644 vendor/github.com/google/cel-go/cel/inlining.go
delete mode 100644 vendor/github.com/google/cel-go/cel/io.go
delete mode 100644 vendor/github.com/google/cel-go/cel/library.go
delete mode 100644 vendor/github.com/google/cel-go/cel/macro.go
delete mode 100644 vendor/github.com/google/cel-go/cel/optimizer.go
delete mode 100644 vendor/github.com/google/cel-go/cel/options.go
delete mode 100644 vendor/github.com/google/cel-go/cel/program.go
delete mode 100644 vendor/github.com/google/cel-go/cel/validator.go
delete mode 100644 vendor/github.com/google/cel-go/checker/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/checker/checker.go
delete mode 100644 vendor/github.com/google/cel-go/checker/cost.go
delete mode 100644 vendor/github.com/google/cel-go/checker/decls/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/checker/decls/decls.go
delete mode 100644 vendor/github.com/google/cel-go/checker/env.go
delete mode 100644 vendor/github.com/google/cel-go/checker/errors.go
delete mode 100644 vendor/github.com/google/cel-go/checker/format.go
delete mode 100644 vendor/github.com/google/cel-go/checker/mapping.go
delete mode 100644 vendor/github.com/google/cel-go/checker/options.go
delete mode 100644 vendor/github.com/google/cel-go/checker/printer.go
delete mode 100644 vendor/github.com/google/cel-go/checker/scopes.go
delete mode 100644 vendor/github.com/google/cel-go/checker/standard.go
delete mode 100644 vendor/github.com/google/cel-go/checker/types.go
delete mode 100644 vendor/github.com/google/cel-go/common/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/ast/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/ast/ast.go
delete mode 100644 vendor/github.com/google/cel-go/common/ast/conversion.go
delete mode 100644 vendor/github.com/google/cel-go/common/ast/expr.go
delete mode 100644 vendor/github.com/google/cel-go/common/ast/factory.go
delete mode 100644 vendor/github.com/google/cel-go/common/ast/navigable.go
delete mode 100644 vendor/github.com/google/cel-go/common/containers/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/containers/container.go
delete mode 100644 vendor/github.com/google/cel-go/common/cost.go
delete mode 100644 vendor/github.com/google/cel-go/common/debug/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/debug/debug.go
delete mode 100644 vendor/github.com/google/cel-go/common/decls/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/decls/decls.go
delete mode 100644 vendor/github.com/google/cel-go/common/doc.go
delete mode 100644 vendor/github.com/google/cel-go/common/error.go
delete mode 100644 vendor/github.com/google/cel-go/common/errors.go
delete mode 100644 vendor/github.com/google/cel-go/common/functions/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/functions/functions.go
delete mode 100644 vendor/github.com/google/cel-go/common/location.go
delete mode 100644 vendor/github.com/google/cel-go/common/operators/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/operators/operators.go
delete mode 100644 vendor/github.com/google/cel-go/common/overloads/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/overloads/overloads.go
delete mode 100644 vendor/github.com/google/cel-go/common/runes/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/runes/buffer.go
delete mode 100644 vendor/github.com/google/cel-go/common/source.go
delete mode 100644 vendor/github.com/google/cel-go/common/stdlib/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/stdlib/standard.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/types/any_value.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/bool.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/bytes.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/compare.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/doc.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/double.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/duration.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/err.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/int.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/iterator.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/json_value.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/list.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/map.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/null.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/object.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/optional.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/overflow.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/pb/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/types/pb/checked.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/pb/enum.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/pb/equal.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/pb/file.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/pb/pb.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/pb/type.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/provider.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/ref/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/types/ref/provider.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/ref/reference.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/string.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/timestamp.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/comparer.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/container.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/field_tester.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/indexer.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/iterator.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/lister.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/mapper.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/matcher.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/math.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/receiver.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/sizer.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/traits.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/traits/zeroer.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/types.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/uint.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/unknown.go
delete mode 100644 vendor/github.com/google/cel-go/common/types/util.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/interpreter/activation.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/attribute_patterns.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/attributes.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/decorators.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/dispatcher.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/evalstate.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/functions/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/interpreter/functions/functions.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/interpretable.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/interpreter.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/optimizations.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/planner.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/prune.go
delete mode 100644 vendor/github.com/google/cel-go/interpreter/runtimecost.go
delete mode 100644 vendor/github.com/google/cel-go/parser/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/parser/errors.go
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/BUILD.bazel
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/CEL.g4
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/CEL.interp
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/CEL.tokens
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/CELLexer.interp
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/CELLexer.tokens
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/cel_base_listener.go
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/cel_base_visitor.go
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/cel_lexer.go
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/cel_listener.go
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/cel_parser.go
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/cel_visitor.go
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/doc.go
delete mode 100644 vendor/github.com/google/cel-go/parser/gen/generate.sh
delete mode 100644 vendor/github.com/google/cel-go/parser/helper.go
delete mode 100644 vendor/github.com/google/cel-go/parser/input.go
delete mode 100644 vendor/github.com/google/cel-go/parser/macro.go
delete mode 100644 vendor/github.com/google/cel-go/parser/options.go
delete mode 100644 vendor/github.com/google/cel-go/parser/parser.go
delete mode 100644 vendor/github.com/google/cel-go/parser/unescape.go
delete mode 100644 vendor/github.com/google/cel-go/parser/unparser.go
delete mode 100644 vendor/github.com/h2non/filetype/.editorconfig
delete mode 100644 vendor/github.com/h2non/filetype/.gitignore
delete mode 100644 vendor/github.com/h2non/filetype/.travis.yml
delete mode 100644 vendor/github.com/h2non/filetype/History.md
delete mode 100644 vendor/github.com/h2non/filetype/LICENSE
delete mode 100644 vendor/github.com/h2non/filetype/README.md
delete mode 100644 vendor/github.com/h2non/filetype/filetype.go
delete mode 100644 vendor/github.com/h2non/filetype/kind.go
delete mode 100644 vendor/github.com/h2non/filetype/match.go
delete mode 100644 vendor/github.com/h2non/filetype/matchers/application.go
delete mode 100644 vendor/github.com/h2non/filetype/matchers/archive.go
delete mode 100644 vendor/github.com/h2non/filetype/matchers/audio.go
delete mode 100644 vendor/github.com/h2non/filetype/matchers/document.go
delete mode 100644 vendor/github.com/h2non/filetype/matchers/font.go
delete mode 100644 vendor/github.com/h2non/filetype/matchers/image.go
delete mode 100644 vendor/github.com/h2non/filetype/matchers/isobmff/isobmff.go
delete mode 100644 vendor/github.com/h2non/filetype/matchers/matchers.go
delete mode 100644 vendor/github.com/h2non/filetype/matchers/video.go
delete mode 100644 vendor/github.com/h2non/filetype/types/defaults.go
delete mode 100644 vendor/github.com/h2non/filetype/types/mime.go
delete mode 100644 vendor/github.com/h2non/filetype/types/split.go
delete mode 100644 vendor/github.com/h2non/filetype/types/type.go
delete mode 100644 vendor/github.com/h2non/filetype/types/types.go
delete mode 100644 vendor/github.com/h2non/filetype/version.go
delete mode 100644 vendor/github.com/h2non/go-is-svg/.editorconfig
delete mode 100644 vendor/github.com/h2non/go-is-svg/.gitignore
delete mode 100644 vendor/github.com/h2non/go-is-svg/.travis.yml
delete mode 100644 vendor/github.com/h2non/go-is-svg/LICENSE
delete mode 100644 vendor/github.com/h2non/go-is-svg/README.md
delete mode 100644 vendor/github.com/h2non/go-is-svg/svg.go
delete mode 100644 vendor/github.com/onsi/gomega/gstruct/errors/nested_types.go
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/doc.go (100%)
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/packagemanifest_types.go (86%)
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/register.go (100%)
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/v1/doc.go (100%)
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/v1/packagemanifest_types.go (87%)
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/v1/register.go (95%)
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/v1/zz_generated.conversion.go (90%)
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/v1/zz_generated.deepcopy.go (89%)
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/v1/zz_generated.defaults.go (100%)
rename vendor/github.com/{operator-framework/operator-lifecycle-manager/pkg/package-server/apis => openshift-kni/eco-goinfra/pkg/schemes/olm/package-server}/operators/zz_generated.deepcopy.go (89%)
delete mode 100644 vendor/github.com/operator-framework/api/pkg/constraints/cel.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/constraints/constraint.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1/doc.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1/groupversion_info.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1/olmconfig_types.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1/operator_types.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1/operatorcondition_types.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1/operatorgroup_types.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1/zz_generated.deepcopy.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1alpha2/doc.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1alpha2/groupversion_info.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1alpha2/operatorgroup_types.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v1alpha2/zz_generated.deepcopy.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v2/doc.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v2/groupversion_info.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v2/operatorcondition_types.go
delete mode 100644 vendor/github.com/operator-framework/api/pkg/operators/v2/zz_generated.deepcopy.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/LICENSE
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/scheme/doc.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/scheme/register.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1/doc.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1/generated_expansion.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1/olmconfig.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1/operator.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1/operatorcondition.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1/operatorgroup.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1/operators_client.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1alpha1/catalogsource.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1alpha1/clusterserviceversion.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1alpha1/doc.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1alpha1/generated_expansion.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1alpha1/installplan.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1alpha1/operators_client.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned/typed/operators/v1alpha1/subscription.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/package-server/apis/operators/packagemanifest.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/package-server/apis/operators/v1/packagemanifest.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/package-server/client/clientset/versioned/scheme/doc.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/package-server/client/clientset/versioned/scheme/register.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/package-server/client/clientset/versioned/typed/operators/v1/doc.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/package-server/client/clientset/versioned/typed/operators/v1/generated_expansion.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/package-server/client/clientset/versioned/typed/operators/v1/operators_client.go
delete mode 100644 vendor/github.com/operator-framework/operator-lifecycle-manager/pkg/package-server/client/clientset/versioned/typed/operators/v1/packagemanifest.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/LICENSE
delete mode 100644 vendor/github.com/operator-framework/operator-registry/alpha/model/error.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/alpha/model/model.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/alpha/property/errors.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/alpha/property/property.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/alpha/property/scheme.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/api/api_to_model.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/api/model_to_api.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/api/registry.pb.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/api/registry.proto
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/api/registry_grpc.pb.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/image/mock.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/image/reference.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/image/registry.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/lib/semver/semver.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/prettyunmarshaler/prettyunmarshaler.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/bundle.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/bundlegraphloader.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/channelupdateoptions.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/conversion.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/csv.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/decode.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/directoryGraphLoader.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/empty.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/graph.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/imageinput.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/interface.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/parse.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/populator.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/registry_to_model.go
delete mode 100644 vendor/github.com/operator-framework/operator-registry/pkg/registry/types.go
delete mode 100644 vendor/github.com/stoewer/go-strcase/.gitignore
delete mode 100644 vendor/github.com/stoewer/go-strcase/.golangci.yml
delete mode 100644 vendor/github.com/stoewer/go-strcase/LICENSE
delete mode 100644 vendor/github.com/stoewer/go-strcase/README.md
delete mode 100644 vendor/github.com/stoewer/go-strcase/camel.go
delete mode 100644 vendor/github.com/stoewer/go-strcase/doc.go
delete mode 100644 vendor/github.com/stoewer/go-strcase/helper.go
delete mode 100644 vendor/github.com/stoewer/go-strcase/kebab.go
delete mode 100644 vendor/github.com/stoewer/go-strcase/snake.go
delete mode 100644 vendor/golang.org/x/net/internal/timeseries/timeseries.go
delete mode 100644 vendor/golang.org/x/net/trace/events.go
delete mode 100644 vendor/golang.org/x/net/trace/histogram.go
delete mode 100644 vendor/golang.org/x/net/trace/trace.go
delete mode 100644 vendor/golang.org/x/text/width/kind_string.go
delete mode 100644 vendor/golang.org/x/text/width/tables10.0.0.go
delete mode 100644 vendor/golang.org/x/text/width/tables11.0.0.go
delete mode 100644 vendor/golang.org/x/text/width/tables12.0.0.go
delete mode 100644 vendor/golang.org/x/text/width/tables13.0.0.go
delete mode 100644 vendor/golang.org/x/text/width/tables15.0.0.go
delete mode 100644 vendor/golang.org/x/text/width/tables9.0.0.go
delete mode 100644 vendor/golang.org/x/text/width/transform.go
delete mode 100644 vendor/golang.org/x/text/width/trieval.go
delete mode 100644 vendor/golang.org/x/text/width/width.go
delete mode 100644 vendor/google.golang.org/genproto/googleapis/api/LICENSE
delete mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/checked.pb.go
delete mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/eval.pb.go
delete mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/explain.pb.go
delete mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/syntax.pb.go
delete mode 100644 vendor/google.golang.org/genproto/googleapis/api/expr/v1alpha1/value.pb.go
delete mode 100644 vendor/google.golang.org/genproto/googleapis/rpc/LICENSE
delete mode 100644 vendor/google.golang.org/genproto/googleapis/rpc/status/status.pb.go
delete mode 100644 vendor/google.golang.org/grpc/AUTHORS
delete mode 100644 vendor/google.golang.org/grpc/CODE-OF-CONDUCT.md
delete mode 100644 vendor/google.golang.org/grpc/CONTRIBUTING.md
delete mode 100644 vendor/google.golang.org/grpc/GOVERNANCE.md
delete mode 100644 vendor/google.golang.org/grpc/LICENSE
delete mode 100644 vendor/google.golang.org/grpc/MAINTAINERS.md
delete mode 100644 vendor/google.golang.org/grpc/Makefile
delete mode 100644 vendor/google.golang.org/grpc/NOTICE.txt
delete mode 100644 vendor/google.golang.org/grpc/README.md
delete mode 100644 vendor/google.golang.org/grpc/SECURITY.md
delete mode 100644 vendor/google.golang.org/grpc/attributes/attributes.go
delete mode 100644 vendor/google.golang.org/grpc/backoff.go
delete mode 100644 vendor/google.golang.org/grpc/backoff/backoff.go
delete mode 100644 vendor/google.golang.org/grpc/balancer/balancer.go
delete mode 100644 vendor/google.golang.org/grpc/balancer/base/balancer.go
delete mode 100644 vendor/google.golang.org/grpc/balancer/base/base.go
delete mode 100644 vendor/google.golang.org/grpc/balancer/conn_state_evaluator.go
delete mode 100644 vendor/google.golang.org/grpc/balancer/grpclb/state/state.go
delete mode 100644 vendor/google.golang.org/grpc/balancer/roundrobin/roundrobin.go
delete mode 100644 vendor/google.golang.org/grpc/balancer_wrapper.go
delete mode 100644 vendor/google.golang.org/grpc/binarylog/grpc_binarylog_v1/binarylog.pb.go
delete mode 100644 vendor/google.golang.org/grpc/call.go
delete mode 100644 vendor/google.golang.org/grpc/channelz/channelz.go
delete mode 100644 vendor/google.golang.org/grpc/clientconn.go
delete mode 100644 vendor/google.golang.org/grpc/codec.go
delete mode 100644 vendor/google.golang.org/grpc/codegen.sh
delete mode 100644 vendor/google.golang.org/grpc/codes/code_string.go
delete mode 100644 vendor/google.golang.org/grpc/codes/codes.go
delete mode 100644 vendor/google.golang.org/grpc/connectivity/connectivity.go
delete mode 100644 vendor/google.golang.org/grpc/credentials/credentials.go
delete mode 100644 vendor/google.golang.org/grpc/credentials/insecure/insecure.go
delete mode 100644 vendor/google.golang.org/grpc/credentials/tls.go
delete mode 100644 vendor/google.golang.org/grpc/dialoptions.go
delete mode 100644 vendor/google.golang.org/grpc/doc.go
delete mode 100644 vendor/google.golang.org/grpc/encoding/encoding.go
delete mode 100644 vendor/google.golang.org/grpc/encoding/proto/proto.go
delete mode 100644 vendor/google.golang.org/grpc/grpclog/component.go
delete mode 100644 vendor/google.golang.org/grpc/grpclog/grpclog.go
delete mode 100644 vendor/google.golang.org/grpc/grpclog/logger.go
delete mode 100644 vendor/google.golang.org/grpc/grpclog/loggerv2.go
delete mode 100644 vendor/google.golang.org/grpc/interceptor.go
delete mode 100644 vendor/google.golang.org/grpc/internal/backoff/backoff.go
delete mode 100644 vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/config.go
delete mode 100644 vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/gracefulswitch.go
delete mode 100644 vendor/google.golang.org/grpc/internal/balancerload/load.go
delete mode 100644 vendor/google.golang.org/grpc/internal/binarylog/binarylog.go
delete mode 100644 vendor/google.golang.org/grpc/internal/binarylog/binarylog_testutil.go
delete mode 100644 vendor/google.golang.org/grpc/internal/binarylog/env_config.go
delete mode 100644 vendor/google.golang.org/grpc/internal/binarylog/method_logger.go
delete mode 100644 vendor/google.golang.org/grpc/internal/binarylog/sink.go
delete mode 100644 vendor/google.golang.org/grpc/internal/buffer/unbounded.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/channel.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/channelmap.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/funcs.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/logging.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/server.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/socket.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/subchannel.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/syscall_linux.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/syscall_nonlinux.go
delete mode 100644 vendor/google.golang.org/grpc/internal/channelz/trace.go
delete mode 100644 vendor/google.golang.org/grpc/internal/credentials/credentials.go
delete mode 100644 vendor/google.golang.org/grpc/internal/credentials/spiffe.go
delete mode 100644 vendor/google.golang.org/grpc/internal/credentials/syscallconn.go
delete mode 100644 vendor/google.golang.org/grpc/internal/credentials/util.go
delete mode 100644 vendor/google.golang.org/grpc/internal/envconfig/envconfig.go
delete mode 100644 vendor/google.golang.org/grpc/internal/envconfig/observability.go
delete mode 100644 vendor/google.golang.org/grpc/internal/envconfig/xds.go
delete mode 100644 vendor/google.golang.org/grpc/internal/experimental.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpclog/grpclog.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpclog/prefixLogger.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcrand/grpcrand.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcrand/grpcrand_go1.21.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcsync/callback_serializer.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcsync/event.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcsync/oncefunc.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcsync/pubsub.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcutil/compressor.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcutil/encode_duration.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcutil/grpcutil.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcutil/metadata.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcutil/method.go
delete mode 100644 vendor/google.golang.org/grpc/internal/grpcutil/regex.go
delete mode 100644 vendor/google.golang.org/grpc/internal/idle/idle.go
delete mode 100644 vendor/google.golang.org/grpc/internal/internal.go
delete mode 100644 vendor/google.golang.org/grpc/internal/metadata/metadata.go
delete mode 100644 vendor/google.golang.org/grpc/internal/pretty/pretty.go
delete mode 100644 vendor/google.golang.org/grpc/internal/resolver/config_selector.go
delete mode 100644 vendor/google.golang.org/grpc/internal/resolver/dns/dns_resolver.go
delete mode 100644 vendor/google.golang.org/grpc/internal/resolver/dns/internal/internal.go
delete mode 100644 vendor/google.golang.org/grpc/internal/resolver/passthrough/passthrough.go
delete mode 100644 vendor/google.golang.org/grpc/internal/resolver/unix/unix.go
delete mode 100644 vendor/google.golang.org/grpc/internal/serviceconfig/duration.go
delete mode 100644 vendor/google.golang.org/grpc/internal/serviceconfig/serviceconfig.go
delete mode 100644 vendor/google.golang.org/grpc/internal/status/status.go
delete mode 100644 vendor/google.golang.org/grpc/internal/syscall/syscall_linux.go
delete mode 100644 vendor/google.golang.org/grpc/internal/syscall/syscall_nonlinux.go
delete mode 100644 vendor/google.golang.org/grpc/internal/tcp_keepalive_others.go
delete mode 100644 vendor/google.golang.org/grpc/internal/tcp_keepalive_unix.go
delete mode 100644 vendor/google.golang.org/grpc/internal/tcp_keepalive_windows.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/bdp_estimator.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/controlbuf.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/defaults.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/flowcontrol.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/handler_server.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/http2_client.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/http2_server.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/http_util.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/logging.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/networktype/networktype.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/proxy.go
delete mode 100644 vendor/google.golang.org/grpc/internal/transport/transport.go
delete mode 100644 vendor/google.golang.org/grpc/keepalive/keepalive.go
delete mode 100644 vendor/google.golang.org/grpc/metadata/metadata.go
delete mode 100644 vendor/google.golang.org/grpc/peer/peer.go
delete mode 100644 vendor/google.golang.org/grpc/picker_wrapper.go
delete mode 100644 vendor/google.golang.org/grpc/pickfirst.go
delete mode 100644 vendor/google.golang.org/grpc/preloader.go
delete mode 100644 vendor/google.golang.org/grpc/regenerate.sh
delete mode 100644 vendor/google.golang.org/grpc/resolver/dns/dns_resolver.go
delete mode 100644 vendor/google.golang.org/grpc/resolver/map.go
delete mode 100644 vendor/google.golang.org/grpc/resolver/resolver.go
delete mode 100644 vendor/google.golang.org/grpc/resolver_wrapper.go
delete mode 100644 vendor/google.golang.org/grpc/rpc_util.go
delete mode 100644 vendor/google.golang.org/grpc/server.go
delete mode 100644 vendor/google.golang.org/grpc/service_config.go
delete mode 100644 vendor/google.golang.org/grpc/serviceconfig/serviceconfig.go
delete mode 100644 vendor/google.golang.org/grpc/shared_buffer_pool.go
delete mode 100644 vendor/google.golang.org/grpc/stats/handlers.go
delete mode 100644 vendor/google.golang.org/grpc/stats/stats.go
delete mode 100644 vendor/google.golang.org/grpc/status/status.go
delete mode 100644 vendor/google.golang.org/grpc/stream.go
delete mode 100644 vendor/google.golang.org/grpc/tap/tap.go
delete mode 100644 vendor/google.golang.org/grpc/trace.go
delete mode 100644 vendor/google.golang.org/grpc/trace_notrace.go
delete mode 100644 vendor/google.golang.org/grpc/trace_withtrace.go
delete mode 100644 vendor/google.golang.org/grpc/version.go
delete mode 100644 vendor/google.golang.org/grpc/vet.sh
delete mode 100644 vendor/google.golang.org/protobuf/encoding/protojson/decode.go
delete mode 100644 vendor/google.golang.org/protobuf/encoding/protojson/doc.go
delete mode 100644 vendor/google.golang.org/protobuf/encoding/protojson/encode.go
delete mode 100644 vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go
delete mode 100644 vendor/google.golang.org/protobuf/internal/encoding/json/decode.go
delete mode 100644 vendor/google.golang.org/protobuf/internal/encoding/json/decode_number.go
delete mode 100644 vendor/google.golang.org/protobuf/internal/encoding/json/decode_string.go
delete mode 100644 vendor/google.golang.org/protobuf/internal/encoding/json/decode_token.go
delete mode 100644 vendor/google.golang.org/protobuf/internal/encoding/json/encode.go
delete mode 100644 vendor/google.golang.org/protobuf/protoadapt/convert.go
delete mode 100644 vendor/google.golang.org/protobuf/types/dynamicpb/dynamic.go
delete mode 100644 vendor/google.golang.org/protobuf/types/dynamicpb/types.go
delete mode 100644 vendor/google.golang.org/protobuf/types/known/durationpb/duration.pb.go
delete mode 100644 vendor/google.golang.org/protobuf/types/known/emptypb/empty.pb.go
delete mode 100644 vendor/google.golang.org/protobuf/types/known/structpb/struct.pb.go
delete mode 100644 vendor/google.golang.org/protobuf/types/known/wrapperspb/wrappers.pb.go
diff --git a/go.mod b/go.mod
index 8de55d04b..e98360572 100644
--- a/go.mod
+++ b/go.mod
@@ -17,7 +17,7 @@ require (
github.com/nmstate/kubernetes-nmstate/api v0.0.0-20240605150941-df565dd7bf35
github.com/onsi/ginkgo/v2 v2.19.0
github.com/onsi/gomega v1.33.1
- github.com/openshift-kni/eco-goinfra v0.0.0-20240809133315-01bbbf4c9ede // latest
+ github.com/openshift-kni/eco-goinfra v0.0.0-20240809150049-0634a7a9fb27 // latest
github.com/openshift-kni/k8sreporter v1.0.5
github.com/openshift/api v3.9.1-0.20191111211345-a27ff30ebf09+incompatible
github.com/openshift/cluster-nfd-operator v0.0.0-20240604082319-19bf50784aa7
@@ -59,7 +59,6 @@ require (
require (
github.com/PaesslerAG/gval v1.0.0 // indirect
github.com/PaesslerAG/jsonpath v0.1.1 // indirect
- github.com/antlr4-go/antlr/v4 v4.13.0 // indirect
github.com/expr-lang/expr v1.16.5 // indirect
github.com/go-jose/go-jose/v4 v4.0.1 // indirect
github.com/go-task/slim-sprig/v3 v3.0.0 // indirect
@@ -134,7 +133,6 @@ require (
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/google/btree v1.1.2 // indirect
- github.com/google/cel-go v0.18.2 // indirect
github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49 // indirect
github.com/google/go-cmp v0.6.0 // indirect
github.com/google/gofuzz v1.2.0 // indirect
@@ -143,8 +141,6 @@ require (
github.com/google/uuid v1.6.0 // indirect
github.com/grafana-operator/grafana-operator/v4 v4.10.1 // indirect
github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79 // indirect
- github.com/h2non/filetype v1.1.3 // indirect
- github.com/h2non/go-is-svg v0.0.0-20160927212452-35e8c4b0612c // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
@@ -185,8 +181,6 @@ require (
github.com/openshift/custom-resource-status v1.1.3-0.20220503160415-f2fdb4999d87 // indirect
github.com/openshift/library-go v0.0.0-20240419113445-f1541d628746 // indirect
github.com/openshift/ptp-operator v0.0.0-20240404165119-29a3d7b3d60b // indirect
- github.com/operator-framework/operator-lifecycle-manager v0.28.0 // indirect
- github.com/operator-framework/operator-registry v1.41.0 // indirect
github.com/otiai10/copy v1.14.0 // indirect
github.com/peterbourgon/diskv v2.0.1+incompatible // indirect
github.com/pkg/errors v0.9.1 // indirect
@@ -206,7 +200,6 @@ require (
github.com/spf13/cast v1.6.0 // indirect
github.com/spf13/cobra v1.8.0 // indirect
github.com/spf13/pflag v1.0.6-0.20210604193023-d5e0c0615ace // indirect
- github.com/stoewer/go-strcase v1.3.0 // indirect
github.com/thoas/go-funk v0.9.2 // indirect
github.com/vincent-petithory/dataurl v1.0.0 // indirect
github.com/xlab/treeprint v1.2.0 // indirect
@@ -225,9 +218,6 @@ require (
golang.org/x/time v0.5.0 // indirect
golang.org/x/tools v0.22.0 // indirect
gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect
- google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be // indirect
- google.golang.org/grpc v1.63.2 // indirect
google.golang.org/protobuf v1.34.1 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect
diff --git a/go.sum b/go.sum
index 582e5a66a..80a5bbce3 100644
--- a/go.sum
+++ b/go.sum
@@ -754,8 +754,6 @@ cloud.google.com/go/workflows v1.11.1/go.mod h1:Z+t10G1wF7h8LgdY/EmRcQY8ptBD/nvo
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc=
-github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU=
-github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
github.com/Azure/azure-sdk-for-go v62.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E=
github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25UVaW/CKtUDjefjrs0SPonmDGUVOYP0=
@@ -772,8 +770,6 @@ github.com/Azure/go-autorest/autorest/validation v0.3.1/go.mod h1:yhLgjC0Wda5DYX
github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8=
github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
-github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/IBM/keyprotect-go-client v0.5.1/go.mod h1:5TwDM/4FRJq1ZOlwQL1xFahLWQ3TveR88VmL1u3njyI=
github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c/go.mod h1:X0CRv0ky0k6m906ixxpzmDRLvX58TFUKS2eePweuyxk=
@@ -788,10 +784,6 @@ github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0
github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
-github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
-github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
-github.com/Microsoft/hcsshim v0.12.0-rc.3 h1:5GNGrobGs/sN/0nFO21W9k4lFn+iXXZAE8fCZbmdRak=
-github.com/Microsoft/hcsshim v0.12.0-rc.3/go.mod h1:WuNfcaYNaw+KpCEsZCIM6HCEmu0c5HfXpi+dDSmveP0=
github.com/NVIDIA/gpu-operator v1.11.1 h1:MuM3nFga8dcK0krgxarYaSmpGSnFYOl2zBQRBNvHKD0=
github.com/NVIDIA/gpu-operator v1.11.1/go.mod h1:cKk+zdpWTj5fR7nRri+DzFoW8aG5iD3RkoYOMCMcNEU=
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
@@ -822,8 +814,6 @@ github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8V
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df/go.mod h1:pSwJ0fSY5KhvocuWSx4fz3BA8OrA1bQn+K1Eli3BRwM=
-github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI=
-github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g=
github.com/apache/arrow/go/v10 v10.0.1/go.mod h1:YvhnlEePVnBS4+0z3fhPfUy7W1Ikj0Ih0vcRo/gZ1M0=
github.com/apache/arrow/go/v11 v11.0.0/go.mod h1:Eg5OsL5H+e299f7u5ssuXsuHQVEGC4xei5aX110hRiI=
github.com/apache/arrow/go/v12 v12.0.0/go.mod h1:d+tV/eHZZ7Dz7RPrFKtPK02tpr+c9/PEd/zm8mDS9Vg=
@@ -860,8 +850,6 @@ github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
-github.com/bshuster-repo/logrus-logstash-hook v1.0.0 h1:e+C0SB5R1pu//O4MQ3f9cFuPGoOVeF2fE4Og9otCc70=
-github.com/bshuster-repo/logrus-logstash-hook v1.0.0/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk=
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
github.com/cavaliergopher/cpio v1.0.1 h1:KQFSeKmZhv0cr+kawA3a0xTQCU4QxXF1vhU7P7av2KM=
@@ -869,15 +857,12 @@ github.com/cavaliergopher/cpio v1.0.1/go.mod h1:pBdaqQjnvXxdS/6CvNDwIANIFSP0xRKI
github.com/cavaliergopher/grab/v3 v3.0.1 h1:4z7TkBfmPjmLAAmkkAZNX/6QJ1nNFdv3SdIHXju0Fr4=
github.com/cavaliergopher/grab/v3 v3.0.1/go.mod h1:1U/KNnD+Ft6JJiYoYBAimKH2XrYptb8Kl3DFGmsjpq4=
github.com/cenkalti/backoff v2.1.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
-github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4=
github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
github.com/cenkalti/backoff/v3 v3.0.0/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs=
github.com/cenkalti/backoff/v3 v3.2.2 h1:cfUAAO3yvKMYKPrvhDuHSwQnhZNk/RMHKdZqKTxfm6M=
github.com/cenkalti/backoff/v3 v3.2.2/go.mod h1:cIeZDE3IrqwwJl6VUwCN6trj1oXrTS4rc0ij+ULvLYs=
github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw=
github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
-github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
-github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.3.0/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw=
@@ -911,35 +896,10 @@ github.com/cncf/xds/go v0.0.0-20230310173818-32f1caf87195/go.mod h1:eXthEFrGJvWH
github.com/cncf/xds/go v0.0.0-20230428030218-4003588d1b74/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cockroachdb/datadriven v1.0.2/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU=
-github.com/containerd/cgroups/v3 v3.0.2 h1:f5WFqIVSgo5IZmtTT3qVBo6TzI1ON6sycSBKkymb9L0=
-github.com/containerd/cgroups/v3 v3.0.2/go.mod h1:JUgITrzdFqp42uI2ryGA+ge0ap/nxzYgkGmIcetmErE=
-github.com/containerd/containerd v1.7.16 h1:7Zsfe8Fkj4Wi2My6DXGQ87hiqIrmOXolm72ZEkFU5Mg=
-github.com/containerd/containerd v1.7.16/go.mod h1:NL49g7A/Fui7ccmxV6zkBWwqMgmMxFWzujYCc+JLt7k=
-github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8=
-github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ=
-github.com/containerd/errdefs v0.1.0 h1:m0wCRBiu1WJT/Fr+iOoQHMQS/eP5myQ8lCv4Dz5ZURM=
-github.com/containerd/errdefs v0.1.0/go.mod h1:YgWiiHtLmSeBrvpw+UfPijzbLaB77mEG1WwJTDETIV0=
-github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
-github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
-github.com/containerd/ttrpc v1.2.3 h1:4jlhbXIGvijRtNC8F/5CpuJZ7yKOBFGFOOXg1bkISz0=
-github.com/containerd/ttrpc v1.2.3/go.mod h1:ieWsXucbb8Mj9PH0rXCw1i8IunRbbAiDkpXkbfflWBM=
-github.com/containerd/typeurl/v2 v2.1.1 h1:3Q4Pt7i8nYwy2KmQWIw2+1hTvwTE/6w9FqcttATPO/4=
-github.com/containerd/typeurl/v2 v2.1.1/go.mod h1:IDp2JFvbwZ31H8dQbEIY7sDl2L3o3HZj1hsSQlywkQ0=
github.com/containernetworking/cni v1.2.1-0.20240513144334-1e7858f9879a h1:+ZeeuLIKTxF+PrcvI7j31Yk7/bWp1MZFVebVjEYy1ms=
github.com/containernetworking/cni v1.2.1-0.20240513144334-1e7858f9879a/go.mod h1:RbSTl6BZ50vy9XlUsGY3U0OF2H7nwaQguD0dcNsPSxo=
github.com/containernetworking/plugins v1.1.0 h1:kTIldaDo9SlbQsjhUKvDx0v9q7zyIFJH/Rm9F4xRBro=
github.com/containernetworking/plugins v1.1.0/go.mod h1:Sr5TH/eBsGLXK/h71HeLfX19sZPp3ry5uHSkI4LPxV8=
-github.com/containers/common v0.58.2 h1:5nu9lQz4QNSgovNk7NRk33SkqkVNKYoXh7L6gXmACow=
-github.com/containers/common v0.58.2/go.mod h1:l3vMqanJGj7tZ3W/i76gEJ128VXgFUO1tLaohJXPvdk=
-github.com/containers/image v3.0.2+incompatible h1:B1lqAE8MUPCrsBLE86J0gnXleeRq8zJnQryhiiGQNyE=
-github.com/containers/image/v5 v5.31.0 h1:eDFVlz5XaYICxe9dXpf23htEKvyosgkl62mJlIATXE4=
-github.com/containers/image/v5 v5.31.0/go.mod h1:5QfOqSackPkSbF7Qxc1DnVNnPJKQ+KWLkfEfDpK590Q=
-github.com/containers/libtrust v0.0.0-20230121012942-c1716e8a8d01 h1:Qzk5C6cYglewc+UyGf6lc8Mj2UaPTHy/iF2De0/77CA=
-github.com/containers/libtrust v0.0.0-20230121012942-c1716e8a8d01/go.mod h1:9rfv8iPl1ZP7aqh9YA68wnZv2NUDbXdcdPHVz0pFbPY=
-github.com/containers/ocicrypt v1.1.9 h1:2Csfba4jse85Raxk5HIyEk8OwZNjRvfkhEGijOjIdEM=
-github.com/containers/ocicrypt v1.1.9/go.mod h1:dTKx1918d8TDkxXvarscpNVY+lyPakPNFN4jwA9GBys=
-github.com/containers/storage v1.54.0 h1:xwYAlf6n9OnIlURQLLg3FYHbO74fQ/2W2N6EtQEUM4I=
-github.com/containers/storage v1.54.0/go.mod h1:PlMOoinRrBSnhYODLxt4EXl0nmJt+X0kjG0Xdt9fMTw=
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/coreos/bbolt v1.3.3/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
@@ -983,29 +943,7 @@ github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
-github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
-github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
-github.com/distribution/distribution/v3 v3.0.0-alpha.1 h1:jn7I1gvjOvmLztH1+1cLiUFud7aeJCIQcgzugtwjyJo=
-github.com/distribution/distribution/v3 v3.0.0-alpha.1/go.mod h1:LCp4JZp1ZalYg0W/TN05jarCQu+h4w7xc7ZfQF4Y/cY=
-github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
-github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
-github.com/docker/cli v26.1.3+incompatible h1:bUpXT/N0kDE3VUHI2r5VMsYQgi38kYuoC0oL9yt3lqc=
-github.com/docker/cli v26.1.3+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
-github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
-github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
-github.com/docker/docker v26.1.3+incompatible h1:lLCzRbrVZrljpVNobJu1J2FHk8V0s4BawoZippkc+xo=
-github.com/docker/docker v26.1.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
-github.com/docker/docker-credential-helpers v0.8.1 h1:j/eKUktUltBtMzKqmfLB0PAgqYyMHOp5vfsD1807oKo=
-github.com/docker/docker-credential-helpers v0.8.1/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M=
-github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
-github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
-github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c h1:+pKlWGMw7gf6bQ+oDZB4KHQFypsfjYlq/C4rfL7D3g8=
-github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c/go.mod h1:Uw6UezgYA44ePAFQYUehOuCzmy5zmg/+nl2ZfMWGkpA=
-github.com/docker/go-metrics v0.0.1 h1:AgB/0SvBxihN0X8OR4SjsblXkbMvalQ8cjmtKQ2rQV8=
-github.com/docker/go-metrics v0.0.1/go.mod h1:cG1hvH2utMXtqgqqYE9plW6lDxS3/5ayHzueweSI3Vw=
-github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
-github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
@@ -1050,8 +988,6 @@ github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5Kwzbycv
github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
-github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
-github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
@@ -1098,7 +1034,6 @@ github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbV
github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY=
github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
-github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-logr/zapr v1.2.3/go.mod h1:eIauM6P8qSvTw5o2ez6UEAfGjQKrxQTl5EoK+Qa2oG4=
github.com/go-logr/zapr v1.3.0 h1:XGdV8XW8zdwFiwOA2Dryh1gj2KRQyOOoNmBy4EplIcQ=
@@ -1191,8 +1126,6 @@ github.com/golang-jwt/jwt/v4 v4.2.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzw
github.com/golang-jwt/jwt/v4 v4.3.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
github.com/golang-jwt/jwt/v4 v4.4.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
-github.com/golang-migrate/migrate/v4 v4.17.1 h1:4zQ6iqL6t6AiItphxJctQb3cFqWiSpMnX7wLTPnnYO4=
-github.com/golang-migrate/migrate/v4 v4.17.1/go.mod h1:m8hinFyWBn0SA4QKHuKh175Pm9wjmxj3S2Mia7dbXzM=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/glog v1.0.0/go.mod h1:EWib/APOK0SL3dFbYqvxE3UYd8E6s1ouQ7iEp/0LWV4=
@@ -1243,8 +1176,6 @@ github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9
github.com/google/btree v1.1.2 h1:xf4v41cLI2Z6FxbKm+8Bu+m8ifhj15JuZ9sa0jZCMUU=
github.com/google/btree v1.1.2/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4=
github.com/google/cel-go v0.17.7/go.mod h1:HXZKzB0LXqer5lHHgfWAnlYwJaQBDKMjxjulNQzhwhY=
-github.com/google/cel-go v0.18.2 h1:L0B6sNBSVmt0OyECi8v6VOS74KOc9W/tLiWKfZABvf4=
-github.com/google/cel-go v0.18.2/go.mod h1:kWcIzTsPX0zmQ+H3TirHstLLf9ep5QTsZBN9u4dOYLg=
github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/gnostic v0.5.7-v3refs/go.mod h1:73MKFl6jIHelAJNaBGFzt3SPtZULs9dYrGFt8OiIsHQ=
github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U=
@@ -1329,11 +1260,7 @@ github.com/googleapis/gnostic v0.5.1/go.mod h1:6U4PtQXGIEt/Z3h5MAT7FNofLnw9vXk2c
github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4=
github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
-github.com/gorilla/handlers v1.5.2 h1:cLTUSsNkgcwhgRqvCNmdbRWG0A3N4F+M2nWKdScwyEE=
-github.com/gorilla/handlers v1.5.2/go.mod h1:dX+xVpaxdSw+q0Qek8SSsl3dfMk3jNddUkMzo0GtH0w=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
-github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
-github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
@@ -1349,17 +1276,10 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmg
github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
-github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9Kz84aCaG7AsGZnLjhHbUqwPg=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 h1:/c3QmbOGMGTOumP2iT/rCwB7b0QDGLKzqOmktBjT+Is=
-github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1/go.mod h1:5SN9VR2LTsRFsrEC6FHgRbTWrTHu6tqPeKxEQv15giM=
-github.com/h2non/filetype v1.1.3 h1:FKkx9QbD7HR/zjK1Ia5XiBsq9zdLi5Kf3zGyFTAFkGg=
-github.com/h2non/filetype v1.1.3/go.mod h1:319b3zT68BvV+WRj7cwy856M2ehB3HqNOt6sy1HndBY=
-github.com/h2non/go-is-svg v0.0.0-20160927212452-35e8c4b0612c h1:fEE5/5VNnYUoBOj2I9TP8Jc+a7lge3QWn9DKE7NCwfc=
-github.com/h2non/go-is-svg v0.0.0-20160927212452-35e8c4b0612c/go.mod h1:ObS/W+h8RYb1Y7fYivughjxojTmIu5iAIjSrSLCLeqE=
github.com/h2non/parth v0.0.0-20190131123155-b4df798d6542/go.mod h1:Ow0tF8D4Kplbc8s8sSb3V2oUCygFHVp8gC3Dn6U4MNI=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
@@ -1404,11 +1324,6 @@ github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c=
-github.com/hashicorp/golang-lru/arc/v2 v2.0.5 h1:l2zaLDubNhW4XO3LnliVj0GXO3+/CGNJAg1dcN2Fpfw=
-github.com/hashicorp/golang-lru/arc/v2 v2.0.5/go.mod h1:ny6zBSQZi2JxIeYcv7kt2sH2PXJtirBN7RDhRpxPkxU=
-github.com/hashicorp/golang-lru/v2 v2.0.5 h1:wW7h1TG88eUIJ2i69gaE3uNVtEPIagzhGvHgwfx2Vm4=
-github.com/hashicorp/golang-lru/v2 v2.0.5/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/hcl v1.0.1-vault-5 h1:kI3hhbbyzr4dldA8UdTb7ZlVVlI2DACdCfz31RPDgJM=
github.com/hashicorp/hcl v1.0.1-vault-5/go.mod h1:XYhtn6ijBSAj6n4YqAaf7RBPS4I06AItNorpy+MoQNM=
@@ -1486,8 +1401,6 @@ github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+o
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
-github.com/klauspost/compress v1.17.8 h1:YcnTYrq7MikUT7k0Yb5eceMmALQPYBW/Xltxn0NAMnU=
-github.com/klauspost/compress v1.17.8/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
@@ -1545,8 +1458,6 @@ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWE
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-sqlite3 v1.14.14/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
-github.com/mattn/go-sqlite3 v2.0.3+incompatible h1:gXHsfypPkaMZrKbD5209QV9jbUTJKjyR5WD3HYQSd+U=
-github.com/mattn/go-sqlite3 v2.0.3+incompatible/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
github.com/metal3-io/baremetal-operator/apis v0.6.1 h1:CL5paLPWn0VEAcdtGaKKNMCR9AVYXjSuiO83vwlOHRI=
@@ -1578,16 +1489,8 @@ github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR
github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zxSIeXaQ=
github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
-github.com/moby/locker v1.0.1 h1:fOXqR41zeveg4fFODix+1Ch4mj/gT0NE1XJbp/epuBg=
-github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc=
github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8=
github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c=
-github.com/moby/sys/mountinfo v0.7.1 h1:/tTvQaSJRr2FshkhXiIpux6fQ2Zvc4j7tAhMTStAG2g=
-github.com/moby/sys/mountinfo v0.7.1/go.mod h1:IJb6JQeOklcdMU9F5xQ8ZALD+CUr5VlGpwtX+VE0rpI=
-github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc=
-github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo=
-github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg=
-github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU=
github.com/moby/term v0.0.0-20221205130635-1aeaba878587/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
@@ -1669,16 +1572,10 @@ github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3ev
github.com/onsi/gomega v1.29.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ=
github.com/onsi/gomega v1.33.1 h1:dsYjIxxSR755MDmKVsaFQTE22ChNBcuuTWgkUDSubOk=
github.com/onsi/gomega v1.33.1/go.mod h1:U4R44UsT+9eLIaYRB2a5qajjtQYn0hauxvRm16AVYg0=
-github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
-github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
-github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
-github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM=
-github.com/opencontainers/runtime-spec v1.2.0 h1:z97+pHb3uELt/yiAWD691HNHQIF07bE7dzrbT927iTk=
-github.com/opencontainers/runtime-spec v1.2.0/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0=
github.com/openshift-kni/cluster-group-upgrades-operator v0.0.0-20240423171335-f07cdbf8af2c h1:wAPCXsnAXOUAJ5DYlVgGUcV9YBSiVlH4o9tbQ9py8ZY=
github.com/openshift-kni/cluster-group-upgrades-operator v0.0.0-20240423171335-f07cdbf8af2c/go.mod h1:hkzqKpmQvh7vgPx8Hw6IExJorKPM0dEeJdOXjIW3gNw=
-github.com/openshift-kni/eco-goinfra v0.0.0-20240809133315-01bbbf4c9ede h1:v6GQ4N7NxraI01w77+V+y7LRIZpExkCJq9lj5BowU8I=
-github.com/openshift-kni/eco-goinfra v0.0.0-20240809133315-01bbbf4c9ede/go.mod h1:8ZRjyXSCF76jL77Cd5EQucWPIUTabUM3aI5JblC/vMk=
+github.com/openshift-kni/eco-goinfra v0.0.0-20240809150049-0634a7a9fb27 h1:QdUSvJsUF0V/l0j8GWubibMgvc5YVet4YBCylJim29o=
+github.com/openshift-kni/eco-goinfra v0.0.0-20240809150049-0634a7a9fb27/go.mod h1:tqKhg1yCCQCQanRagr2t2AaYu4+8dtfDam95KoQ6xvA=
github.com/openshift-kni/k8sreporter v1.0.5 h1:1GYBc/BTZyVoXilHef43v9A8BSzw700zAPZ6zsZvo6Y=
github.com/openshift-kni/k8sreporter v1.0.5/go.mod h1:fg8HI9yxiKAi6UzR6NTtrmQmA2WKzUqmkRUHwQ1+Bj8=
github.com/openshift-kni/lifecycle-agent v0.0.0-20240606123201-0c45cd13c2f1 h1:y+0Ecc+MSZA/GNS3VOpKq+XK9x8qoNA7TlyHvqbVbpw=
@@ -1715,10 +1612,6 @@ github.com/openshift/sriov-network-operator v0.0.0-20240508132640-2b61056c9758/g
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/operator-framework/api v0.23.0 h1:kHymOwcHBpBVujT49SKOCd4EVG7Odwj4wl3NbOR2LLA=
github.com/operator-framework/api v0.23.0/go.mod h1:oKcFOz+Xc1UhMi2Pzcp6qsO7wjS4r+yP7EQprQBXrfM=
-github.com/operator-framework/operator-lifecycle-manager v0.28.0 h1:4mgPKwDp8p3giQtHEvunNVcYlgY17vqx/RKgwjTc/2w=
-github.com/operator-framework/operator-lifecycle-manager v0.28.0/go.mod h1:JfufNEB+v2NVuVshDN1tuYjv+zvXvFYrWiINQA3Anok=
-github.com/operator-framework/operator-registry v1.41.0 h1:MFLrMIce1biq6Gd0gsDnjpvYMXwDjoM7I5YdtvUYOLw=
-github.com/operator-framework/operator-registry v1.41.0/go.mod h1:hvOar2ikD/BhyG+I6gGHe85cdqup7NV5b8LkP18H7wU=
github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU=
github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w=
github.com/otiai10/mint v1.5.1 h1:XaPLeE+9vGbuyEHem1JNk3bYc7KKqyI/na0/mLd/Kks=
@@ -1732,8 +1625,6 @@ github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/9
github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI=
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
-github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5 h1:Ii+DKncOVM8Cu1Hc+ETb5K+23HdAMvESYE3ZJ5b5cMI=
-github.com/phayes/freeport v0.0.0-20220201140144-74d24b5ae9f5/go.mod h1:iIss55rKnNBTvrwdmkUpLnDpZoAHvWaiq5+iMmen4AE=
github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY=
github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
@@ -1810,12 +1701,6 @@ github.com/red-hat-storage/ocs-operator v0.4.13 h1:+FdRGqgewn7v22LvhQUV8iSzLm8d6
github.com/red-hat-storage/ocs-operator v0.4.13/go.mod h1:92CGJGBXykejC89+h8s0pohpb0JRwfwPE9IwUNF5+sY=
github.com/red-hat-storage/odf-operator v0.0.0-20240703093545-0e22236e2160 h1:g54q5MJklMCVUwW4OJoCuaF94HiEHQj5pzcEau/o64U=
github.com/red-hat-storage/odf-operator v0.0.0-20240703093545-0e22236e2160/go.mod h1:uJqcyW9de0frTa708MZR++0SgLvIHiwL793tY6EAuqM=
-github.com/redis/go-redis/extra/rediscmd/v9 v9.0.5 h1:EaDatTxkdHG+U3Bk4EUr+DZ7fOGwTfezUiUJMaIcaho=
-github.com/redis/go-redis/extra/rediscmd/v9 v9.0.5/go.mod h1:fyalQWdtzDBECAQFBJuQe5bzQ02jGd5Qcbgb97Flm7U=
-github.com/redis/go-redis/extra/redisotel/v9 v9.0.5 h1:EfpWLLCyXw8PSM2/XNJLjI3Pb27yVE+gIAfeqp8LUCc=
-github.com/redis/go-redis/extra/redisotel/v9 v9.0.5/go.mod h1:WZjPDy7VNzn77AAfnAfVjZNvfJTYfPetfZk5yoSTLaQ=
-github.com/redis/go-redis/v9 v9.5.1 h1:H1X4D3yHPaYrkL5X06Wh6xNVM/pX0Ft4RV0vMGvLBh8=
-github.com/redis/go-redis/v9 v9.5.1/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M=
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rh-ecosystem-edge/kernel-module-management v0.0.0-20240605101434-e1de2798b3c4 h1:UJTG3zw+Y8MJckZV2JgxTij8mwjWSNc7il+HpVWCXKg=
@@ -1894,8 +1779,6 @@ github.com/spf13/viper v1.7.0/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5q
github.com/stmcginnis/gofish v0.15.1-0.20231121142100-22a60a77be91 h1:WmABtU8y6kTgzoVUn3FWCQGAfyodve3uz3xno28BrRs=
github.com/stmcginnis/gofish v0.15.1-0.20231121142100-22a60a77be91/go.mod h1:BLDSFTp8pDlf/xDbLZa+F7f7eW0E/CHCboggsu8CznI=
github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8=
-github.com/stoewer/go-strcase v1.3.0 h1:g0eASXYtp+yvN9fK8sH94oCIk0fau9uV1/ZdJ0AVEzs=
-github.com/stoewer/go-strcase v1.3.0/go.mod h1:fAH5hQ5pehh+j3nZfvwdk2RgEgQjAoM8wodgtPmh1xo=
github.com/stolostron/cluster-lifecycle-api v0.0.0-20240109072430-f5fe6043d1f8 h1:DRFh4ML+WuDovJsrdgszqMQ4+qGznlYlX9/pItxWwQ8=
github.com/stolostron/cluster-lifecycle-api v0.0.0-20240109072430-f5fe6043d1f8/go.mod h1:ZNQ3Rttgk4HEreCHfocrhXavLDaUgHbZaUqk5dP8/As=
github.com/stolostron/klusterlet-addon-controller v0.0.0-20240606130554-01338045271a h1:eBHb7E/A7Ev2CnggwCKfGpV7nTltmQhK+QYiMEls+AY=
@@ -1922,8 +1805,6 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
-github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635 h1:kdXcSzyDtseVEc4yCz2qF8ZrQvIDBJLl4S1c3GCXmoI=
-github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/thoas/go-funk v0.9.2 h1:oKlNYv0AY5nyf9g+/GhMgS/UO2ces0QRdPKwkhY3VCk=
github.com/thoas/go-funk v0.9.2/go.mod h1:+IWnUfUmFO1+WVYQWQtIJHeRRdaIyyYglZN7xzUPe4Q=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
@@ -1965,8 +1846,6 @@ github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.8/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
-go.etcd.io/bbolt v1.3.10 h1:+BqfJTcCzTItrop8mq/lbzL8wSGtj94UO/3U31shqG0=
-go.etcd.io/bbolt v1.3.10/go.mod h1:bK3UQLPJZly7IlNmV7uVHJDxfe5aK9Ll93e/74Y9oEQ=
go.etcd.io/etcd/api/v3 v3.5.10/go.mod h1:TidfmT4Uycad3NM/o25fG3J07odo4GBB9hoxaodFCtI=
go.etcd.io/etcd/client/pkg/v3 v3.5.10/go.mod h1:DYivfIviIuQ8+/lCq4vcxuseg2P2XbHygkKwFo9fc8U=
go.etcd.io/etcd/client/v2 v2.305.10/go.mod h1:m3CKZi69HzilhVqtPDcjhSGp+kA1OmbNn0qamH80xjA=
@@ -1987,65 +1866,32 @@ go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk=
go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E=
-go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
-go.opentelemetry.io/contrib/exporters/autoexport v0.46.1 h1:ysCfPZB9AjUlMa1UHYup3c9dAOCMQX/6sxSfPBUoxHw=
-go.opentelemetry.io/contrib/exporters/autoexport v0.46.1/go.mod h1:ha0aiYm+DOPsLHjh0zoQ8W8sLT+LJ58J3j47lGpSLrU=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.25.0/go.mod h1:E5NNboN0UqSAki0Atn9kVwaN7I+l25gGxDqBueo/74E=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.42.0/go.mod h1:5z+/ZWJQKXa9YT34fQNx5K8Hd1EoIhvtUygUQPqEOgQ=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.44.0/go.mod h1:SeQhzAEccGVZVEy7aH87Nh0km+utSpo1pTv6eMMop48=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
-go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
go.opentelemetry.io/otel v1.0.1/go.mod h1:OPEOD4jIT2SlZPMmwT6FqZz2C0ZNdQqiWcoK6M0SNFU=
go.opentelemetry.io/otel v1.16.0/go.mod h1:vl0h9NUa1D5s1nv3A5vZOYWn8av4K8Ml6JDeHrT/bx4=
go.opentelemetry.io/otel v1.18.0/go.mod h1:9lWqYO0Db579XzVuCKFNPDl4s73Voa+zEck3wHaAYQI=
go.opentelemetry.io/otel v1.19.0/go.mod h1:i0QyjOq3UPoTzff0PJB2N66fb4S0+rSbSB15/oyH9fY=
-go.opentelemetry.io/otel v1.26.0 h1:LQwgL5s/1W7YiiRwxf03QGnWLb2HW4pLiAhaA5cZXBs=
-go.opentelemetry.io/otel v1.26.0/go.mod h1:UmLkJHUAidDval2EICqBMbnAd0/m2vmpf/dAM+fvFs4=
-go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.26.0 h1:+hm+I+KigBy3M24/h1p/NHkUx/evbLH0PNcjpMyCHc4=
-go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetricgrpc v1.26.0/go.mod h1:NjC8142mLvvNT6biDpaMjyz78kyEHIwAJlSX0N9P5KI=
-go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.26.0 h1:HGZWGmCVRCVyAs2GQaiHQPbDHo+ObFWeUEOd+zDnp64=
-go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.26.0/go.mod h1:SaH+v38LSCHddyk7RGlU9uZyQoRrKao6IBnJw6Kbn+c=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.0.1/go.mod h1:Kv8liBeVNFkkkbilbgWRpV+wWuu+H5xdOT6HAgd30iw=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.23.1 h1:o8iWeVFa1BcLtVEV0LzrCxV2/55tB3xLxADr6Kyoey4=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.23.1/go.mod h1:SEVfdK4IoBnbT2FXNM/k8yC08MrfbhWk3U4ljM8B3HE=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.0.1/go.mod h1:xOvWoTOrQjxjW61xtOmD/WKGRYb/P4NzRo3bs65U6Rk=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.19.0/go.mod h1:0+KuTDyKL4gjKCF75pHOX4wuzYDUZYfAQdSu43o+Z2I=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.23.1 h1:p3A5+f5l9e/kuEBwLOrnpkIDHQFlHmbiVxMURWRK6gQ=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.23.1/go.mod h1:OClrnXUjBqQbInvjJFjYSnMxBSCXBF8r3b34WqjiIrQ=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0 h1:digkEZCJWobwBqMwC0cwCq8/wkkRy/OowZg5OArWZrM=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.21.0/go.mod h1:/OpE/y70qVkndM0TrxT4KBoN3RsFZP0QaofcfYrj76I=
-go.opentelemetry.io/otel/exporters/prometheus v0.44.0 h1:08qeJgaPC0YEBu2PQMbqU3rogTlyzpjhCI2b58Yn00w=
-go.opentelemetry.io/otel/exporters/prometheus v0.44.0/go.mod h1:ERL2uIeBtg4TxZdojHUwzZfIFlUIjZtxubT5p4h1Gjg=
-go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.44.0 h1:dEZWPjVN22urgYCza3PXRUGEyCB++y1sAqm6guWFesk=
-go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v0.44.0/go.mod h1:sTt30Evb7hJB/gEk27qLb1+l9n4Tb8HvHkR0Wx3S6CU=
-go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.21.0 h1:VhlEQAPp9R1ktYfrPk5SOryw1e9LDDTZCbIPFrho0ec=
-go.opentelemetry.io/otel/exporters/stdout/stdouttrace v1.21.0/go.mod h1:kB3ufRbfU+CQ4MlUcqtW8Z7YEOBeK2DJ6CmR5rYYF3E=
go.opentelemetry.io/otel/metric v1.16.0/go.mod h1:QE47cpOmkwipPiefDwo2wDzwJrlfxxNYodqc4xnGCo4=
go.opentelemetry.io/otel/metric v1.18.0/go.mod h1:nNSpsVDjWGfb7chbRLUNW+PBNdcSTHD4Uu5pfFMOI0k=
go.opentelemetry.io/otel/metric v1.19.0/go.mod h1:L5rUsV9kM1IxCj1MmSdS+JQAcVm319EUrDVLrt7jqt8=
-go.opentelemetry.io/otel/metric v1.26.0 h1:7S39CLuY5Jgg9CrnA9HHiEjGMF/X2VHvoXGgSllRz30=
-go.opentelemetry.io/otel/metric v1.26.0/go.mod h1:SY+rHOI4cEawI9a7N1A4nIg/nTQXe1ccCNWYOJUrpX4=
go.opentelemetry.io/otel/sdk v1.0.1/go.mod h1:HrdXne+BiwsOHYYkBE5ysIcv2bvdZstxzmCQhxTcZkI=
go.opentelemetry.io/otel/sdk v1.19.0/go.mod h1:NedEbbS4w3C6zElbLdPJKOpJQOrGUJ+GfzpjUvI0v1A=
-go.opentelemetry.io/otel/sdk v1.26.0 h1:Y7bumHf5tAiDlRYFmGqetNcLaVUZmh4iYfmGxtmz7F8=
-go.opentelemetry.io/otel/sdk v1.26.0/go.mod h1:0p8MXpqLeJ0pzcszQQN4F0S5FVjBLgypeGSngLsmirs=
-go.opentelemetry.io/otel/sdk/metric v1.26.0 h1:cWSks5tfriHPdWFnl+qpX3P681aAYqlZHcAyHw5aU9Y=
-go.opentelemetry.io/otel/sdk/metric v1.26.0/go.mod h1:ClMFFknnThJCksebJwz7KIyEDHO+nTB6gK8obLy8RyE=
go.opentelemetry.io/otel/trace v1.0.1/go.mod h1:5g4i4fKLaX2BQpSBsxw8YYcgKpMMSW3x7ZTuYBr3sUk=
go.opentelemetry.io/otel/trace v1.16.0/go.mod h1:Yt9vYq1SdNz3xdjZZK7wcXv1qv2pwLkqr2QVwea0ef0=
go.opentelemetry.io/otel/trace v1.18.0/go.mod h1:T2+SGJGuYZY3bjj5rgh/hN7KIrlpWC5nS8Mjvzckz+0=
go.opentelemetry.io/otel/trace v1.19.0/go.mod h1:mfaSyvGyEJEI0nyV2I4qhNQnbBOUUmYZpYojqMnX2vo=
-go.opentelemetry.io/otel/trace v1.26.0 h1:1ieeAUb4y0TE26jUFrCIXKpTuVK7uJGN9/Z/2LP5sQA=
-go.opentelemetry.io/otel/trace v1.26.0/go.mod h1:4iDxvGDQuUkHve82hJJ8UqrwswHYsZuWCBllGV2U2y0=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.opentelemetry.io/proto/otlp v0.9.0/go.mod h1:1vKfU9rv61e9EVGthD1zNvUbiwPcimSsOPU9brfSHJg=
go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
-go.opentelemetry.io/proto/otlp v1.2.0 h1:pVeZGk7nXDC9O2hncA6nHldxEjm6LByfA2aN8IOkz94=
-go.opentelemetry.io/proto/otlp v1.2.0/go.mod h1:gGpR8txAl5M03pDhMC79G6SdqNV26naRm/KDsgaHD8A=
go.starlark.net v0.0.0-20240123142251-f86470692795 h1:LmbG8Pq7KDGkglKVn8VpZOZj6vb9b8nKEGcg9l03epM=
go.starlark.net v0.0.0-20240123142251-f86470692795/go.mod h1:LcLNIzVOMp4oV+uusnpk+VU+SzXaJakUuBjoCSWH5dM=
go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@@ -2802,8 +2648,6 @@ google.golang.org/genproto v0.0.0-20230629202037-9506855d4529/go.mod h1:xZnkP7mR
google.golang.org/genproto v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:O9kGHb51iE/nOGvQaDUuadVYqovW56s5emA88lQnj6Y=
google.golang.org/genproto v0.0.0-20230711160842-782d3b101e98/go.mod h1:S7mY02OqCJTD0E1OiQy1F72PWFB4bZJ87cAtLPYgDR0=
google.golang.org/genproto v0.0.0-20230803162519-f966b187b2e5/go.mod h1:oH/ZOT02u4kWEp7oYBGYFFkCdKS/uYR9Z7+0/xuuFp8=
-google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY=
-google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo=
google.golang.org/genproto/googleapis/api v0.0.0-20230525234020-1aefcd67740a/go.mod h1:ts19tUU+Z0ZShN1y3aPyq2+O3d5FUNNgT6FtOzmrNn8=
google.golang.org/genproto/googleapis/api v0.0.0-20230525234035-dd9d682886f9/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig=
google.golang.org/genproto/googleapis/api v0.0.0-20230526203410-71b5a4ffd15e/go.mod h1:vHYtlOoi6TsQ3Uk2yxR7NI5z8uoV+3pZtR4jmHIkRig=
@@ -2812,8 +2656,6 @@ google.golang.org/genproto/googleapis/api v0.0.0-20230629202037-9506855d4529/go.
google.golang.org/genproto/googleapis/api v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:mPBs5jNgx2GuQGvFwUvVKqtn6HsUw9nP64BedgvqEsQ=
google.golang.org/genproto/googleapis/api v0.0.0-20230711160842-782d3b101e98/go.mod h1:rsr7RhLuwsDKL7RmgDDCUc6yaGr1iqceVb5Wv6f6YvQ=
google.golang.org/genproto/googleapis/api v0.0.0-20230726155614-23370e0ffb3e/go.mod h1:rsr7RhLuwsDKL7RmgDDCUc6yaGr1iqceVb5Wv6f6YvQ=
-google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda h1:b6F6WIV4xHHD0FA4oIyzU6mHWg2WI2X1RBehwa5QN38=
-google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda/go.mod h1:AHcE/gZH76Bk/ROZhQphlRoWo5xKDEtz3eVEO1LfA8c=
google.golang.org/genproto/googleapis/bytestream v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:ylj+BE99M198VPbBh6A8d9n3w8fChvyLK3wwBOjXBFA=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234015-3fc162c6f38a/go.mod h1:xURIpW9ES5+/GZhnV6beoEtxQrnkRGIfP5VQG2tCBLc=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA=
@@ -2824,8 +2666,6 @@ google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130/go.
google.golang.org/genproto/googleapis/rpc v0.0.0-20230711160842-782d3b101e98/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230731190214-cbb8c96f2d6d/go.mod h1:TUfxEVdsvPg18p6AslUXFoLdpED4oBnGwyqk3dV1XzM=
google.golang.org/genproto/googleapis/rpc v0.0.0-20230822172742-b8732ec3820d/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be h1:LG9vZxsWGOmUKieR8wPAUR3u3MpnYFQZROPIMaXh7/A=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@@ -2873,8 +2713,6 @@ google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpX
google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo=
google.golang.org/grpc v1.58.2/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0=
google.golang.org/grpc v1.58.3/go.mod h1:tgX3ZQDlNJGU96V6yHh1T/JeoBQ2TXdr43YbYSsCJk0=
-google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM=
-google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
@@ -3067,8 +2905,6 @@ rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.28.0/go.mod h1:VHVDI/KrK4fjnV61bE2g3sA7tiETLn8sooImelsCx3Y=
-sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.29.0 h1:/U5vjBbQn3RChhv7P11uhYvCSm5G2GaIi5AIGBS6r4c=
-sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.29.0/go.mod h1:z7+wmGM2dfIiLRfrC6jb5kV2Mq/sK1ZP303cxzkV5Y4=
sigs.k8s.io/container-object-storage-interface-api v0.1.0 h1:8tB6JFQhbQIC1hwGQ+q4+tmSSNfjKemb7bFI6C0CK/4=
sigs.k8s.io/container-object-storage-interface-api v0.1.0/go.mod h1:YiB+i/UGkzqgODDhRG3u7jkbWkQcoUeLEJ7hwOT/2Qk=
sigs.k8s.io/controller-runtime v0.17.5 h1:1FI9Lm7NiOOmBsgTV36/s2XrEFXnO2C4sbg/Zme72Rw=
diff --git a/vendor/github.com/antlr4-go/antlr/v4/.gitignore b/vendor/github.com/antlr4-go/antlr/v4/.gitignore
deleted file mode 100644
index 38ea34ff5..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/.gitignore
+++ /dev/null
@@ -1,18 +0,0 @@
-### Go template
-
-# Binaries for programs and plugins
-*.exe
-*.exe~
-*.dll
-*.so
-*.dylib
-
-# Test binary, built with `go test -c`
-*.test
-
-
-# Go workspace file
-go.work
-
-# No Goland stuff in this repo
-.idea
diff --git a/vendor/github.com/antlr4-go/antlr/v4/LICENSE b/vendor/github.com/antlr4-go/antlr/v4/LICENSE
deleted file mode 100644
index a22292eb5..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/LICENSE
+++ /dev/null
@@ -1,28 +0,0 @@
-Copyright (c) 2012-2023 The ANTLR Project. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
-1. Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
-notice, this list of conditions and the following disclaimer in the
-documentation and/or other materials provided with the distribution.
-
-3. Neither name of copyright holders nor the names of its contributors
-may be used to endorse or promote products derived from this software
-without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
-PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/antlr4-go/antlr/v4/README.md b/vendor/github.com/antlr4-go/antlr/v4/README.md
deleted file mode 100644
index 03e5b83eb..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/README.md
+++ /dev/null
@@ -1,54 +0,0 @@
-[![Go Report Card](https://goreportcard.com/badge/github.com/antlr4-go/antlr?style=flat-square)](https://goreportcard.com/report/github.com/antlr4-go/antlr)
-[![PkgGoDev](https://pkg.go.dev/badge/github.com/antlr4-go/antlr)](https://pkg.go.dev/github.com/antlr4-go/antlr)
-[![Release](https://img.shields.io/github/v/release/antlr4-go/antlr?sort=semver&style=flat-square)](https://github.com/antlr4-go/antlr/releases/latest)
-[![Release](https://img.shields.io/github/go-mod/go-version/antlr4-go/antlr?style=flat-square)](https://github.com/antlr4-go/antlr/releases/latest)
-[![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg?style=flat-square)](https://github.com/antlr4-go/antlr/commit-activity)
-[![License](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
-[![GitHub stars](https://img.shields.io/github/stars/antlr4-go/antlr?style=flat-square&label=Star&maxAge=2592000)](https://github.com/antlr4-go/antlr/stargazers/)
-# ANTLR4 Go Runtime Module Repo
-
-IMPORTANT: Please submit PRs via a clone of the https://github.com/antlr/antlr4 repo, and not here.
-
- - Do not submit PRs or any change requests to this repo
- - This repo is read only and is updated by the ANTLR team to create a new release of the Go Runtime for ANTLR
- - This repo contains the Go runtime that your generated projects should import
-
-## Introduction
-
-This repo contains the official modules for the Go Runtime for ANTLR. It is a copy of the runtime maintained
-at: https://github.com/antlr/antlr4/tree/master/runtime/Go/antlr and is automatically updated by the ANTLR team to create
-the official Go runtime release only. No development work is carried out in this repo and PRs are not accepted here.
-
-The dev branch of this repo is kept in sync with the dev branch of the main ANTLR repo and is updated periodically.
-
-### Why?
-
-The `go get` command is unable to retrieve the Go runtime when it is embedded so
-deeply in the main repo. A `go get` against the `antlr/antlr4` repo, while retrieving the correct source code for the runtime,
-does not correctly resolve tags and creates a reference in your `go.mod` file that is unclear, does not upgrade smoothly, and
-causes confusion.
-
-For instance, the current Go runtime release, which is tagged with v4.13.0 in `antlr/antlr4`, is retrieved by go get as:
-
-```sh
-require (
- github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230219212500-1f9a474cc2dc
-)
-```
-
-Where you would expect to see:
-
-```sh
-require (
- github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.13.0
-)
-```
-
-The decision was taken to create a separate org and repo to hold the official Go runtime for ANTLR, from which
-users can expect `go get` to behave as expected.
-
-
-# Documentation
-Please read the official documentation at: https://github.com/antlr/antlr4/blob/master/doc/index.md for tips on
-migrating existing projects to use the new module location and for information on how to use the Go runtime in
-general.
diff --git a/vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go b/vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go
deleted file mode 100644
index 3bb4fd7c4..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
-Package antlr implements the Go version of the ANTLR 4 runtime.
-
-# The ANTLR Tool
-
-ANTLR (ANother Tool for Language Recognition) is a powerful parser generator for reading, processing, executing,
-or translating structured text or binary files. It's widely used to build languages, tools, and frameworks.
-From a grammar, ANTLR generates a parser that can build parse trees and also generates a listener interface
-(or visitor) that makes it easy to respond to the recognition of phrases of interest.
-
-# Go Runtime
-
-At version 4.11.x and prior, the Go runtime was not properly versioned for go modules. After this point, the runtime
-source code to be imported was held in the `runtime/Go/antlr/v4` directory, and the go.mod file was updated to reflect the version of
-ANTLR4 that it is compatible with (i.e. it uses the /v4 path).
-
-However, this was found to be problematic, as it meant that with the runtime embedded so far underneath the root
-of the repo, the `go get` and related commands could not properly resolve the location of the go runtime source code.
-This meant that the reference to the runtime in your `go.mod` file would refer to the correct source code, but would not
-list the release tag such as @4.12.0 - this was confusing, to say the least.
-
-As of 4.12.1, the runtime is now available as a go module in its own repo, and can be imported as `github.com/antlr4-go/antlr`
-(the go get command should also be used with this path). See the main documentation for the ANTLR4 project for more information,
-which is available at [ANTLR docs]. The documentation for using the Go runtime is available at [Go runtime docs].
-
-This means that if you are using the source code without modules, you should also use the source code in the [new repo],
-though we highly recommend that you use go modules, as they are now idiomatic for Go.
-
-I am aware that this change will prove Hyrum's Law, but am prepared to live with it for the common good.
-
-Go runtime author: [Jim Idle] jimi@idle.ws
-
-# Code Generation
-
-ANTLR supports the generation of code in a number of [target languages], and the generated code is supported by a
-runtime library, written specifically to support the generated code in the target language. This library is the
-runtime for the Go target.
-
-To generate code for the go target, it is generally recommended to place the source grammar files in a package of
-their own, and use the `.sh` script method of generating code, using the go generate directive. In that same directory
-it is usual, though not required, to place the antlr tool that should be used to generate the code. That does mean
-that the antlr tool JAR file will be checked in to your source code control though, so you are, of course, free to use any other
-way of specifying the version of the ANTLR tool to use, such as aliasing in `.zshrc` or equivalent, or a profile in
-your IDE, or configuration in your CI system. Checking in the jar does mean that it is easy to reproduce the build as
-it was at any point in its history.
-
-Here is a general/recommended template for an ANTLR based recognizer in Go:
-
- .
- ├── parser
- │ ├── mygrammar.g4
- │ ├── antlr-4.12.1-complete.jar
- │ ├── generate.go
- │ └── generate.sh
- ├── parsing - generated code goes here
- │ └── error_listeners.go
- ├── go.mod
- ├── go.sum
- ├── main.go
- └── main_test.go
-
-Make sure that the package statement in your grammar file(s) reflects the go package the generated code will exist in.
-
-The generate.go file then looks like this:
-
- package parser
-
- //go:generate ./generate.sh
-
-And the generate.sh file will look similar to this:
-
- #!/bin/sh
-
- alias antlr4='java -Xmx500M -cp "./antlr4-4.12.1-complete.jar:$CLASSPATH" org.antlr.v4.Tool'
- antlr4 -Dlanguage=Go -no-visitor -package parsing *.g4
-
-depending on whether you want visitors or listeners or any other ANTLR options. Note that another option here
-is to generate the code into a separate directory of your choosing.
-
-From the command line at the root of your source package (the location of go.mod) you can then simply issue the command:
-
- go generate ./...
-
-Which will generate the code for the parser, and place it in the parsing package. You can then use the generated code
-by importing the parsing package.
-
-There are no hard and fast rules on this. It is just a recommendation. You can generate the code in any way and to anywhere you like.
-
-# Copyright Notice
-
-Copyright (c) 2012-2023 The ANTLR Project. All rights reserved.
-
-Use of this file is governed by the BSD 3-clause license, which can be found in the [LICENSE.txt] file in the project root.
-
-[target languages]: https://github.com/antlr/antlr4/tree/master/runtime
-[LICENSE.txt]: https://github.com/antlr/antlr4/blob/master/LICENSE.txt
-[ANTLR docs]: https://github.com/antlr/antlr4/blob/master/doc/index.md
-[new repo]: https://github.com/antlr4-go/antlr
-[Jim Idle]: https://github.com/jimidle
-[Go runtime docs]: https://github.com/antlr/antlr4/blob/master/doc/go-target.md
-*/
-package antlr
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn.go b/vendor/github.com/antlr4-go/antlr/v4/atn.go
deleted file mode 100644
index cdeefed24..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/atn.go
+++ /dev/null
@@ -1,179 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import "sync"
-
-// ATNInvalidAltNumber is used to represent an ALT number that has yet to be calculated or
-// which is invalid for a particular struct such as [*antlr.BaseRuleContext]
-var ATNInvalidAltNumber int
-
-// ATN represents an “[Augmented Transition Network]”, though in ANTLR the term usually means an
-// “Augmented Recursive Transition Network”; there are also some descriptions of a “[Recursive Transition Network]”
-// in existence.
-//
-// ATNs represent the main networks in the system and are serialized by the code generator and support [ALL(*)].
-//
-// [Augmented Transition Network]: https://en.wikipedia.org/wiki/Augmented_transition_network
-// [ALL(*)]: https://www.antlr.org/papers/allstar-techreport.pdf
-// [Recursive Transition Network]: https://en.wikipedia.org/wiki/Recursive_transition_network
-type ATN struct {
-
- // DecisionToState is the decision points for all rules, sub-rules, optional
- // blocks, ()+, ()*, etc. Each sub-rule/rule is a decision point, and we must track them, so we
- // can go back later and build DFA predictors for them. This includes
- // all the rules, sub-rules, optional blocks, ()+, ()* etc...
- DecisionToState []DecisionState
-
- // grammarType is the ATN type and is used for deserializing ATNs from strings.
- grammarType int
-
- // lexerActions is referenced by action transitions in the ATN for lexer ATNs.
- lexerActions []LexerAction
-
- // maxTokenType is the maximum value for any symbol recognized by a transition in the ATN.
- maxTokenType int
-
- modeNameToStartState map[string]*TokensStartState
-
- modeToStartState []*TokensStartState
-
- // ruleToStartState maps from rule index to starting state number.
- ruleToStartState []*RuleStartState
-
- // ruleToStopState maps from rule index to stop state number.
- ruleToStopState []*RuleStopState
-
- // ruleToTokenType maps the rule index to the resulting token type for lexer
- // ATNs. For parser ATNs, it maps the rule index to the generated bypass token
- // type if ATNDeserializationOptions.isGenerateRuleBypassTransitions was
- // specified, and otherwise is nil.
- ruleToTokenType []int
-
- // ATNStates is a list of all states in the ATN, ordered by state number.
- //
- states []ATNState
-
- mu sync.Mutex
- stateMu sync.RWMutex
- edgeMu sync.RWMutex
-}
-
-// NewATN returns a new ATN struct representing the given grammarType and is used
-// for runtime deserialization of ATNs from the code generated by the ANTLR tool
-func NewATN(grammarType int, maxTokenType int) *ATN {
- return &ATN{
- grammarType: grammarType,
- maxTokenType: maxTokenType,
- modeNameToStartState: make(map[string]*TokensStartState),
- }
-}
-
-// NextTokensInContext computes and returns the set of valid tokens that can occur starting
-// in state s. If ctx is nil, the set of tokens will not include what can follow
-// the rule surrounding s. In other words, the set will be restricted to tokens
-// reachable staying within the rule of s.
-func (a *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
- return NewLL1Analyzer(a).Look(s, nil, ctx)
-}
-
-// NextTokensNoContext computes and returns the set of valid tokens that can occur starting
-// in state s and staying in same rule. [antlr.Token.EPSILON] is in set if we reach end of
-// rule.
-func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
- a.mu.Lock()
- defer a.mu.Unlock()
- iset := s.GetNextTokenWithinRule()
- if iset == nil {
- iset = a.NextTokensInContext(s, nil)
- iset.readOnly = true
- s.SetNextTokenWithinRule(iset)
- }
- return iset
-}
-
-// NextTokens computes and returns the set of valid tokens starting in state s, by
-// calling either [NextTokensNoContext] (ctx == nil) or [NextTokensInContext] (ctx != nil).
-func (a *ATN) NextTokens(s ATNState, ctx RuleContext) *IntervalSet {
- if ctx == nil {
- return a.NextTokensNoContext(s)
- }
-
- return a.NextTokensInContext(s, ctx)
-}
-
-func (a *ATN) addState(state ATNState) {
- if state != nil {
- state.SetATN(a)
- state.SetStateNumber(len(a.states))
- }
-
- a.states = append(a.states, state)
-}
-
-func (a *ATN) removeState(state ATNState) {
- a.states[state.GetStateNumber()] = nil // Just free the memory; don't shift states in the slice
-}
-
-func (a *ATN) defineDecisionState(s DecisionState) int {
- a.DecisionToState = append(a.DecisionToState, s)
- s.setDecision(len(a.DecisionToState) - 1)
-
- return s.getDecision()
-}
-
-func (a *ATN) getDecisionState(decision int) DecisionState {
- if len(a.DecisionToState) == 0 {
- return nil
- }
-
- return a.DecisionToState[decision]
-}
-
-// getExpectedTokens computes the set of input symbols which could follow ATN
-// state number stateNumber in the specified full parse context ctx and returns
-// the set of potentially valid input symbols which could follow the specified
-// state in the specified context. This method considers the complete parser
-// context, but does not evaluate semantic predicates (i.e. all predicates
-// encountered during the calculation are assumed true). If a path in the ATN
-// exists from the starting state to the RuleStopState of the outermost context
-// without Matching any symbols, Token.EOF is added to the returned set.
-//
-// A nil ctx defaults to ParserRuleContext.EMPTY.
-//
-// It panics if the ATN does not contain state stateNumber.
-func (a *ATN) getExpectedTokens(stateNumber int, ctx RuleContext) *IntervalSet {
- if stateNumber < 0 || stateNumber >= len(a.states) {
- panic("Invalid state number.")
- }
-
- s := a.states[stateNumber]
- following := a.NextTokens(s, nil)
-
- if !following.contains(TokenEpsilon) {
- return following
- }
-
- expected := NewIntervalSet()
-
- expected.addSet(following)
- expected.removeOne(TokenEpsilon)
-
- for ctx != nil && ctx.GetInvokingState() >= 0 && following.contains(TokenEpsilon) {
- invokingState := a.states[ctx.GetInvokingState()]
- rt := invokingState.GetTransitions()[0]
-
- following = a.NextTokens(rt.(*RuleTransition).followState, nil)
- expected.addSet(following)
- expected.removeOne(TokenEpsilon)
- ctx = ctx.GetParent().(RuleContext)
- }
-
- if following.contains(TokenEpsilon) {
- expected.addOne(TokenEOF)
- }
-
- return expected
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_config.go b/vendor/github.com/antlr4-go/antlr/v4/atn_config.go
deleted file mode 100644
index a83f25d34..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/atn_config.go
+++ /dev/null
@@ -1,335 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
-)
-
-const (
- lexerConfig = iota // Indicates that this ATNConfig is for a lexer
- parserConfig // Indicates that this ATNConfig is for a parser
-)
-
-// ATNConfig is a tuple: (ATN state, predicted alt, syntactic, semantic
-// context). The syntactic context is a graph-structured stack node whose
-// path(s) to the root is the rule invocation(s) chain used to arrive in the
-// state. The semantic context is the tree of semantic predicates encountered
-// before reaching an ATN state.
-type ATNConfig struct {
- precedenceFilterSuppressed bool
- state ATNState
- alt int
- context *PredictionContext
- semanticContext SemanticContext
- reachesIntoOuterContext int
- cType int // lexerConfig or parserConfig
- lexerActionExecutor *LexerActionExecutor
- passedThroughNonGreedyDecision bool
-}
-
-// NewATNConfig6 creates a new ATNConfig instance given a state, alt and context only
-func NewATNConfig6(state ATNState, alt int, context *PredictionContext) *ATNConfig {
- return NewATNConfig5(state, alt, context, SemanticContextNone)
-}
-
-// NewATNConfig5 creates a new ATNConfig instance given a state, alt, context and semantic context
-func NewATNConfig5(state ATNState, alt int, context *PredictionContext, semanticContext SemanticContext) *ATNConfig {
- if semanticContext == nil {
- panic("semanticContext cannot be nil") // TODO: Necessary?
- }
-
- pac := &ATNConfig{}
- pac.state = state
- pac.alt = alt
- pac.context = context
- pac.semanticContext = semanticContext
- pac.cType = parserConfig
- return pac
-}
-
-// NewATNConfig4 creates a new ATNConfig instance given an existing config, and a state only
-func NewATNConfig4(c *ATNConfig, state ATNState) *ATNConfig {
- return NewATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
-}
-
-// NewATNConfig3 creates a new ATNConfig instance given an existing config, a state and a semantic context
-func NewATNConfig3(c *ATNConfig, state ATNState, semanticContext SemanticContext) *ATNConfig {
- return NewATNConfig(c, state, c.GetContext(), semanticContext)
-}
-
-// NewATNConfig2 creates a new ATNConfig instance given an existing config, and a context only
-func NewATNConfig2(c *ATNConfig, semanticContext SemanticContext) *ATNConfig {
- return NewATNConfig(c, c.GetState(), c.GetContext(), semanticContext)
-}
-
-// NewATNConfig1 creates a new ATNConfig instance given an existing config, a state, and a context only
-func NewATNConfig1(c *ATNConfig, state ATNState, context *PredictionContext) *ATNConfig {
- return NewATNConfig(c, state, context, c.GetSemanticContext())
-}
-
-// NewATNConfig creates a new ATNConfig instance given an existing config, a state, a context and a semantic context, other 'constructors'
-// are just wrappers around this one.
-func NewATNConfig(c *ATNConfig, state ATNState, context *PredictionContext, semanticContext SemanticContext) *ATNConfig {
- if semanticContext == nil {
- panic("semanticContext cannot be nil") // TODO: Remove this - probably put here for some bug that is now fixed
- }
- b := &ATNConfig{}
- b.InitATNConfig(c, state, c.GetAlt(), context, semanticContext)
- b.cType = parserConfig
- return b
-}
-
-func (a *ATNConfig) InitATNConfig(c *ATNConfig, state ATNState, alt int, context *PredictionContext, semanticContext SemanticContext) {
-
- a.state = state
- a.alt = alt
- a.context = context
- a.semanticContext = semanticContext
- a.reachesIntoOuterContext = c.GetReachesIntoOuterContext()
- a.precedenceFilterSuppressed = c.getPrecedenceFilterSuppressed()
-}
-
-func (a *ATNConfig) getPrecedenceFilterSuppressed() bool {
- return a.precedenceFilterSuppressed
-}
-
-func (a *ATNConfig) setPrecedenceFilterSuppressed(v bool) {
- a.precedenceFilterSuppressed = v
-}
-
-// GetState returns the ATN state associated with this configuration
-func (a *ATNConfig) GetState() ATNState {
- return a.state
-}
-
-// GetAlt returns the alternative associated with this configuration
-func (a *ATNConfig) GetAlt() int {
- return a.alt
-}
-
-// SetContext sets the rule invocation stack associated with this configuration
-func (a *ATNConfig) SetContext(v *PredictionContext) {
- a.context = v
-}
-
-// GetContext returns the rule invocation stack associated with this configuration
-func (a *ATNConfig) GetContext() *PredictionContext {
- return a.context
-}
-
-// GetSemanticContext returns the semantic context associated with this configuration
-func (a *ATNConfig) GetSemanticContext() SemanticContext {
- return a.semanticContext
-}
-
-// GetReachesIntoOuterContext returns the count of references to an outer context from this configuration
-func (a *ATNConfig) GetReachesIntoOuterContext() int {
- return a.reachesIntoOuterContext
-}
-
-// SetReachesIntoOuterContext sets the count of references to an outer context from this configuration
-func (a *ATNConfig) SetReachesIntoOuterContext(v int) {
- a.reachesIntoOuterContext = v
-}
-
-// Equals is the default comparison function for an ATNConfig when no specialist implementation is required
-// for a collection.
-//
-// An ATN configuration is equal to another if both have the same state, they
-// predict the same alternative, and syntactic/semantic contexts are the same.
-func (a *ATNConfig) Equals(o Collectable[*ATNConfig]) bool {
- switch a.cType {
- case lexerConfig:
- return a.LEquals(o)
- case parserConfig:
- return a.PEquals(o)
- default:
- panic("Invalid ATNConfig type")
- }
-}
-
-// PEquals is the default comparison function for a Parser ATNConfig when no specialist implementation is required
-// for a collection.
-//
-// An ATN configuration is equal to another if both have the same state, they
-// predict the same alternative, and syntactic/semantic contexts are the same.
-func (a *ATNConfig) PEquals(o Collectable[*ATNConfig]) bool {
- var other, ok = o.(*ATNConfig)
-
- if !ok {
- return false
- }
- if a == other {
- return true
- } else if other == nil {
- return false
- }
-
- var equal bool
-
- if a.context == nil {
- equal = other.context == nil
- } else {
- equal = a.context.Equals(other.context)
- }
-
- var (
- nums = a.state.GetStateNumber() == other.state.GetStateNumber()
- alts = a.alt == other.alt
- cons = a.semanticContext.Equals(other.semanticContext)
- sups = a.precedenceFilterSuppressed == other.precedenceFilterSuppressed
- )
-
- return nums && alts && cons && sups && equal
-}
-
-// Hash is the default hash function for a parser ATNConfig, when no specialist hash function
-// is required for a collection
-func (a *ATNConfig) Hash() int {
- switch a.cType {
- case lexerConfig:
- return a.LHash()
- case parserConfig:
- return a.PHash()
- default:
- panic("Invalid ATNConfig type")
- }
-}
-
-// PHash is the default hash function for a parser ATNConfig, when no specialist hash function
-// is required for a collection
-func (a *ATNConfig) PHash() int {
- var c int
- if a.context != nil {
- c = a.context.Hash()
- }
-
- h := murmurInit(7)
- h = murmurUpdate(h, a.state.GetStateNumber())
- h = murmurUpdate(h, a.alt)
- h = murmurUpdate(h, c)
- h = murmurUpdate(h, a.semanticContext.Hash())
- return murmurFinish(h, 4)
-}
-
-// String returns a string representation of the ATNConfig, usually used for debugging purposes
-func (a *ATNConfig) String() string {
- var s1, s2, s3 string
-
- if a.context != nil {
- s1 = ",[" + fmt.Sprint(a.context) + "]"
- }
-
- if a.semanticContext != SemanticContextNone {
- s2 = "," + fmt.Sprint(a.semanticContext)
- }
-
- if a.reachesIntoOuterContext > 0 {
- s3 = ",up=" + fmt.Sprint(a.reachesIntoOuterContext)
- }
-
- return fmt.Sprintf("(%v,%v%v%v%v)", a.state, a.alt, s1, s2, s3)
-}
-
-func NewLexerATNConfig6(state ATNState, alt int, context *PredictionContext) *ATNConfig {
- lac := &ATNConfig{}
- lac.state = state
- lac.alt = alt
- lac.context = context
- lac.semanticContext = SemanticContextNone
- lac.cType = lexerConfig
- return lac
-}
-
-func NewLexerATNConfig4(c *ATNConfig, state ATNState) *ATNConfig {
- lac := &ATNConfig{}
- lac.lexerActionExecutor = c.lexerActionExecutor
- lac.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
- lac.InitATNConfig(c, state, c.GetAlt(), c.GetContext(), c.GetSemanticContext())
- lac.cType = lexerConfig
- return lac
-}
-
-func NewLexerATNConfig3(c *ATNConfig, state ATNState, lexerActionExecutor *LexerActionExecutor) *ATNConfig {
- lac := &ATNConfig{}
- lac.lexerActionExecutor = lexerActionExecutor
- lac.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
- lac.InitATNConfig(c, state, c.GetAlt(), c.GetContext(), c.GetSemanticContext())
- lac.cType = lexerConfig
- return lac
-}
-
-func NewLexerATNConfig2(c *ATNConfig, state ATNState, context *PredictionContext) *ATNConfig {
- lac := &ATNConfig{}
- lac.lexerActionExecutor = c.lexerActionExecutor
- lac.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
- lac.InitATNConfig(c, state, c.GetAlt(), context, c.GetSemanticContext())
- lac.cType = lexerConfig
- return lac
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func NewLexerATNConfig1(state ATNState, alt int, context *PredictionContext) *ATNConfig {
- lac := &ATNConfig{}
- lac.state = state
- lac.alt = alt
- lac.context = context
- lac.semanticContext = SemanticContextNone
- lac.cType = lexerConfig
- return lac
-}
-
-// LHash is the default hash function for Lexer ATNConfig objects, it can be used directly or via
-// the default comparator [ObjEqComparator].
-func (a *ATNConfig) LHash() int {
- var f int
- if a.passedThroughNonGreedyDecision {
- f = 1
- } else {
- f = 0
- }
- h := murmurInit(7)
- h = murmurUpdate(h, a.state.GetStateNumber())
- h = murmurUpdate(h, a.alt)
- h = murmurUpdate(h, a.context.Hash())
- h = murmurUpdate(h, a.semanticContext.Hash())
- h = murmurUpdate(h, f)
- h = murmurUpdate(h, a.lexerActionExecutor.Hash())
- h = murmurFinish(h, 6)
- return h
-}
-
-// LEquals is the default comparison function for Lexer ATNConfig objects, it can be used directly or via
-// the default comparator [ObjEqComparator].
-func (a *ATNConfig) LEquals(other Collectable[*ATNConfig]) bool {
- var otherT, ok = other.(*ATNConfig)
- if !ok {
- return false
- } else if a == otherT {
- return true
- } else if a.passedThroughNonGreedyDecision != otherT.passedThroughNonGreedyDecision {
- return false
- }
-
- switch {
- case a.lexerActionExecutor == nil && otherT.lexerActionExecutor == nil:
- return true
- case a.lexerActionExecutor != nil && otherT.lexerActionExecutor != nil:
- if !a.lexerActionExecutor.Equals(otherT.lexerActionExecutor) {
- return false
- }
- default:
- return false // One but not both, are nil
- }
-
- return a.PEquals(otherT)
-}
-
-func checkNonGreedyDecision(source *ATNConfig, target ATNState) bool {
- var ds, ok = target.(DecisionState)
-
- return source.passedThroughNonGreedyDecision || (ok && ds.getNonGreedy())
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go b/vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go
deleted file mode 100644
index 52dbaf806..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go
+++ /dev/null
@@ -1,301 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
-)
-
-// ATNConfigSet is a specialized set of ATNConfig that tracks information
-// about its elements and can combine similar configurations using a
-// graph-structured stack.
-type ATNConfigSet struct {
- cachedHash int
-
- // configLookup is used to determine whether two ATNConfigSets are equal. We
- // need all configurations with the same (s, i, _, semctx) to be equal. A key
- // effectively doubles the number of objects associated with ATNConfigs. All
- // keys are hashed by (s, i, _, pi), not including the context. Wiped out when
- // read-only because a set becomes a DFA state.
- configLookup *JStore[*ATNConfig, Comparator[*ATNConfig]]
-
- // configs is the added elements that did not match an existing key in configLookup
- configs []*ATNConfig
-
- // TODO: These fields make me pretty uncomfortable, but it is nice to pack up
- // info together because it saves re-computation. Can we track conflicts as they
- // are added to save scanning configs later?
- conflictingAlts *BitSet
-
- // dipsIntoOuterContext is used by parsers and lexers. In a lexer, it indicates
- // we hit a pred while computing a closure operation. Do not make a DFA state
- // from the ATNConfigSet in this case. TODO: How is this used by parsers?
- dipsIntoOuterContext bool
-
- // fullCtx is whether it is part of a full context LL prediction. Used to
- // determine how to merge $. It is a wildcard with SLL, but not for an LL
- // context merge.
- fullCtx bool
-
- // Used in parser and lexer. In lexer, it indicates we hit a pred
- // while computing a closure operation. Don't make a DFA state from this set.
- hasSemanticContext bool
-
- // readOnly is whether it is read-only. Do not
- // allow any code to manipulate the set if true because DFA states will point at
- // sets and those must not change. If not, protect other fields; conflictingAlts
- // in particular, which is assigned after readOnly.
- readOnly bool
-
- // TODO: These fields make me pretty uncomfortable, but it is nice to pack up
- // info together because it saves re-computation. Can we track conflicts as they
- // are added to save scanning configs later?
- uniqueAlt int
-}
-
-// Alts returns the combined set of alts for all the configurations in this set.
-func (b *ATNConfigSet) Alts() *BitSet {
- alts := NewBitSet()
- for _, it := range b.configs {
- alts.add(it.GetAlt())
- }
- return alts
-}
-
-// NewATNConfigSet creates a new ATNConfigSet instance.
-func NewATNConfigSet(fullCtx bool) *ATNConfigSet {
- return &ATNConfigSet{
- cachedHash: -1,
- configLookup: NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfCompInst, ATNConfigLookupCollection, "NewATNConfigSet()"),
- fullCtx: fullCtx,
- }
-}
-
-// Add merges contexts with existing configs for (s, i, pi, _),
-// where 's' is the ATNConfig.state, 'i' is the ATNConfig.alt, and
-// 'pi' is the [ATNConfig].semanticContext.
-//
-// We use (s,i,pi) as the key.
-// Updates dipsIntoOuterContext and hasSemanticContext when necessary.
-func (b *ATNConfigSet) Add(config *ATNConfig, mergeCache *JPCMap) bool {
- if b.readOnly {
- panic("set is read-only")
- }
-
- if config.GetSemanticContext() != SemanticContextNone {
- b.hasSemanticContext = true
- }
-
- if config.GetReachesIntoOuterContext() > 0 {
- b.dipsIntoOuterContext = true
- }
-
- existing, present := b.configLookup.Put(config)
-
- // The config was not already in the set
- //
- if !present {
- b.cachedHash = -1
- b.configs = append(b.configs, config) // Track order here
- return true
- }
-
- // Merge a previous (s, i, pi, _) with it and save the result
- rootIsWildcard := !b.fullCtx
- merged := merge(existing.GetContext(), config.GetContext(), rootIsWildcard, mergeCache)
-
- // No need to check for existing.context because config.context is in the cache,
- // since the only way to create new graphs is the "call rule" and here. We cache
- // at both places.
- existing.SetReachesIntoOuterContext(intMax(existing.GetReachesIntoOuterContext(), config.GetReachesIntoOuterContext()))
-
- // Preserve the precedence filter suppression during the merge
- if config.getPrecedenceFilterSuppressed() {
- existing.setPrecedenceFilterSuppressed(true)
- }
-
- // Replace the context because there is no need to do alt mapping
- existing.SetContext(merged)
-
- return true
-}
-
-// GetStates returns the set of states represented by all configurations in this config set
-func (b *ATNConfigSet) GetStates() *JStore[ATNState, Comparator[ATNState]] {
-
- // states uses the standard comparator and Hash() provided by the ATNState instance
- //
- states := NewJStore[ATNState, Comparator[ATNState]](aStateEqInst, ATNStateCollection, "ATNConfigSet.GetStates()")
-
- for i := 0; i < len(b.configs); i++ {
- states.Put(b.configs[i].GetState())
- }
-
- return states
-}
-
-func (b *ATNConfigSet) GetPredicates() []SemanticContext {
- predicates := make([]SemanticContext, 0)
-
- for i := 0; i < len(b.configs); i++ {
- c := b.configs[i].GetSemanticContext()
-
- if c != SemanticContextNone {
- predicates = append(predicates, c)
- }
- }
-
- return predicates
-}
-
-func (b *ATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) {
- if b.readOnly {
- panic("set is read-only")
- }
-
- // Empty indicates no optimization is possible
- if b.configLookup == nil || b.configLookup.Len() == 0 {
- return
- }
-
- for i := 0; i < len(b.configs); i++ {
- config := b.configs[i]
- config.SetContext(interpreter.getCachedContext(config.GetContext()))
- }
-}
-
-func (b *ATNConfigSet) AddAll(coll []*ATNConfig) bool {
- for i := 0; i < len(coll); i++ {
- b.Add(coll[i], nil)
- }
-
- return false
-}
-
-// Compare The configs are only equal if they are in the same order and their Equals function returns true.
-// Java uses ArrayList.equals(), which requires the same order.
-func (b *ATNConfigSet) Compare(bs *ATNConfigSet) bool {
- if len(b.configs) != len(bs.configs) {
- return false
- }
- for i := 0; i < len(b.configs); i++ {
- if !b.configs[i].Equals(bs.configs[i]) {
- return false
- }
- }
-
- return true
-}
-
-func (b *ATNConfigSet) Equals(other Collectable[ATNConfig]) bool {
- if b == other {
- return true
- } else if _, ok := other.(*ATNConfigSet); !ok {
- return false
- }
-
- other2 := other.(*ATNConfigSet)
- var eca bool
- switch {
- case b.conflictingAlts == nil && other2.conflictingAlts == nil:
- eca = true
- case b.conflictingAlts != nil && other2.conflictingAlts != nil:
- eca = b.conflictingAlts.equals(other2.conflictingAlts)
- }
- return b.configs != nil &&
- b.fullCtx == other2.fullCtx &&
- b.uniqueAlt == other2.uniqueAlt &&
- eca &&
- b.hasSemanticContext == other2.hasSemanticContext &&
- b.dipsIntoOuterContext == other2.dipsIntoOuterContext &&
- b.Compare(other2)
-}
-
-func (b *ATNConfigSet) Hash() int {
- if b.readOnly {
- if b.cachedHash == -1 {
- b.cachedHash = b.hashCodeConfigs()
- }
-
- return b.cachedHash
- }
-
- return b.hashCodeConfigs()
-}
-
-func (b *ATNConfigSet) hashCodeConfigs() int {
- h := 1
- for _, config := range b.configs {
- h = 31*h + config.Hash()
- }
- return h
-}
-
-func (b *ATNConfigSet) Contains(item *ATNConfig) bool {
- if b.readOnly {
- panic("not implemented for read-only sets")
- }
- if b.configLookup == nil {
- return false
- }
- return b.configLookup.Contains(item)
-}
-
-func (b *ATNConfigSet) ContainsFast(item *ATNConfig) bool {
- return b.Contains(item)
-}
-
-func (b *ATNConfigSet) Clear() {
- if b.readOnly {
- panic("set is read-only")
- }
- b.configs = make([]*ATNConfig, 0)
- b.cachedHash = -1
- b.configLookup = NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfCompInst, ATNConfigLookupCollection, "NewATNConfigSet()")
-}
-
-func (b *ATNConfigSet) String() string {
-
- s := "["
-
- for i, c := range b.configs {
- s += c.String()
-
- if i != len(b.configs)-1 {
- s += ", "
- }
- }
-
- s += "]"
-
- if b.hasSemanticContext {
- s += ",hasSemanticContext=" + fmt.Sprint(b.hasSemanticContext)
- }
-
- if b.uniqueAlt != ATNInvalidAltNumber {
- s += ",uniqueAlt=" + fmt.Sprint(b.uniqueAlt)
- }
-
- if b.conflictingAlts != nil {
- s += ",conflictingAlts=" + b.conflictingAlts.String()
- }
-
- if b.dipsIntoOuterContext {
- s += ",dipsIntoOuterContext"
- }
-
- return s
-}
-
-// NewOrderedATNConfigSet creates a config set with a slightly different Hash/Equal pair
-// for use in lexers.
-func NewOrderedATNConfigSet() *ATNConfigSet {
- return &ATNConfigSet{
- cachedHash: -1,
- // This set uses the standard Hash() and Equals() from ATNConfig
- configLookup: NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ATNConfigCollection, "ATNConfigSet.NewOrderedATNConfigSet()"),
- fullCtx: false,
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go b/vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go
deleted file mode 100644
index bdb30b362..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import "errors"
-
-var defaultATNDeserializationOptions = ATNDeserializationOptions{true, true, false}
-
-type ATNDeserializationOptions struct {
- readOnly bool
- verifyATN bool
- generateRuleBypassTransitions bool
-}
-
-func (opts *ATNDeserializationOptions) ReadOnly() bool {
- return opts.readOnly
-}
-
-func (opts *ATNDeserializationOptions) SetReadOnly(readOnly bool) {
- if opts.readOnly {
- panic(errors.New("cannot mutate read only ATNDeserializationOptions"))
- }
- opts.readOnly = readOnly
-}
-
-func (opts *ATNDeserializationOptions) VerifyATN() bool {
- return opts.verifyATN
-}
-
-func (opts *ATNDeserializationOptions) SetVerifyATN(verifyATN bool) {
- if opts.readOnly {
- panic(errors.New("cannot mutate read only ATNDeserializationOptions"))
- }
- opts.verifyATN = verifyATN
-}
-
-func (opts *ATNDeserializationOptions) GenerateRuleBypassTransitions() bool {
- return opts.generateRuleBypassTransitions
-}
-
-func (opts *ATNDeserializationOptions) SetGenerateRuleBypassTransitions(generateRuleBypassTransitions bool) {
- if opts.readOnly {
- panic(errors.New("cannot mutate read only ATNDeserializationOptions"))
- }
- opts.generateRuleBypassTransitions = generateRuleBypassTransitions
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func DefaultATNDeserializationOptions() *ATNDeserializationOptions {
- return NewATNDeserializationOptions(&defaultATNDeserializationOptions)
-}
-
-func NewATNDeserializationOptions(other *ATNDeserializationOptions) *ATNDeserializationOptions {
- o := new(ATNDeserializationOptions)
- if other != nil {
- *o = *other
- o.readOnly = false
- }
- return o
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go b/vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go
deleted file mode 100644
index 2dcb9ae11..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go
+++ /dev/null
@@ -1,684 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "strconv"
-)
-
-const serializedVersion = 4
-
-type loopEndStateIntPair struct {
- item0 *LoopEndState
- item1 int
-}
-
-type blockStartStateIntPair struct {
- item0 BlockStartState
- item1 int
-}
-
-type ATNDeserializer struct {
- options *ATNDeserializationOptions
- data []int32
- pos int
-}
-
-func NewATNDeserializer(options *ATNDeserializationOptions) *ATNDeserializer {
- if options == nil {
- options = &defaultATNDeserializationOptions
- }
-
- return &ATNDeserializer{options: options}
-}
-
-//goland:noinspection GoUnusedFunction
-func stringInSlice(a string, list []string) int {
- for i, b := range list {
- if b == a {
- return i
- }
- }
-
- return -1
-}
-
-func (a *ATNDeserializer) Deserialize(data []int32) *ATN {
- a.data = data
- a.pos = 0
- a.checkVersion()
-
- atn := a.readATN()
-
- a.readStates(atn)
- a.readRules(atn)
- a.readModes(atn)
-
- sets := a.readSets(atn, nil)
-
- a.readEdges(atn, sets)
- a.readDecisions(atn)
- a.readLexerActions(atn)
- a.markPrecedenceDecisions(atn)
- a.verifyATN(atn)
-
- if a.options.GenerateRuleBypassTransitions() && atn.grammarType == ATNTypeParser {
- a.generateRuleBypassTransitions(atn)
- // Re-verify after modification
- a.verifyATN(atn)
- }
-
- return atn
-
-}
-
-func (a *ATNDeserializer) checkVersion() {
- version := a.readInt()
-
- if version != serializedVersion {
- panic("Could not deserialize ATN with version " + strconv.Itoa(version) + " (expected " + strconv.Itoa(serializedVersion) + ").")
- }
-}
-
-func (a *ATNDeserializer) readATN() *ATN {
- grammarType := a.readInt()
- maxTokenType := a.readInt()
-
- return NewATN(grammarType, maxTokenType)
-}
-
-func (a *ATNDeserializer) readStates(atn *ATN) {
- nstates := a.readInt()
-
- // Allocate worst case size.
- loopBackStateNumbers := make([]loopEndStateIntPair, 0, nstates)
- endStateNumbers := make([]blockStartStateIntPair, 0, nstates)
-
- // Preallocate states slice.
- atn.states = make([]ATNState, 0, nstates)
-
- for i := 0; i < nstates; i++ {
- stype := a.readInt()
-
- // Ignore bad types of states
- if stype == ATNStateInvalidType {
- atn.addState(nil)
- continue
- }
-
- ruleIndex := a.readInt()
-
- s := a.stateFactory(stype, ruleIndex)
-
- if stype == ATNStateLoopEnd {
- loopBackStateNumber := a.readInt()
-
- loopBackStateNumbers = append(loopBackStateNumbers, loopEndStateIntPair{s.(*LoopEndState), loopBackStateNumber})
- } else if s2, ok := s.(BlockStartState); ok {
- endStateNumber := a.readInt()
-
- endStateNumbers = append(endStateNumbers, blockStartStateIntPair{s2, endStateNumber})
- }
-
- atn.addState(s)
- }
-
- // Delay the assignment of loop back and end states until we know all the state
- // instances have been initialized
- for _, pair := range loopBackStateNumbers {
- pair.item0.loopBackState = atn.states[pair.item1]
- }
-
- for _, pair := range endStateNumbers {
- pair.item0.setEndState(atn.states[pair.item1].(*BlockEndState))
- }
-
- numNonGreedyStates := a.readInt()
- for j := 0; j < numNonGreedyStates; j++ {
- stateNumber := a.readInt()
-
- atn.states[stateNumber].(DecisionState).setNonGreedy(true)
- }
-
- numPrecedenceStates := a.readInt()
- for j := 0; j < numPrecedenceStates; j++ {
- stateNumber := a.readInt()
-
- atn.states[stateNumber].(*RuleStartState).isPrecedenceRule = true
- }
-}
-
-func (a *ATNDeserializer) readRules(atn *ATN) {
- nrules := a.readInt()
-
- if atn.grammarType == ATNTypeLexer {
- atn.ruleToTokenType = make([]int, nrules)
- }
-
- atn.ruleToStartState = make([]*RuleStartState, nrules)
-
- for i := range atn.ruleToStartState {
- s := a.readInt()
- startState := atn.states[s].(*RuleStartState)
-
- atn.ruleToStartState[i] = startState
-
- if atn.grammarType == ATNTypeLexer {
- tokenType := a.readInt()
-
- atn.ruleToTokenType[i] = tokenType
- }
- }
-
- atn.ruleToStopState = make([]*RuleStopState, nrules)
-
- for _, state := range atn.states {
- if s2, ok := state.(*RuleStopState); ok {
- atn.ruleToStopState[s2.ruleIndex] = s2
- atn.ruleToStartState[s2.ruleIndex].stopState = s2
- }
- }
-}
-
-func (a *ATNDeserializer) readModes(atn *ATN) {
- nmodes := a.readInt()
- atn.modeToStartState = make([]*TokensStartState, nmodes)
-
- for i := range atn.modeToStartState {
- s := a.readInt()
-
- atn.modeToStartState[i] = atn.states[s].(*TokensStartState)
- }
-}
-
-func (a *ATNDeserializer) readSets(_ *ATN, sets []*IntervalSet) []*IntervalSet {
- m := a.readInt()
-
- // Preallocate the needed capacity.
- if cap(sets)-len(sets) < m {
- isets := make([]*IntervalSet, len(sets), len(sets)+m)
- copy(isets, sets)
- sets = isets
- }
-
- for i := 0; i < m; i++ {
- iset := NewIntervalSet()
-
- sets = append(sets, iset)
-
- n := a.readInt()
- containsEOF := a.readInt()
-
- if containsEOF != 0 {
- iset.addOne(-1)
- }
-
- for j := 0; j < n; j++ {
- i1 := a.readInt()
- i2 := a.readInt()
-
- iset.addRange(i1, i2)
- }
- }
-
- return sets
-}
-
-func (a *ATNDeserializer) readEdges(atn *ATN, sets []*IntervalSet) {
- nedges := a.readInt()
-
- for i := 0; i < nedges; i++ {
- var (
- src = a.readInt()
- trg = a.readInt()
- ttype = a.readInt()
- arg1 = a.readInt()
- arg2 = a.readInt()
- arg3 = a.readInt()
- trans = a.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
- srcState = atn.states[src]
- )
-
- srcState.AddTransition(trans, -1)
- }
-
- // Edges for rule stop states can be derived, so they are not serialized
- for _, state := range atn.states {
- for _, t := range state.GetTransitions() {
- var rt, ok = t.(*RuleTransition)
-
- if !ok {
- continue
- }
-
- outermostPrecedenceReturn := -1
-
- if atn.ruleToStartState[rt.getTarget().GetRuleIndex()].isPrecedenceRule {
- if rt.precedence == 0 {
- outermostPrecedenceReturn = rt.getTarget().GetRuleIndex()
- }
- }
-
- trans := NewEpsilonTransition(rt.followState, outermostPrecedenceReturn)
-
- atn.ruleToStopState[rt.getTarget().GetRuleIndex()].AddTransition(trans, -1)
- }
- }
-
- for _, state := range atn.states {
- if s2, ok := state.(BlockStartState); ok {
- // We need to know the end state to set its start state
- if s2.getEndState() == nil {
- panic("IllegalState")
- }
-
- // Block end states can only be associated to a single block start state
- if s2.getEndState().startState != nil {
- panic("IllegalState")
- }
-
- s2.getEndState().startState = state
- }
-
- if s2, ok := state.(*PlusLoopbackState); ok {
- for _, t := range s2.GetTransitions() {
- if t2, ok := t.getTarget().(*PlusBlockStartState); ok {
- t2.loopBackState = state
- }
- }
- } else if s2, ok := state.(*StarLoopbackState); ok {
- for _, t := range s2.GetTransitions() {
- if t2, ok := t.getTarget().(*StarLoopEntryState); ok {
- t2.loopBackState = state
- }
- }
- }
- }
-}
-
-func (a *ATNDeserializer) readDecisions(atn *ATN) {
- ndecisions := a.readInt()
-
- for i := 0; i < ndecisions; i++ {
- s := a.readInt()
- decState := atn.states[s].(DecisionState)
-
- atn.DecisionToState = append(atn.DecisionToState, decState)
- decState.setDecision(i)
- }
-}
-
-func (a *ATNDeserializer) readLexerActions(atn *ATN) {
- if atn.grammarType == ATNTypeLexer {
- count := a.readInt()
-
- atn.lexerActions = make([]LexerAction, count)
-
- for i := range atn.lexerActions {
- actionType := a.readInt()
- data1 := a.readInt()
- data2 := a.readInt()
- atn.lexerActions[i] = a.lexerActionFactory(actionType, data1, data2)
- }
- }
-}
-
-func (a *ATNDeserializer) generateRuleBypassTransitions(atn *ATN) {
- count := len(atn.ruleToStartState)
-
- for i := 0; i < count; i++ {
- atn.ruleToTokenType[i] = atn.maxTokenType + i + 1
- }
-
- for i := 0; i < count; i++ {
- a.generateRuleBypassTransition(atn, i)
- }
-}
-
-func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
- bypassStart := NewBasicBlockStartState()
-
- bypassStart.ruleIndex = idx
- atn.addState(bypassStart)
-
- bypassStop := NewBlockEndState()
-
- bypassStop.ruleIndex = idx
- atn.addState(bypassStop)
-
- bypassStart.endState = bypassStop
-
- atn.defineDecisionState(&bypassStart.BaseDecisionState)
-
- bypassStop.startState = bypassStart
-
- var excludeTransition Transition
- var endState ATNState
-
- if atn.ruleToStartState[idx].isPrecedenceRule {
- // Wrap from the beginning of the rule to the StarLoopEntryState
- endState = nil
-
- for i := 0; i < len(atn.states); i++ {
- state := atn.states[i]
-
- if a.stateIsEndStateFor(state, idx) != nil {
- endState = state
- excludeTransition = state.(*StarLoopEntryState).loopBackState.GetTransitions()[0]
-
- break
- }
- }
-
- if excludeTransition == nil {
- panic("Couldn't identify final state of the precedence rule prefix section.")
- }
- } else {
- endState = atn.ruleToStopState[idx]
- }
-
- // All non-excluded transitions that currently target end state need to target
- // blockEnd instead
- for i := 0; i < len(atn.states); i++ {
- state := atn.states[i]
-
- for j := 0; j < len(state.GetTransitions()); j++ {
- transition := state.GetTransitions()[j]
-
- if transition == excludeTransition {
- continue
- }
-
- if transition.getTarget() == endState {
- transition.setTarget(bypassStop)
- }
- }
- }
-
- // All transitions leaving the rule start state need to leave blockStart instead
- ruleToStartState := atn.ruleToStartState[idx]
- count := len(ruleToStartState.GetTransitions())
-
- for count > 0 {
- bypassStart.AddTransition(ruleToStartState.GetTransitions()[count-1], -1)
- ruleToStartState.SetTransitions([]Transition{ruleToStartState.GetTransitions()[len(ruleToStartState.GetTransitions())-1]})
- }
-
- // Link the new states
- atn.ruleToStartState[idx].AddTransition(NewEpsilonTransition(bypassStart, -1), -1)
- bypassStop.AddTransition(NewEpsilonTransition(endState, -1), -1)
-
- MatchState := NewBasicState()
-
- atn.addState(MatchState)
- MatchState.AddTransition(NewAtomTransition(bypassStop, atn.ruleToTokenType[idx]), -1)
- bypassStart.AddTransition(NewEpsilonTransition(MatchState, -1), -1)
-}
-
-func (a *ATNDeserializer) stateIsEndStateFor(state ATNState, idx int) ATNState {
- if state.GetRuleIndex() != idx {
- return nil
- }
-
- if _, ok := state.(*StarLoopEntryState); !ok {
- return nil
- }
-
- maybeLoopEndState := state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
-
- if _, ok := maybeLoopEndState.(*LoopEndState); !ok {
- return nil
- }
-
- var _, ok = maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)
-
- if maybeLoopEndState.(*LoopEndState).epsilonOnlyTransitions && ok {
- return state
- }
-
- return nil
-}
-
-// markPrecedenceDecisions analyzes the StarLoopEntryState states in the
-// specified ATN to set the StarLoopEntryState.precedenceRuleDecision field to
-// the correct value.
-func (a *ATNDeserializer) markPrecedenceDecisions(atn *ATN) {
- for _, state := range atn.states {
- if _, ok := state.(*StarLoopEntryState); !ok {
- continue
- }
-
- // We analyze the [ATN] to determine if an ATN decision state is the
- // decision for the closure block that determines whether a
- // precedence rule should continue or complete.
- if atn.ruleToStartState[state.GetRuleIndex()].isPrecedenceRule {
- maybeLoopEndState := state.GetTransitions()[len(state.GetTransitions())-1].getTarget()
-
- if s3, ok := maybeLoopEndState.(*LoopEndState); ok {
- var _, ok2 = maybeLoopEndState.GetTransitions()[0].getTarget().(*RuleStopState)
-
- if s3.epsilonOnlyTransitions && ok2 {
- state.(*StarLoopEntryState).precedenceRuleDecision = true
- }
- }
- }
- }
-}
-
-func (a *ATNDeserializer) verifyATN(atn *ATN) {
- if !a.options.VerifyATN() {
- return
- }
-
- // Verify assumptions
- for _, state := range atn.states {
- if state == nil {
- continue
- }
-
- a.checkCondition(state.GetEpsilonOnlyTransitions() || len(state.GetTransitions()) <= 1, "")
-
- switch s2 := state.(type) {
- case *PlusBlockStartState:
- a.checkCondition(s2.loopBackState != nil, "")
-
- case *StarLoopEntryState:
- a.checkCondition(s2.loopBackState != nil, "")
- a.checkCondition(len(s2.GetTransitions()) == 2, "")
-
- switch s2.transitions[0].getTarget().(type) {
- case *StarBlockStartState:
- _, ok := s2.transitions[1].getTarget().(*LoopEndState)
-
- a.checkCondition(ok, "")
- a.checkCondition(!s2.nonGreedy, "")
-
- case *LoopEndState:
- var _, ok = s2.transitions[1].getTarget().(*StarBlockStartState)
-
- a.checkCondition(ok, "")
- a.checkCondition(s2.nonGreedy, "")
-
- default:
- panic("IllegalState")
- }
-
- case *StarLoopbackState:
- a.checkCondition(len(state.GetTransitions()) == 1, "")
-
- var _, ok = state.GetTransitions()[0].getTarget().(*StarLoopEntryState)
-
- a.checkCondition(ok, "")
-
- case *LoopEndState:
- a.checkCondition(s2.loopBackState != nil, "")
-
- case *RuleStartState:
- a.checkCondition(s2.stopState != nil, "")
-
- case BlockStartState:
- a.checkCondition(s2.getEndState() != nil, "")
-
- case *BlockEndState:
- a.checkCondition(s2.startState != nil, "")
-
- case DecisionState:
- a.checkCondition(len(s2.GetTransitions()) <= 1 || s2.getDecision() >= 0, "")
-
- default:
- var _, ok = s2.(*RuleStopState)
-
- a.checkCondition(len(s2.GetTransitions()) <= 1 || ok, "")
- }
- }
-}
-
-func (a *ATNDeserializer) checkCondition(condition bool, message string) {
- if !condition {
- if message == "" {
- message = "IllegalState"
- }
-
- panic(message)
- }
-}
-
-func (a *ATNDeserializer) readInt() int {
- v := a.data[a.pos]
-
- a.pos++
-
- return int(v) // data is 32 bits but int is at least that big
-}
-
-func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, _, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition {
- target := atn.states[trg]
-
- switch typeIndex {
- case TransitionEPSILON:
- return NewEpsilonTransition(target, -1)
-
- case TransitionRANGE:
- if arg3 != 0 {
- return NewRangeTransition(target, TokenEOF, arg2)
- }
-
- return NewRangeTransition(target, arg1, arg2)
-
- case TransitionRULE:
- return NewRuleTransition(atn.states[arg1], arg2, arg3, target)
-
- case TransitionPREDICATE:
- return NewPredicateTransition(target, arg1, arg2, arg3 != 0)
-
- case TransitionPRECEDENCE:
- return NewPrecedencePredicateTransition(target, arg1)
-
- case TransitionATOM:
- if arg3 != 0 {
- return NewAtomTransition(target, TokenEOF)
- }
-
- return NewAtomTransition(target, arg1)
-
- case TransitionACTION:
- return NewActionTransition(target, arg1, arg2, arg3 != 0)
-
- case TransitionSET:
- return NewSetTransition(target, sets[arg1])
-
- case TransitionNOTSET:
- return NewNotSetTransition(target, sets[arg1])
-
- case TransitionWILDCARD:
- return NewWildcardTransition(target)
- }
-
- panic("The specified transition type is not valid.")
-}
-
-func (a *ATNDeserializer) stateFactory(typeIndex, ruleIndex int) ATNState {
- var s ATNState
-
- switch typeIndex {
- case ATNStateInvalidType:
- return nil
-
- case ATNStateBasic:
- s = NewBasicState()
-
- case ATNStateRuleStart:
- s = NewRuleStartState()
-
- case ATNStateBlockStart:
- s = NewBasicBlockStartState()
-
- case ATNStatePlusBlockStart:
- s = NewPlusBlockStartState()
-
- case ATNStateStarBlockStart:
- s = NewStarBlockStartState()
-
- case ATNStateTokenStart:
- s = NewTokensStartState()
-
- case ATNStateRuleStop:
- s = NewRuleStopState()
-
- case ATNStateBlockEnd:
- s = NewBlockEndState()
-
- case ATNStateStarLoopBack:
- s = NewStarLoopbackState()
-
- case ATNStateStarLoopEntry:
- s = NewStarLoopEntryState()
-
- case ATNStatePlusLoopBack:
- s = NewPlusLoopbackState()
-
- case ATNStateLoopEnd:
- s = NewLoopEndState()
-
- default:
- panic(fmt.Sprintf("state type %d is invalid", typeIndex))
- }
-
- s.SetRuleIndex(ruleIndex)
-
- return s
-}
-
-func (a *ATNDeserializer) lexerActionFactory(typeIndex, data1, data2 int) LexerAction {
- switch typeIndex {
- case LexerActionTypeChannel:
- return NewLexerChannelAction(data1)
-
- case LexerActionTypeCustom:
- return NewLexerCustomAction(data1, data2)
-
- case LexerActionTypeMode:
- return NewLexerModeAction(data1)
-
- case LexerActionTypeMore:
- return LexerMoreActionINSTANCE
-
- case LexerActionTypePopMode:
- return LexerPopModeActionINSTANCE
-
- case LexerActionTypePushMode:
- return NewLexerPushModeAction(data1)
-
- case LexerActionTypeSkip:
- return LexerSkipActionINSTANCE
-
- case LexerActionTypeType:
- return NewLexerTypeAction(data1)
-
- default:
- panic(fmt.Sprintf("lexer action %d is invalid", typeIndex))
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go b/vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go
deleted file mode 100644
index afe6c9f80..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewATNConfigSet(false))
-
-type IATNSimulator interface {
- SharedContextCache() *PredictionContextCache
- ATN() *ATN
- DecisionToDFA() []*DFA
-}
-
-type BaseATNSimulator struct {
- atn *ATN
- sharedContextCache *PredictionContextCache
- decisionToDFA []*DFA
-}
-
-func (b *BaseATNSimulator) getCachedContext(context *PredictionContext) *PredictionContext {
- if b.sharedContextCache == nil {
- return context
- }
-
- //visited := NewJMap[*PredictionContext, *PredictionContext, Comparator[*PredictionContext]](pContextEqInst, PredictionVisitedCollection, "Visit map in getCachedContext()")
- visited := NewVisitRecord()
- return getCachedBasePredictionContext(context, b.sharedContextCache, visited)
-}
-
-func (b *BaseATNSimulator) SharedContextCache() *PredictionContextCache {
- return b.sharedContextCache
-}
-
-func (b *BaseATNSimulator) ATN() *ATN {
- return b.atn
-}
-
-func (b *BaseATNSimulator) DecisionToDFA() []*DFA {
- return b.decisionToDFA
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_state.go b/vendor/github.com/antlr4-go/antlr/v4/atn_state.go
deleted file mode 100644
index 2ae5807cd..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/atn_state.go
+++ /dev/null
@@ -1,461 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "os"
- "strconv"
-)
-
-// Constants for serialization.
-const (
- ATNStateInvalidType = 0
- ATNStateBasic = 1
- ATNStateRuleStart = 2
- ATNStateBlockStart = 3
- ATNStatePlusBlockStart = 4
- ATNStateStarBlockStart = 5
- ATNStateTokenStart = 6
- ATNStateRuleStop = 7
- ATNStateBlockEnd = 8
- ATNStateStarLoopBack = 9
- ATNStateStarLoopEntry = 10
- ATNStatePlusLoopBack = 11
- ATNStateLoopEnd = 12
-
- ATNStateInvalidStateNumber = -1
-)
-
-//goland:noinspection GoUnusedGlobalVariable
-var ATNStateInitialNumTransitions = 4
-
-type ATNState interface {
- GetEpsilonOnlyTransitions() bool
-
- GetRuleIndex() int
- SetRuleIndex(int)
-
- GetNextTokenWithinRule() *IntervalSet
- SetNextTokenWithinRule(*IntervalSet)
-
- GetATN() *ATN
- SetATN(*ATN)
-
- GetStateType() int
-
- GetStateNumber() int
- SetStateNumber(int)
-
- GetTransitions() []Transition
- SetTransitions([]Transition)
- AddTransition(Transition, int)
-
- String() string
- Hash() int
- Equals(Collectable[ATNState]) bool
-}
-
-type BaseATNState struct {
- // NextTokenWithinRule caches lookahead during parsing. Not used during construction.
- NextTokenWithinRule *IntervalSet
-
- // atn is the current ATN.
- atn *ATN
-
- epsilonOnlyTransitions bool
-
- // ruleIndex tracks the Rule index because there are no Rule objects at runtime.
- ruleIndex int
-
- stateNumber int
-
- stateType int
-
- // Track the transitions emanating from this ATN state.
- transitions []Transition
-}
-
-func NewATNState() *BaseATNState {
- return &BaseATNState{stateNumber: ATNStateInvalidStateNumber, stateType: ATNStateInvalidType}
-}
-
-func (as *BaseATNState) GetRuleIndex() int {
- return as.ruleIndex
-}
-
-func (as *BaseATNState) SetRuleIndex(v int) {
- as.ruleIndex = v
-}
-func (as *BaseATNState) GetEpsilonOnlyTransitions() bool {
- return as.epsilonOnlyTransitions
-}
-
-func (as *BaseATNState) GetATN() *ATN {
- return as.atn
-}
-
-func (as *BaseATNState) SetATN(atn *ATN) {
- as.atn = atn
-}
-
-func (as *BaseATNState) GetTransitions() []Transition {
- return as.transitions
-}
-
-func (as *BaseATNState) SetTransitions(t []Transition) {
- as.transitions = t
-}
-
-func (as *BaseATNState) GetStateType() int {
- return as.stateType
-}
-
-func (as *BaseATNState) GetStateNumber() int {
- return as.stateNumber
-}
-
-func (as *BaseATNState) SetStateNumber(stateNumber int) {
- as.stateNumber = stateNumber
-}
-
-func (as *BaseATNState) GetNextTokenWithinRule() *IntervalSet {
- return as.NextTokenWithinRule
-}
-
-func (as *BaseATNState) SetNextTokenWithinRule(v *IntervalSet) {
- as.NextTokenWithinRule = v
-}
-
-func (as *BaseATNState) Hash() int {
- return as.stateNumber
-}
-
-func (as *BaseATNState) String() string {
- return strconv.Itoa(as.stateNumber)
-}
-
-func (as *BaseATNState) Equals(other Collectable[ATNState]) bool {
- if ot, ok := other.(ATNState); ok {
- return as.stateNumber == ot.GetStateNumber()
- }
-
- return false
-}
-
-func (as *BaseATNState) isNonGreedyExitState() bool {
- return false
-}
-
-func (as *BaseATNState) AddTransition(trans Transition, index int) {
- if len(as.transitions) == 0 {
- as.epsilonOnlyTransitions = trans.getIsEpsilon()
- } else if as.epsilonOnlyTransitions != trans.getIsEpsilon() {
-		_, _ = fmt.Fprintf(os.Stderr, "ATN state %d has both epsilon and non-epsilon transitions.\n", as.stateNumber)
- as.epsilonOnlyTransitions = false
- }
-
- // TODO: Check code for already present compared to the Java equivalent
- //alreadyPresent := false
- //for _, t := range as.transitions {
- // if t.getTarget().GetStateNumber() == trans.getTarget().GetStateNumber() {
- // if t.getLabel() != nil && trans.getLabel() != nil && trans.getLabel().Equals(t.getLabel()) {
- // alreadyPresent = true
- // break
- // }
- // } else if t.getIsEpsilon() && trans.getIsEpsilon() {
- // alreadyPresent = true
- // break
- // }
- //}
- //if !alreadyPresent {
- if index == -1 {
- as.transitions = append(as.transitions, trans)
- } else {
- as.transitions = append(as.transitions[:index], append([]Transition{trans}, as.transitions[index:]...)...)
- // TODO: as.transitions.splice(index, 1, trans)
- }
- //} else {
- // _, _ = fmt.Fprintf(os.Stderr, "Transition already present in state %d\n", as.stateNumber)
- //}
-}
-
-type BasicState struct {
- BaseATNState
-}
-
-func NewBasicState() *BasicState {
- return &BasicState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateBasic,
- },
- }
-}
-
-type DecisionState interface {
- ATNState
-
- getDecision() int
- setDecision(int)
-
- getNonGreedy() bool
- setNonGreedy(bool)
-}
-
-type BaseDecisionState struct {
- BaseATNState
- decision int
- nonGreedy bool
-}
-
-func NewBaseDecisionState() *BaseDecisionState {
- return &BaseDecisionState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateBasic,
- },
- decision: -1,
- }
-}
-
-func (s *BaseDecisionState) getDecision() int {
- return s.decision
-}
-
-func (s *BaseDecisionState) setDecision(b int) {
- s.decision = b
-}
-
-func (s *BaseDecisionState) getNonGreedy() bool {
- return s.nonGreedy
-}
-
-func (s *BaseDecisionState) setNonGreedy(b bool) {
- s.nonGreedy = b
-}
-
-type BlockStartState interface {
- DecisionState
-
- getEndState() *BlockEndState
- setEndState(*BlockEndState)
-}
-
-// BaseBlockStartState is the start of a regular (...) block.
-type BaseBlockStartState struct {
- BaseDecisionState
- endState *BlockEndState
-}
-
-func NewBlockStartState() *BaseBlockStartState {
- return &BaseBlockStartState{
- BaseDecisionState: BaseDecisionState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateBasic,
- },
- decision: -1,
- },
- }
-}
-
-func (s *BaseBlockStartState) getEndState() *BlockEndState {
- return s.endState
-}
-
-func (s *BaseBlockStartState) setEndState(b *BlockEndState) {
- s.endState = b
-}
-
-type BasicBlockStartState struct {
- BaseBlockStartState
-}
-
-func NewBasicBlockStartState() *BasicBlockStartState {
- return &BasicBlockStartState{
- BaseBlockStartState: BaseBlockStartState{
- BaseDecisionState: BaseDecisionState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateBlockStart,
- },
- },
- },
- }
-}
-
-var _ BlockStartState = &BasicBlockStartState{}
-
-// BlockEndState is a terminal node of a simple (a|b|c) block.
-type BlockEndState struct {
- BaseATNState
- startState ATNState
-}
-
-func NewBlockEndState() *BlockEndState {
- return &BlockEndState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateBlockEnd,
- },
- startState: nil,
- }
-}
-
-// RuleStopState is the last node in the ATN for a rule, unless that rule is the
-// start symbol. In that case, there is one transition to EOF. Later, we might
-// encode references to all calls to this rule to compute FOLLOW sets for error
-// handling.
-type RuleStopState struct {
- BaseATNState
-}
-
-func NewRuleStopState() *RuleStopState {
- return &RuleStopState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateRuleStop,
- },
- }
-}
-
-type RuleStartState struct {
- BaseATNState
- stopState ATNState
- isPrecedenceRule bool
-}
-
-func NewRuleStartState() *RuleStartState {
- return &RuleStartState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateRuleStart,
- },
- }
-}
-
-// PlusLoopbackState is a decision state for A+ and (A|B)+. It has two
-// transitions: one to the loop back to start of the block, and one to exit.
-type PlusLoopbackState struct {
- BaseDecisionState
-}
-
-func NewPlusLoopbackState() *PlusLoopbackState {
- return &PlusLoopbackState{
- BaseDecisionState: BaseDecisionState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStatePlusLoopBack,
- },
- },
- }
-}
-
-// PlusBlockStartState is the start of a (A|B|...)+ loop. Technically it is a
-// decision state; we don't use it for code generation. Somebody might need it,
-// so it is included for completeness. In reality, PlusLoopbackState is the real
-// decision-making node for A+.
-type PlusBlockStartState struct {
- BaseBlockStartState
- loopBackState ATNState
-}
-
-func NewPlusBlockStartState() *PlusBlockStartState {
- return &PlusBlockStartState{
- BaseBlockStartState: BaseBlockStartState{
- BaseDecisionState: BaseDecisionState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStatePlusBlockStart,
- },
- },
- },
- }
-}
-
-var _ BlockStartState = &PlusBlockStartState{}
-
-// StarBlockStartState is the block that begins a closure loop.
-type StarBlockStartState struct {
- BaseBlockStartState
-}
-
-func NewStarBlockStartState() *StarBlockStartState {
- return &StarBlockStartState{
- BaseBlockStartState: BaseBlockStartState{
- BaseDecisionState: BaseDecisionState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateStarBlockStart,
- },
- },
- },
- }
-}
-
-var _ BlockStartState = &StarBlockStartState{}
-
-type StarLoopbackState struct {
- BaseATNState
-}
-
-func NewStarLoopbackState() *StarLoopbackState {
- return &StarLoopbackState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateStarLoopBack,
- },
- }
-}
-
-type StarLoopEntryState struct {
- BaseDecisionState
- loopBackState ATNState
- precedenceRuleDecision bool
-}
-
-func NewStarLoopEntryState() *StarLoopEntryState {
-	// precedenceRuleDecision defaults to false; it indicates whether this state can benefit from a precedence DFA during SLL decision making.
- return &StarLoopEntryState{
- BaseDecisionState: BaseDecisionState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateStarLoopEntry,
- },
- },
- }
-}
-
-// LoopEndState marks the end of a * or + loop.
-type LoopEndState struct {
- BaseATNState
- loopBackState ATNState
-}
-
-func NewLoopEndState() *LoopEndState {
- return &LoopEndState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateLoopEnd,
- },
- }
-}
-
-// TokensStartState is the Tokens rule start state linking to each lexer rule start state.
-type TokensStartState struct {
- BaseDecisionState
-}
-
-func NewTokensStartState() *TokensStartState {
- return &TokensStartState{
- BaseDecisionState: BaseDecisionState{
- BaseATNState: BaseATNState{
- stateNumber: ATNStateInvalidStateNumber,
- stateType: ATNStateTokenStart,
- },
- },
- }
-}
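As an aside on the pattern used throughout atn_state.go above: the Java class hierarchy is emulated with Go struct embedding, and the "var _ BlockStartState = &BasicBlockStartState{}" lines are compile-time interface assertions. A stripped-down sketch of the same idea, with illustrative names that are not part of the runtime:

	package main

	import "fmt"

	type state interface{ StateType() string }

	// base carries the shared field and method, as BaseATNState does above.
	type base struct{ kind string }

	func (b *base) StateType() string { return b.kind }

	// blockStart embeds base and so inherits StateType without redeclaring it.
	type blockStart struct{ base }

	// Compile-time assertion that *blockStart satisfies state.
	var _ state = &blockStart{}

	func main() {
		s := &blockStart{base{kind: "block start"}}
		fmt.Println(s.StateType()) // prints: block start
	}
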
diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_type.go b/vendor/github.com/antlr4-go/antlr/v4/atn_type.go
deleted file mode 100644
index 3a515a145..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/atn_type.go
+++ /dev/null
@@ -1,11 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-// Represent the type of recognizer an ATN applies to.
-const (
- ATNTypeLexer = 0
- ATNTypeParser = 1
-)
diff --git a/vendor/github.com/antlr4-go/antlr/v4/char_stream.go b/vendor/github.com/antlr4-go/antlr/v4/char_stream.go
deleted file mode 100644
index bd8127b6b..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/char_stream.go
+++ /dev/null
@@ -1,12 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-type CharStream interface {
- IntStream
- GetText(int, int) string
- GetTextFromTokens(start, end Token) string
- GetTextFromInterval(Interval) string
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/common_token_factory.go b/vendor/github.com/antlr4-go/antlr/v4/common_token_factory.go
deleted file mode 100644
index 1bb0314ea..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/common_token_factory.go
+++ /dev/null
@@ -1,56 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-// TokenFactory creates CommonToken objects.
-type TokenFactory interface {
- Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token
-}
-
-// CommonTokenFactory is the default TokenFactory implementation.
-type CommonTokenFactory struct {
- // copyText indicates whether CommonToken.setText should be called after
- // constructing tokens to explicitly set the text. This is useful for cases
- // where the input stream might not be able to provide arbitrary substrings of
- // text from the input after the lexer creates a token (e.g. the
-	// implementation of CharStream.GetText in UnbufferedCharStream panics with an
- // UnsupportedOperationException). Explicitly setting the token text allows
- // Token.GetText to be called at any time regardless of the input stream
- // implementation.
- //
- // The default value is false to avoid the performance and memory overhead of
- // copying text for every token unless explicitly requested.
- copyText bool
-}
-
-func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
- return &CommonTokenFactory{copyText: copyText}
-}
-
-// CommonTokenFactoryDEFAULT is the default CommonTokenFactory. It does not
-// explicitly copy token text when constructing tokens.
-var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
-
-func (c *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token {
- t := NewCommonToken(source, ttype, channel, start, stop)
-
- t.line = line
- t.column = column
-
- if text != "" {
- t.SetText(text)
- } else if c.copyText && source.charStream != nil {
- t.SetText(source.charStream.GetTextFromInterval(NewInterval(start, stop)))
- }
-
- return t
-}
-
-func (c *CommonTokenFactory) createThin(ttype int, text string) Token {
- t := NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
- t.SetText(text)
-
- return t
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go b/vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go
deleted file mode 100644
index b75da9df0..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go
+++ /dev/null
@@ -1,450 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "strconv"
-)
-
-// CommonTokenStream is an implementation of TokenStream that loads tokens from
-// a TokenSource on-demand and places the tokens in a buffer to provide access
-// to any previous token by index. This token stream ignores the value of
-// Token.getChannel. If your parser requires the token stream to filter tokens
-// to only those on a particular channel, such as Token.DEFAULT_CHANNEL or
-// Token.HIDDEN_CHANNEL, use a filtering token stream such as CommonTokenStream.
-type CommonTokenStream struct {
- channel int
-
- // fetchedEOF indicates whether the Token.EOF token has been fetched from
- // tokenSource and added to tokens. This field improves performance for the
- // following cases:
- //
-	// consume: The lookahead check in consume to prevent consuming the EOF symbol is
- // optimized by checking the values of fetchedEOF and p instead of calling LA.
- //
- // fetch: The check to prevent adding multiple EOF symbols into tokens is
-	// trivial with this field.
- fetchedEOF bool
-
- // index into [tokens] of the current token (next token to consume).
- // tokens[p] should be LT(1). It is set to -1 when the stream is first
- // constructed or when SetTokenSource is called, indicating that the first token
- // has not yet been fetched from the token source. For additional information,
- // see the documentation of [IntStream] for a description of initializing methods.
- index int
-
-	// tokenSource is the [TokenSource] from which tokens for this stream are
- // fetched.
- tokenSource TokenSource
-
- // tokens contains all tokens fetched from the token source. The list is considered a
- // complete view of the input once fetchedEOF is set to true.
- tokens []Token
-}
-
-// NewCommonTokenStream creates a new CommonTokenStream instance using the supplied lexer to produce
-// tokens and will pull tokens from the given lexer channel.
-func NewCommonTokenStream(lexer Lexer, channel int) *CommonTokenStream {
- return &CommonTokenStream{
- channel: channel,
- index: -1,
- tokenSource: lexer,
- tokens: make([]Token, 0),
- }
-}
-
-// GetAllTokens returns all tokens currently pulled from the token source.
-func (c *CommonTokenStream) GetAllTokens() []Token {
- return c.tokens
-}
-
-func (c *CommonTokenStream) Mark() int {
- return 0
-}
-
-func (c *CommonTokenStream) Release(_ int) {}
-
-func (c *CommonTokenStream) Reset() {
- c.fetchedEOF = false
- c.tokens = make([]Token, 0)
- c.Seek(0)
-}
-
-func (c *CommonTokenStream) Seek(index int) {
- c.lazyInit()
- c.index = c.adjustSeekIndex(index)
-}
-
-func (c *CommonTokenStream) Get(index int) Token {
- c.lazyInit()
-
- return c.tokens[index]
-}
-
-func (c *CommonTokenStream) Consume() {
- SkipEOFCheck := false
-
- if c.index >= 0 {
- if c.fetchedEOF {
-			// The last token in tokens is EOF. Skip the check if p indexes any fetched
-			// token except the last.
- SkipEOFCheck = c.index < len(c.tokens)-1
- } else {
- // No EOF token in tokens. Skip the check if p indexes a fetched token.
- SkipEOFCheck = c.index < len(c.tokens)
- }
- } else {
- // Not yet initialized
- SkipEOFCheck = false
- }
-
- if !SkipEOFCheck && c.LA(1) == TokenEOF {
- panic("cannot consume EOF")
- }
-
- if c.Sync(c.index + 1) {
- c.index = c.adjustSeekIndex(c.index + 1)
- }
-}
-
-// Sync makes sure index i in tokens has a token, returning true if a token is
-// located at index i and false otherwise.
-func (c *CommonTokenStream) Sync(i int) bool {
- n := i - len(c.tokens) + 1 // How many more elements do we need?
-
- if n > 0 {
- fetched := c.fetch(n)
- return fetched >= n
- }
-
- return true
-}
-
-// fetch adds n elements to buffer and returns the actual number of elements
-// added to the buffer.
-func (c *CommonTokenStream) fetch(n int) int {
- if c.fetchedEOF {
- return 0
- }
-
- for i := 0; i < n; i++ {
- t := c.tokenSource.NextToken()
-
- t.SetTokenIndex(len(c.tokens))
- c.tokens = append(c.tokens, t)
-
- if t.GetTokenType() == TokenEOF {
- c.fetchedEOF = true
-
- return i + 1
- }
- }
-
- return n
-}
-
-// GetTokens gets all tokens from start to stop inclusive.
-func (c *CommonTokenStream) GetTokens(start int, stop int, types *IntervalSet) []Token {
- if start < 0 || stop < 0 {
- return nil
- }
-
- c.lazyInit()
-
- subset := make([]Token, 0)
-
- if stop >= len(c.tokens) {
- stop = len(c.tokens) - 1
- }
-
- for i := start; i < stop; i++ {
- t := c.tokens[i]
-
- if t.GetTokenType() == TokenEOF {
- break
- }
-
- if types == nil || types.contains(t.GetTokenType()) {
- subset = append(subset, t)
- }
- }
-
- return subset
-}
-
-func (c *CommonTokenStream) LA(i int) int {
- return c.LT(i).GetTokenType()
-}
-
-func (c *CommonTokenStream) lazyInit() {
- if c.index == -1 {
- c.setup()
- }
-}
-
-func (c *CommonTokenStream) setup() {
- c.Sync(0)
- c.index = c.adjustSeekIndex(0)
-}
-
-func (c *CommonTokenStream) GetTokenSource() TokenSource {
- return c.tokenSource
-}
-
-// SetTokenSource resets the c token stream by setting its token source.
-func (c *CommonTokenStream) SetTokenSource(tokenSource TokenSource) {
- c.tokenSource = tokenSource
- c.tokens = make([]Token, 0)
- c.index = -1
- c.fetchedEOF = false
-}
-
-// NextTokenOnChannel returns the index of the next token on channel given a
-// starting index. Returns i if tokens[i] is on channel. Returns -1 if there are
-// no tokens on channel between 'i' and [TokenEOF].
-func (c *CommonTokenStream) NextTokenOnChannel(i, _ int) int {
- c.Sync(i)
-
- if i >= len(c.tokens) {
- return -1
- }
-
- token := c.tokens[i]
-
- for token.GetChannel() != c.channel {
- if token.GetTokenType() == TokenEOF {
- return -1
- }
-
- i++
- c.Sync(i)
- token = c.tokens[i]
- }
-
- return i
-}
-
-// previousTokenOnChannel returns the index of the previous token on channel
-// given a starting index. Returns i if tokens[i] is on channel. Returns -1 if
-// there are no tokens on channel between i and 0.
-func (c *CommonTokenStream) previousTokenOnChannel(i, channel int) int {
- for i >= 0 && c.tokens[i].GetChannel() != channel {
- i--
- }
-
- return i
-}
-
-// GetHiddenTokensToRight collects all tokens on a specified channel to the
-// right of the current token up until we see a token on DEFAULT_TOKEN_CHANNEL
-// or EOF. If channel is -1, it finds any non-default channel token.
-func (c *CommonTokenStream) GetHiddenTokensToRight(tokenIndex, channel int) []Token {
- c.lazyInit()
-
- if tokenIndex < 0 || tokenIndex >= len(c.tokens) {
- panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
- }
-
- nextOnChannel := c.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
- from := tokenIndex + 1
-
- // If no onChannel to the right, then nextOnChannel == -1, so set 'to' to the last token
- var to int
-
- if nextOnChannel == -1 {
- to = len(c.tokens) - 1
- } else {
- to = nextOnChannel
- }
-
- return c.filterForChannel(from, to, channel)
-}
-
-// GetHiddenTokensToLeft collects all tokens on channel to the left of the
-// current token until we see a token on DEFAULT_TOKEN_CHANNEL. If channel is
-// -1, it finds any non-default channel token.
-func (c *CommonTokenStream) GetHiddenTokensToLeft(tokenIndex, channel int) []Token {
- c.lazyInit()
-
- if tokenIndex < 0 || tokenIndex >= len(c.tokens) {
- panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
- }
-
- prevOnChannel := c.previousTokenOnChannel(tokenIndex-1, LexerDefaultTokenChannel)
-
- if prevOnChannel == tokenIndex-1 {
- return nil
- }
-
- // If there are none on channel to the left and prevOnChannel == -1 then from = 0
- from := prevOnChannel + 1
- to := tokenIndex - 1
-
- return c.filterForChannel(from, to, channel)
-}
-
-func (c *CommonTokenStream) filterForChannel(left, right, channel int) []Token {
- hidden := make([]Token, 0)
-
- for i := left; i < right+1; i++ {
- t := c.tokens[i]
-
- if channel == -1 {
- if t.GetChannel() != LexerDefaultTokenChannel {
- hidden = append(hidden, t)
- }
- } else if t.GetChannel() == channel {
- hidden = append(hidden, t)
- }
- }
-
- if len(hidden) == 0 {
- return nil
- }
-
- return hidden
-}
-
-func (c *CommonTokenStream) GetSourceName() string {
- return c.tokenSource.GetSourceName()
-}
-
-func (c *CommonTokenStream) Size() int {
- return len(c.tokens)
-}
-
-func (c *CommonTokenStream) Index() int {
- return c.index
-}
-
-func (c *CommonTokenStream) GetAllText() string {
- c.Fill()
- return c.GetTextFromInterval(NewInterval(0, len(c.tokens)-1))
-}
-
-func (c *CommonTokenStream) GetTextFromTokens(start, end Token) string {
- if start == nil || end == nil {
- return ""
- }
-
- return c.GetTextFromInterval(NewInterval(start.GetTokenIndex(), end.GetTokenIndex()))
-}
-
-func (c *CommonTokenStream) GetTextFromRuleContext(interval RuleContext) string {
- return c.GetTextFromInterval(interval.GetSourceInterval())
-}
-
-func (c *CommonTokenStream) GetTextFromInterval(interval Interval) string {
- c.lazyInit()
- c.Sync(interval.Stop)
-
- start := interval.Start
- stop := interval.Stop
-
- if start < 0 || stop < 0 {
- return ""
- }
-
- if stop >= len(c.tokens) {
- stop = len(c.tokens) - 1
- }
-
- s := ""
-
- for i := start; i < stop+1; i++ {
- t := c.tokens[i]
-
- if t.GetTokenType() == TokenEOF {
- break
- }
-
- s += t.GetText()
- }
-
- return s
-}
-
-// Fill gets all tokens from the lexer until EOF.
-func (c *CommonTokenStream) Fill() {
- c.lazyInit()
-
- for c.fetch(1000) == 1000 {
- continue
- }
-}
-
-func (c *CommonTokenStream) adjustSeekIndex(i int) int {
- return c.NextTokenOnChannel(i, c.channel)
-}
-
-func (c *CommonTokenStream) LB(k int) Token {
- if k == 0 || c.index-k < 0 {
- return nil
- }
-
- i := c.index
- n := 1
-
- // Find k good tokens looking backward
- for n <= k {
- // Skip off-channel tokens
- i = c.previousTokenOnChannel(i-1, c.channel)
- n++
- }
-
- if i < 0 {
- return nil
- }
-
- return c.tokens[i]
-}
-
-func (c *CommonTokenStream) LT(k int) Token {
- c.lazyInit()
-
- if k == 0 {
- return nil
- }
-
- if k < 0 {
- return c.LB(-k)
- }
-
- i := c.index
- n := 1 // We know tokens[n] is valid
-
- // Find k good tokens
- for n < k {
- // Skip off-channel tokens, but make sure to not look past EOF
- if c.Sync(i + 1) {
- i = c.NextTokenOnChannel(i+1, c.channel)
- }
-
- n++
- }
-
- return c.tokens[i]
-}
-
-// getNumberOfOnChannelTokens counts EOF once.
-func (c *CommonTokenStream) getNumberOfOnChannelTokens() int {
- var n int
-
- c.Fill()
-
- for i := 0; i < len(c.tokens); i++ {
- t := c.tokens[i]
-
- if t.GetChannel() == c.channel {
- n++
- }
-
- if t.GetTokenType() == TokenEOF {
- break
- }
- }
-
- return n
-}
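For orientation on the API being removed here, a minimal sketch of how this stream is typically driven from application code. NewCommonTokenStream, Fill, GetAllTokens and TokenDefaultChannel are all defined in the runtime files in this patch; the lexer passed in would normally be an ANTLR-generated one, which is assumed rather than shown:

	package main

	import (
		"fmt"

		"github.com/antlr4-go/antlr/v4"
	)

	// dumpTokens buffers every token from the given lexer and prints its text.
	// Fill drives fetch() until EOF; GetAllTokens then returns the buffered slice.
	func dumpTokens(lexer antlr.Lexer) {
		stream := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
		stream.Fill()
		for _, tok := range stream.GetAllTokens() {
			fmt.Println(tok.GetText())
		}
	}

	func main() {
		// A real caller would construct a generated lexer here, e.g.
		// dumpTokens(parser.NewMyLexer(antlr.NewInputStream("some input"))),
		// where parser.NewMyLexer is a hypothetical generated constructor.
	}
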
diff --git a/vendor/github.com/antlr4-go/antlr/v4/comparators.go b/vendor/github.com/antlr4-go/antlr/v4/comparators.go
deleted file mode 100644
index 7467e9b43..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/comparators.go
+++ /dev/null
@@ -1,150 +0,0 @@
-package antlr
-
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-// This file contains all the implementations of custom comparators used for generic collections when the
-// Hash() and Equals() funcs supplied by the struct objects themselves need to be overridden. Normally, we would
-// put the comparators in the source file for the structs themselves, but given the organization of this code is
-// sorta kinda based upon the Java code, I found it confusing trying to find out which comparator was where and used by
-// which instantiation of a collection. For instance, an Array2DHashSet in the Java source, when used with ATNConfig
-// collections requires three different comparators depending on what the collection is being used for. Collecting - pun intended -
-// all the comparators here makes it much easier to see which implementation of hash and equals is used by which collection.
-// It also makes it easy to verify that the Hash() and Equals() functions marry up with the Java implementations.
-
-// ObjEqComparator is the equivalent of the Java ObjectEqualityComparator, which is the default instance of
-// Equality comparator. We do not have inheritance in Go, only interfaces, so we use generics to enforce some
-// type safety and avoid having to implement this for every type that we want to perform comparison on.
-//
-// This comparator works by using the standard Hash() and Equals() methods of the type T that is being compared, which
-// allows us to use it in any collection instance that does not require a special hash or equals implementation.
-type ObjEqComparator[T Collectable[T]] struct{}
-
-var (
- aStateEqInst = &ObjEqComparator[ATNState]{}
- aConfEqInst = &ObjEqComparator[*ATNConfig]{}
-
- // aConfCompInst is the comparator used for the ATNConfigSet for the configLookup cache
- aConfCompInst = &ATNConfigComparator[*ATNConfig]{}
- atnConfCompInst = &BaseATNConfigComparator[*ATNConfig]{}
- dfaStateEqInst = &ObjEqComparator[*DFAState]{}
- semctxEqInst = &ObjEqComparator[SemanticContext]{}
- atnAltCfgEqInst = &ATNAltConfigComparator[*ATNConfig]{}
- pContextEqInst = &ObjEqComparator[*PredictionContext]{}
-)
-
-// Equals2 delegates to the Equals() method of type T
-func (c *ObjEqComparator[T]) Equals2(o1, o2 T) bool {
- return o1.Equals(o2)
-}
-
-// Hash1 delegates to the Hash() method of type T
-func (c *ObjEqComparator[T]) Hash1(o T) int {
-
- return o.Hash()
-}
-
-type SemCComparator[T Collectable[T]] struct{}
-
-// ATNConfigComparator is used as the comparator for the configLookup field of an ATNConfigSet
-// and has a custom Equals() and Hash() implementation, because equality is not based on the
-// standard Hash() and Equals() methods of the ATNConfig type.
-type ATNConfigComparator[T Collectable[T]] struct {
-}
-
-// Equals2 is a custom comparator for ATNConfigs specifically for configLookup
-func (c *ATNConfigComparator[T]) Equals2(o1, o2 *ATNConfig) bool {
-
- // Same pointer, must be equal, even if both nil
- //
- if o1 == o2 {
- return true
-
- }
-
- // If either are nil, but not both, then the result is false
- //
- if o1 == nil || o2 == nil {
- return false
- }
-
- return o1.GetState().GetStateNumber() == o2.GetState().GetStateNumber() &&
- o1.GetAlt() == o2.GetAlt() &&
- o1.GetSemanticContext().Equals(o2.GetSemanticContext())
-}
-
-// Hash1 is a custom hash implementation for ATNConfigs specifically for configLookup
-func (c *ATNConfigComparator[T]) Hash1(o *ATNConfig) int {
-
- hash := 7
- hash = 31*hash + o.GetState().GetStateNumber()
- hash = 31*hash + o.GetAlt()
- hash = 31*hash + o.GetSemanticContext().Hash()
- return hash
-}
-
-// ATNAltConfigComparator is used as the comparator for mapping configs to Alt Bitsets
-type ATNAltConfigComparator[T Collectable[T]] struct {
-}
-
-// Equals2 is a custom comparator for ATNConfigs specifically for configLookup
-func (c *ATNAltConfigComparator[T]) Equals2(o1, o2 *ATNConfig) bool {
-
- // Same pointer, must be equal, even if both nil
- //
- if o1 == o2 {
- return true
-
- }
-
- // If either are nil, but not both, then the result is false
- //
- if o1 == nil || o2 == nil {
- return false
- }
-
- return o1.GetState().GetStateNumber() == o2.GetState().GetStateNumber() &&
- o1.GetContext().Equals(o2.GetContext())
-}
-
-// Hash1 is a custom hash implementation for ATNConfigs specifically for configLookup
-func (c *ATNAltConfigComparator[T]) Hash1(o *ATNConfig) int {
- h := murmurInit(7)
- h = murmurUpdate(h, o.GetState().GetStateNumber())
- h = murmurUpdate(h, o.GetContext().Hash())
- return murmurFinish(h, 2)
-}
-
-// BaseATNConfigComparator is used as the comparator for the configLookup field of an ATNConfigSet
-// and has a custom Equals() and Hash() implementation, because equality is not based on the
-// standard Hash() and Equals() methods of the ATNConfig type.
-type BaseATNConfigComparator[T Collectable[T]] struct {
-}
-
-// Equals2 is a custom comparator for ATNConfigs specifically for baseATNConfigSet
-func (c *BaseATNConfigComparator[T]) Equals2(o1, o2 *ATNConfig) bool {
-
- // Same pointer, must be equal, even if both nil
- //
- if o1 == o2 {
- return true
-
- }
-
- // If either are nil, but not both, then the result is false
- //
- if o1 == nil || o2 == nil {
- return false
- }
-
- return o1.GetState().GetStateNumber() == o2.GetState().GetStateNumber() &&
- o1.GetAlt() == o2.GetAlt() &&
- o1.GetSemanticContext().Equals(o2.GetSemanticContext())
-}
-
-// Hash1 is a custom hash implementation for ATNConfigs specifically for configLookup, but in fact just
-// delegates to the standard Hash() method of the ATNConfig type.
-func (c *BaseATNConfigComparator[T]) Hash1(o *ATNConfig) int {
- return o.Hash()
-}
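The header comment in this file describes the pattern being removed: hashing and equality live in a comparator next to the collection rather than on the element type, enforced through generics. A self-contained sketch of that shape, using illustrative types rather than the runtime's own (requires Go 1.18+):

	package main

	import "fmt"

	// comparator mirrors the Equals2/Hash1 shape used above.
	type comparator[T any] interface {
		Equals2(a, b T) bool
		Hash1(a T) int
	}

	type point struct{ x, y int }

	// pointByX deems two points equal when their x coordinates match.
	type pointByX struct{}

	func (pointByX) Equals2(a, b point) bool { return a.x == b.x }
	func (pointByX) Hash1(a point) int       { return a.x }

	// contains reports whether v is already in items under comparator c.
	func contains[T any, C comparator[T]](c C, items []T, v T) bool {
		for _, it := range items {
			if c.Hash1(it) == c.Hash1(v) && c.Equals2(it, v) {
				return true
			}
		}
		return false
	}

	func main() {
		pts := []point{{1, 2}, {3, 4}}
		fmt.Println(contains[point, pointByX](pointByX{}, pts, point{3, 9})) // true: same x
	}
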
diff --git a/vendor/github.com/antlr4-go/antlr/v4/configuration.go b/vendor/github.com/antlr4-go/antlr/v4/configuration.go
deleted file mode 100644
index c2b724514..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/configuration.go
+++ /dev/null
@@ -1,214 +0,0 @@
-package antlr
-
-type runtimeConfiguration struct {
- statsTraceStacks bool
- lexerATNSimulatorDebug bool
- lexerATNSimulatorDFADebug bool
- parserATNSimulatorDebug bool
- parserATNSimulatorTraceATNSim bool
- parserATNSimulatorDFADebug bool
- parserATNSimulatorRetryDebug bool
- lRLoopEntryBranchOpt bool
- memoryManager bool
-}
-
-// Global runtime configuration
-var runtimeConfig = runtimeConfiguration{
- lRLoopEntryBranchOpt: true,
-}
-
-type runtimeOption func(*runtimeConfiguration) error
-
-// ConfigureRuntime allows the runtime to be configured globally setting things like trace and statistics options.
-// It uses the functional options pattern for Go. This is a package global function as it operates on the runtime
-// configuration regardless of the instantiation of anything higher up such as a parser or lexer. Generally this is
-// used for debugging/tracing/statistics options, which are usually used by the runtime maintainers (or rather the
-// only maintainer). However, it is possible that you might want to use this to set a global option concerning the
-// memory allocation strategy used by the runtime, such as whether to use sync.Pool.
-//
-// The options are applied in the order they are passed in, so the last option will override any previous options.
-//
-// For example, if you want to turn on the collection create point stack flag to true, you can do:
-//
-// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(true))
-//
-// If you want to turn it off, you can do:
-//
-// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(false))
-func ConfigureRuntime(options ...runtimeOption) error {
- for _, option := range options {
- err := option(&runtimeConfig)
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-// WithStatsTraceStacks sets the global flag indicating whether to collect stack traces at the create-point of
-// certain structs, such as collections, or the use point of certain methods such as Put().
-// Because this can be expensive, it is turned off by default. However, it
-// can be useful to track down exactly where memory is being created and used.
-//
-// Use:
-//
-// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(true))
-//
-// You can turn it off at any time using:
-//
-// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(false))
-func WithStatsTraceStacks(trace bool) runtimeOption {
- return func(config *runtimeConfiguration) error {
- config.statsTraceStacks = trace
- return nil
- }
-}
-
-// WithLexerATNSimulatorDebug sets the global flag indicating whether to log debug information from the lexer [ATN]
-// simulator. This is useful for debugging lexer issues by comparing the output with the Java runtime. Only useful
-// to the runtime maintainers.
-//
-// Use:
-//
-// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDebug(true))
-//
-// You can turn it off at any time using:
-//
-// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDebug(false))
-func WithLexerATNSimulatorDebug(debug bool) runtimeOption {
- return func(config *runtimeConfiguration) error {
- config.lexerATNSimulatorDebug = debug
- return nil
- }
-}
-
-// WithLexerATNSimulatorDFADebug sets the global flag indicating whether to log debug information from the lexer [ATN] [DFA]
-// simulator. This is useful for debugging lexer issues by comparing the output with the Java runtime. Only useful
-// to the runtime maintainers.
-//
-// Use:
-//
-// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDFADebug(true))
-//
-// You can turn it off at any time using:
-//
-// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDFADebug(false))
-func WithLexerATNSimulatorDFADebug(debug bool) runtimeOption {
- return func(config *runtimeConfiguration) error {
- config.lexerATNSimulatorDFADebug = debug
- return nil
- }
-}
-
-// WithParserATNSimulatorDebug sets the global flag indicating whether to log debug information from the parser [ATN]
-// simulator. This is useful for debugging parser issues by comparing the output with the Java runtime. Only useful
-// to the runtime maintainers.
-//
-// Use:
-//
-// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDebug(true))
-//
-// You can turn it off at any time using:
-//
-// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDebug(false))
-func WithParserATNSimulatorDebug(debug bool) runtimeOption {
- return func(config *runtimeConfiguration) error {
- config.parserATNSimulatorDebug = debug
- return nil
- }
-}
-
-// WithParserATNSimulatorTraceATNSim sets the global flag indicating whether to log trace information from the parser [ATN] simulator
-// [DFA]. This is useful for debugging parser issues by comparing the output with the Java runtime. Only useful
-// to the runtime maintainers.
-//
-// Use:
-//
-// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorTraceATNSim(true))
-//
-// You can turn it off at any time using:
-//
-// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorTraceATNSim(false))
-func WithParserATNSimulatorTraceATNSim(trace bool) runtimeOption {
- return func(config *runtimeConfiguration) error {
- config.parserATNSimulatorTraceATNSim = trace
- return nil
- }
-}
-
-// WithParserATNSimulatorDFADebug sets the global flag indicating whether to log debug information from the parser [ATN] [DFA]
-// simulator. This is useful for debugging parser issues by comparing the output with the Java runtime. Only useful
-// to the runtime maintainers.
-//
-// Use:
-//
-// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDFADebug(true))
-//
-// You can turn it off at any time using:
-//
-// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDFADebug(false))
-func WithParserATNSimulatorDFADebug(debug bool) runtimeOption {
- return func(config *runtimeConfiguration) error {
- config.parserATNSimulatorDFADebug = debug
- return nil
- }
-}
-
-// WithParserATNSimulatorRetryDebug sets the global flag indicating whether to log debug information from the parser [ATN] [DFA]
-// simulator when retrying a decision. This is useful for debugging parser issues by comparing the output with the Java runtime.
-// Only useful to the runtime maintainers.
-//
-// Use:
-//
-// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorRetryDebug(true))
-//
-// You can turn it off at any time using:
-//
-// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorRetryDebug(false))
-func WithParserATNSimulatorRetryDebug(debug bool) runtimeOption {
- return func(config *runtimeConfiguration) error {
- config.parserATNSimulatorRetryDebug = debug
- return nil
- }
-}
-
-// WithLRLoopEntryBranchOpt sets the global flag indicating whether left-recursive loop operations should be
-// optimized or not. This is useful for debugging parser issues by comparing the output with the Java runtime.
-// It turns off the functionality of [canDropLoopEntryEdgeInLeftRecursiveRule] in [ParserATNSimulator].
-//
-// Note that default is to use this optimization.
-//
-// Use:
-//
-// antlr.ConfigureRuntime(antlr.WithLRLoopEntryBranchOpt(true))
-//
-// You can turn it off at any time using:
-//
-// antlr.ConfigureRuntime(antlr.WithLRLoopEntryBranchOpt(false))
-func WithLRLoopEntryBranchOpt(off bool) runtimeOption {
- return func(config *runtimeConfiguration) error {
- config.lRLoopEntryBranchOpt = off
- return nil
- }
-}
-
-// WithMemoryManager sets the global flag indicating whether to use the memory manager or not. This is useful
-// for poorly constructed grammars that create a lot of garbage. It turns on the functionality of [memoryManager], which
-// will intercept garbage collection and cause available memory to be reused. At the end of the day, this is no substitute
-// for fixing your grammar by ridding yourself of extreme ambiguity. But if you are just trying to reuse an open-source
-// grammar, this may help make it more practical.
-//
-// Note that default is to use normal Go memory allocation and not pool memory.
-//
-// Use:
-//
-// antlr.ConfigureRuntime(antlr.WithMemoryManager(true))
-//
-// Note that if you turn this on, you should probably leave it on. You should use only one memory strategy or the other
-// and should remember to nil out any references to the parser or lexer when you are done with them.
-func WithMemoryManager(use bool) runtimeOption {
- return func(config *runtimeConfiguration) error {
- config.memoryManager = use
- return nil
- }
-}
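One usage note on the functional-options API deleted above: the options compose in a single ConfigureRuntime call and are applied left to right. A sketch that uses only options defined in this file (the import path is the one this vendored copy was served from):

	package main

	import "github.com/antlr4-go/antlr/v4"

	func main() {
		// Options are applied in order; a later option overrides an earlier one.
		err := antlr.ConfigureRuntime(
			antlr.WithLexerATNSimulatorDebug(true),
			antlr.WithParserATNSimulatorTraceATNSim(true),
			antlr.WithMemoryManager(false),
		)
		if err != nil {
			panic(err)
		}
	}
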
diff --git a/vendor/github.com/antlr4-go/antlr/v4/dfa.go b/vendor/github.com/antlr4-go/antlr/v4/dfa.go
deleted file mode 100644
index 6b63eb158..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/dfa.go
+++ /dev/null
@@ -1,175 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-// DFA represents the Deterministic Finite Automaton used by the recognizer, including all the states it can
-// reach and the transitions between them.
-type DFA struct {
- // atnStartState is the ATN state in which this was created
- atnStartState DecisionState
-
- decision int
-
- // states is all the DFA states. Use Map to get the old state back; Set can only
- // indicate whether it is there. Go maps implement key hash collisions and so on and are very
- // good, but the DFAState is an object and can't be used directly as the key as it can in say Java
-	// and C#, whereby if the hashcode is the same for two objects, then Equals() is called against them
- // to see if they really are the same object. Hence, we have our own map storage.
- //
- states *JStore[*DFAState, *ObjEqComparator[*DFAState]]
-
- numstates int
-
- s0 *DFAState
-
- // precedenceDfa is the backing field for isPrecedenceDfa and setPrecedenceDfa.
- // True if the DFA is for a precedence decision and false otherwise.
- precedenceDfa bool
-}
-
-func NewDFA(atnStartState DecisionState, decision int) *DFA {
- dfa := &DFA{
- atnStartState: atnStartState,
- decision: decision,
- states: nil, // Lazy initialize
- }
- if s, ok := atnStartState.(*StarLoopEntryState); ok && s.precedenceRuleDecision {
- dfa.precedenceDfa = true
- dfa.s0 = NewDFAState(-1, NewATNConfigSet(false))
- dfa.s0.isAcceptState = false
- dfa.s0.requiresFullContext = false
- }
- return dfa
-}
-
-// getPrecedenceStartState gets the start state for the current precedence and
-// returns the start state corresponding to the specified precedence if a start
-// state exists for the specified precedence and nil otherwise. d must be a
-// precedence DFA. See also isPrecedenceDfa.
-func (d *DFA) getPrecedenceStartState(precedence int) *DFAState {
- if !d.getPrecedenceDfa() {
- panic("only precedence DFAs may contain a precedence start state")
- }
-
- // s0.edges is never nil for a precedence DFA
- if precedence < 0 || precedence >= len(d.getS0().getEdges()) {
- return nil
- }
-
- return d.getS0().getIthEdge(precedence)
-}
-
-// setPrecedenceStartState sets the start state for the current precedence. d
-// must be a precedence DFA. See also isPrecedenceDfa.
-func (d *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
- if !d.getPrecedenceDfa() {
- panic("only precedence DFAs may contain a precedence start state")
- }
-
- if precedence < 0 {
- return
- }
-
- // Synchronization on s0 here is ok. When the DFA is turned into a
- // precedence DFA, s0 will be initialized once and not updated again. s0.edges
- // is never nil for a precedence DFA.
- s0 := d.getS0()
- if precedence >= s0.numEdges() {
- edges := append(s0.getEdges(), make([]*DFAState, precedence+1-s0.numEdges())...)
- s0.setEdges(edges)
- d.setS0(s0)
- }
-
- s0.setIthEdge(precedence, startState)
-}
-
-func (d *DFA) getPrecedenceDfa() bool {
- return d.precedenceDfa
-}
-
-// setPrecedenceDfa sets whether d is a precedence DFA. If precedenceDfa differs
-// from the current DFA configuration, then d.states is cleared, the initial
-// state s0 is set to a new DFAState with an empty outgoing DFAState.edges to
-// store the start states for individual precedence values if precedenceDfa is
-// true or nil otherwise, and d.precedenceDfa is updated.
-func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
- if d.getPrecedenceDfa() != precedenceDfa {
- d.states = nil // Lazy initialize
- d.numstates = 0
-
- if precedenceDfa {
- precedenceState := NewDFAState(-1, NewATNConfigSet(false))
- precedenceState.setEdges(make([]*DFAState, 0))
- precedenceState.isAcceptState = false
- precedenceState.requiresFullContext = false
- d.setS0(precedenceState)
- } else {
- d.setS0(nil)
- }
-
- d.precedenceDfa = precedenceDfa
- }
-}
-
-// Len returns the number of states in d. We use this instead of accessing states directly so that we can implement lazy
-// instantiation of the states JStore.
-func (d *DFA) Len() int {
- if d.states == nil {
- return 0
- }
- return d.states.Len()
-}
-
-// Get returns a state that matches s if it is present in the DFA state set. We defer to this
-// function instead of accessing states directly so that we can implement lazy instantiation of the states JStore.
-func (d *DFA) Get(s *DFAState) (*DFAState, bool) {
- if d.states == nil {
- return nil, false
- }
- return d.states.Get(s)
-}
-
-func (d *DFA) Put(s *DFAState) (*DFAState, bool) {
- if d.states == nil {
- d.states = NewJStore[*DFAState, *ObjEqComparator[*DFAState]](dfaStateEqInst, DFAStateCollection, "DFA via DFA.Put")
- }
- return d.states.Put(s)
-}
-
-func (d *DFA) getS0() *DFAState {
- return d.s0
-}
-
-func (d *DFA) setS0(s *DFAState) {
- d.s0 = s
-}
-
-// sortedStates returns the states in d sorted by their state number, or an empty set if d.states is nil.
-func (d *DFA) sortedStates() []*DFAState {
- if d.states == nil {
- return []*DFAState{}
- }
- vs := d.states.SortedSlice(func(i, j *DFAState) bool {
- return i.stateNumber < j.stateNumber
- })
-
- return vs
-}
-
-func (d *DFA) String(literalNames []string, symbolicNames []string) string {
- if d.getS0() == nil {
- return ""
- }
-
- return NewDFASerializer(d, literalNames, symbolicNames).String()
-}
-
-func (d *DFA) ToLexerString() string {
- if d.getS0() == nil {
- return ""
- }
-
- return NewLexerDFASerializer(d).String()
-}
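The comment on the states field above is the motivation worth keeping in mind: a *DFAState cannot be a plain map key because equality must go through Equals(), not pointer identity, so the runtime keeps its own hash-bucketed store. An illustrative sketch of that bucket-per-hash idea (not the runtime's JStore, just the contract it relies on):

	package main

	import "fmt"

	// hashEq is the contract the store relies on: Java-style hashCode/equals.
	type hashEq[T any] interface {
		Hash() int
		Equals(T) bool
	}

	// eqSet stores values under their Hash() and falls back to Equals() on
	// collisions, so two distinct values with equal content collapse to one entry.
	type eqSet[T hashEq[T]] struct {
		buckets map[int][]T
	}

	// Put returns the previously stored equivalent value if there is one,
	// otherwise it inserts v; the bool reports whether v was newly added.
	func (s *eqSet[T]) Put(v T) (T, bool) {
		h := v.Hash()
		for _, existing := range s.buckets[h] {
			if existing.Equals(v) {
				return existing, false
			}
		}
		s.buckets[h] = append(s.buckets[h], v)
		return v, true
	}

	type id struct{ n int }

	func (a id) Hash() int        { return a.n % 8 }
	func (a id) Equals(b id) bool { return a.n == b.n }

	func main() {
		s := &eqSet[id]{buckets: map[int][]id{}}
		_, added := s.Put(id{n: 9})
		_, again := s.Put(id{n: 9})
		fmt.Println(added, again) // true false: the second Put found the stored value
	}
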
diff --git a/vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go b/vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go
deleted file mode 100644
index 0e1100989..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go
+++ /dev/null
@@ -1,158 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "strconv"
- "strings"
-)
-
-// DFASerializer is a DFA walker that knows how to dump the DFA states to serialized
-// strings.
-type DFASerializer struct {
- dfa *DFA
- literalNames []string
- symbolicNames []string
-}
-
-func NewDFASerializer(dfa *DFA, literalNames, symbolicNames []string) *DFASerializer {
- if literalNames == nil {
- literalNames = make([]string, 0)
- }
-
- if symbolicNames == nil {
- symbolicNames = make([]string, 0)
- }
-
- return &DFASerializer{
- dfa: dfa,
- literalNames: literalNames,
- symbolicNames: symbolicNames,
- }
-}
-
-func (d *DFASerializer) String() string {
- if d.dfa.getS0() == nil {
- return ""
- }
-
- buf := ""
- states := d.dfa.sortedStates()
-
- for _, s := range states {
- if s.edges != nil {
- n := len(s.edges)
-
- for j := 0; j < n; j++ {
- t := s.edges[j]
-
- if t != nil && t.stateNumber != 0x7FFFFFFF {
- buf += d.GetStateString(s)
- buf += "-"
- buf += d.getEdgeLabel(j)
- buf += "->"
- buf += d.GetStateString(t)
- buf += "\n"
- }
- }
- }
- }
-
- if len(buf) == 0 {
- return ""
- }
-
- return buf
-}
-
-func (d *DFASerializer) getEdgeLabel(i int) string {
- if i == 0 {
- return "EOF"
- } else if d.literalNames != nil && i-1 < len(d.literalNames) {
- return d.literalNames[i-1]
- } else if d.symbolicNames != nil && i-1 < len(d.symbolicNames) {
- return d.symbolicNames[i-1]
- }
-
- return strconv.Itoa(i - 1)
-}
-
-func (d *DFASerializer) GetStateString(s *DFAState) string {
- var a, b string
-
- if s.isAcceptState {
- a = ":"
- }
-
- if s.requiresFullContext {
- b = "^"
- }
-
- baseStateStr := a + "s" + strconv.Itoa(s.stateNumber) + b
-
- if s.isAcceptState {
- if s.predicates != nil {
- return baseStateStr + "=>" + fmt.Sprint(s.predicates)
- }
-
- return baseStateStr + "=>" + fmt.Sprint(s.prediction)
- }
-
- return baseStateStr
-}
-
-type LexerDFASerializer struct {
- *DFASerializer
-}
-
-func NewLexerDFASerializer(dfa *DFA) *LexerDFASerializer {
- return &LexerDFASerializer{DFASerializer: NewDFASerializer(dfa, nil, nil)}
-}
-
-func (l *LexerDFASerializer) getEdgeLabel(i int) string {
- var sb strings.Builder
- sb.Grow(6)
- sb.WriteByte('\'')
- sb.WriteRune(rune(i))
- sb.WriteByte('\'')
- return sb.String()
-}
-
-func (l *LexerDFASerializer) String() string {
- if l.dfa.getS0() == nil {
- return ""
- }
-
- buf := ""
- states := l.dfa.sortedStates()
-
- for i := 0; i < len(states); i++ {
- s := states[i]
-
- if s.edges != nil {
- n := len(s.edges)
-
- for j := 0; j < n; j++ {
- t := s.edges[j]
-
- if t != nil && t.stateNumber != 0x7FFFFFFF {
- buf += l.GetStateString(s)
- buf += "-"
- buf += l.getEdgeLabel(j)
- buf += "->"
- buf += l.GetStateString(t)
- buf += "\n"
- }
- }
- }
- }
-
- if len(buf) == 0 {
- return ""
- }
-
- return buf
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/dfa_state.go b/vendor/github.com/antlr4-go/antlr/v4/dfa_state.go
deleted file mode 100644
index 654143074..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/dfa_state.go
+++ /dev/null
@@ -1,170 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
-)
-
-// PredPrediction maps a predicate to a predicted alternative.
-type PredPrediction struct {
- alt int
- pred SemanticContext
-}
-
-func NewPredPrediction(pred SemanticContext, alt int) *PredPrediction {
- return &PredPrediction{alt: alt, pred: pred}
-}
-
-func (p *PredPrediction) String() string {
- return "(" + fmt.Sprint(p.pred) + ", " + fmt.Sprint(p.alt) + ")"
-}
-
-// DFAState represents a set of possible [ATN] configurations. As Aho, Sethi,
-// Ullman p. 117 says: "The DFA uses its state to keep track of all possible
-// states the ATN can be in after reading each input symbol. That is to say,
-// after reading input a1, a2,..an, the DFA is in a state that represents the
-// subset T of the states of the ATN that are reachable from the ATN's start
-// state along some path labeled a1a2..an."
-//
-// In conventional NFA-to-DFA conversion, therefore, the subset T would be a bitset representing the set of
-// states the [ATN] could be in. We need to track the alt predicted by each state
-// as well, however. More importantly, we need to maintain a stack of states,
-// tracking the closure operations as they jump from rule to rule, emulating
-// rule invocations (method calls). I have to add a stack to simulate the proper
-// lookahead sequences for the underlying LL grammar from which the ATN was
-// derived.
-//
-// I use a set of [ATNConfig] objects, not simple states. An [ATNConfig] is both a
-// state (ala normal conversion) and a [RuleContext] describing the chain of rules
-// (if any) followed to arrive at that state.
-//
-// A [DFAState] may have multiple references to a particular state, but with
-// different [ATN] contexts (with same or different alts) meaning that state was
-// reached via a different set of rule invocations.
-type DFAState struct {
- stateNumber int
- configs *ATNConfigSet
-
- // edges elements point to the target of the symbol. Shift up by 1 so (-1)
- // Token.EOF maps to the first element.
- edges []*DFAState
-
- isAcceptState bool
-
- // prediction is the 'ttype' we match or alt we predict if the state is 'accept'.
- // Set to ATN.INVALID_ALT_NUMBER when predicates != nil or
- // requiresFullContext.
- prediction int
-
- lexerActionExecutor *LexerActionExecutor
-
- // requiresFullContext indicates it was created during an SLL prediction that
- // discovered a conflict between the configurations in the state. Future
-	// ParserATNSimulator.execATN invocations immediately jump to doing
-	// full-context prediction if this field is true.
- requiresFullContext bool
-
- // predicates is the predicates associated with the ATN configurations of the
- // DFA state during SLL parsing. When we have predicates, requiresFullContext
-	// is false, since full context prediction evaluates predicates on-the-fly. If
-	// this list is not nil, then prediction is ATN.INVALID_ALT_NUMBER.
- //
- // We only use these for non-requiresFullContext but conflicting states. That
- // means we know from the context (it's $ or we don't dip into outer context)
- // that it's an ambiguity not a conflict.
- //
- // This list is computed by
- // ParserATNSimulator.predicateDFAState.
- predicates []*PredPrediction
-}
-
-func NewDFAState(stateNumber int, configs *ATNConfigSet) *DFAState {
- if configs == nil {
- configs = NewATNConfigSet(false)
- }
-
- return &DFAState{configs: configs, stateNumber: stateNumber}
-}
-
-// GetAltSet gets the set of all alts mentioned by all ATN configurations in d.
-func (d *DFAState) GetAltSet() []int {
- var alts []int
-
- if d.configs != nil {
- for _, c := range d.configs.configs {
- alts = append(alts, c.GetAlt())
- }
- }
-
- if len(alts) == 0 {
- return nil
- }
-
- return alts
-}
-
-func (d *DFAState) getEdges() []*DFAState {
- return d.edges
-}
-
-func (d *DFAState) numEdges() int {
- return len(d.edges)
-}
-
-func (d *DFAState) getIthEdge(i int) *DFAState {
- return d.edges[i]
-}
-
-func (d *DFAState) setEdges(newEdges []*DFAState) {
- d.edges = newEdges
-}
-
-func (d *DFAState) setIthEdge(i int, edge *DFAState) {
- d.edges[i] = edge
-}
-
-func (d *DFAState) setPrediction(v int) {
- d.prediction = v
-}
-
-func (d *DFAState) String() string {
- var s string
- if d.isAcceptState {
- if d.predicates != nil {
- s = "=>" + fmt.Sprint(d.predicates)
- } else {
- s = "=>" + fmt.Sprint(d.prediction)
- }
- }
-
- return fmt.Sprintf("%d:%s%s", d.stateNumber, fmt.Sprint(d.configs), s)
-}
-
-func (d *DFAState) Hash() int {
- h := murmurInit(7)
- h = murmurUpdate(h, d.configs.Hash())
- return murmurFinish(h, 1)
-}
-
-// Equals returns whether d equals other. Two DFAStates are equal if their ATN
-// configuration sets are the same. This method is used to see if a state
-// already exists.
-//
-// Because the number of alternatives and number of ATN configurations are
-// finite, there is a finite number of DFA states that can be processed. This is
-// necessary to show that the algorithm terminates.
-//
-// Cannot test the DFA state numbers here because in
-// ParserATNSimulator.addDFAState we need to know if any other state exists that
-// has this exact set of ATN configurations. The stateNumber is irrelevant.
-func (d *DFAState) Equals(o Collectable[*DFAState]) bool {
- if d == o {
- return true
- }
-
- return d.configs.Equals(o.(*DFAState).configs)
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go b/vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go
deleted file mode 100644
index bd2cd8bc3..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go
+++ /dev/null
@@ -1,110 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "strconv"
-)
-
-//
-// This implementation of {@link ANTLRErrorListener} can be used to identify
-// certain potential correctness and performance problems in grammars. "reports"
-// are made by calling {@link Parser//NotifyErrorListeners} with the appropriate
-// message.
-//
-//   - Ambiguities: These are cases where more than one path through the
-//     grammar can Match the input.
-//   - Weak context sensitivity: These are cases where full-context
-//     prediction resolved an SLL conflict to a unique alternative which equaled the
-//     minimum alternative of the SLL conflict.
-//   - Strong (forced) context sensitivity: These are cases where the
-//     full-context prediction resolved an SLL conflict to a unique alternative,
-//     and the minimum alternative of the SLL conflict was found to not be
-//     a truly viable alternative. Two-stage parsing cannot be used for inputs where
-//     this situation occurs.
-//
-
-type DiagnosticErrorListener struct {
- *DefaultErrorListener
-
- exactOnly bool
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func NewDiagnosticErrorListener(exactOnly bool) *DiagnosticErrorListener {
-
- n := new(DiagnosticErrorListener)
-
- // whether all ambiguities or only exact ambiguities are Reported.
- n.exactOnly = exactOnly
- return n
-}
-
-func (d *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
- if d.exactOnly && !exact {
- return
- }
- msg := "reportAmbiguity d=" +
- d.getDecisionDescription(recognizer, dfa) +
- ": ambigAlts=" +
- d.getConflictingAlts(ambigAlts, configs).String() +
- ", input='" +
- recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
- recognizer.NotifyErrorListeners(msg, nil, nil)
-}
-
-func (d *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, _ *BitSet, _ *ATNConfigSet) {
-
- msg := "reportAttemptingFullContext d=" +
- d.getDecisionDescription(recognizer, dfa) +
- ", input='" +
- recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
- recognizer.NotifyErrorListeners(msg, nil, nil)
-}
-
-func (d *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, _ int, _ *ATNConfigSet) {
- msg := "reportContextSensitivity d=" +
- d.getDecisionDescription(recognizer, dfa) +
- ", input='" +
- recognizer.GetTokenStream().GetTextFromInterval(NewInterval(startIndex, stopIndex)) + "'"
- recognizer.NotifyErrorListeners(msg, nil, nil)
-}
-
-func (d *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, dfa *DFA) string {
- decision := dfa.decision
- ruleIndex := dfa.atnStartState.GetRuleIndex()
-
- ruleNames := recognizer.GetRuleNames()
- if ruleIndex < 0 || ruleIndex >= len(ruleNames) {
- return strconv.Itoa(decision)
- }
- ruleName := ruleNames[ruleIndex]
- if ruleName == "" {
- return strconv.Itoa(decision)
- }
- return strconv.Itoa(decision) + " (" + ruleName + ")"
-}
-
-// Computes the set of conflicting or ambiguous alternatives from a
-// configuration set, if that information was not already provided by the
-// parser.
-//
-// @param ReportedAlts The set of conflicting or ambiguous alternatives, as
-// Reported by the parser.
-// @param configs The conflicting or ambiguous configuration set.
-// @return Returns {@code ReportedAlts} if it is not {@code nil}, otherwise
-// returns the set of alternatives represented in {@code configs}.
-func (d *DiagnosticErrorListener) getConflictingAlts(ReportedAlts *BitSet, set *ATNConfigSet) *BitSet {
- if ReportedAlts != nil {
- return ReportedAlts
- }
- result := NewBitSet()
- for _, c := range set.configs {
- result.add(c.GetAlt())
- }
-
- return result
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/error_listener.go b/vendor/github.com/antlr4-go/antlr/v4/error_listener.go
deleted file mode 100644
index 21a021643..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/error_listener.go
+++ /dev/null
@@ -1,100 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "os"
- "strconv"
-)
-
-// Provides an empty default implementation of {@link ANTLRErrorListener}. The
-// default implementation of each method does nothing, but can be overridden as
-// necessary.
-
-type ErrorListener interface {
- SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException)
- ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet)
- ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet)
- ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet)
-}
-
-type DefaultErrorListener struct {
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func NewDefaultErrorListener() *DefaultErrorListener {
- return new(DefaultErrorListener)
-}
-
-func (d *DefaultErrorListener) SyntaxError(_ Recognizer, _ interface{}, _, _ int, _ string, _ RecognitionException) {
-}
-
-func (d *DefaultErrorListener) ReportAmbiguity(_ Parser, _ *DFA, _, _ int, _ bool, _ *BitSet, _ *ATNConfigSet) {
-}
-
-func (d *DefaultErrorListener) ReportAttemptingFullContext(_ Parser, _ *DFA, _, _ int, _ *BitSet, _ *ATNConfigSet) {
-}
-
-func (d *DefaultErrorListener) ReportContextSensitivity(_ Parser, _ *DFA, _, _, _ int, _ *ATNConfigSet) {
-}
-
-type ConsoleErrorListener struct {
- *DefaultErrorListener
-}
-
-func NewConsoleErrorListener() *ConsoleErrorListener {
- return new(ConsoleErrorListener)
-}
-
-// ConsoleErrorListenerINSTANCE provides a default instance of {@link ConsoleErrorListener}.
-var ConsoleErrorListenerINSTANCE = NewConsoleErrorListener()
-
-// SyntaxError prints messages to System.err containing the
-// values of line, charPositionInLine, and msg using
-// the following format:
-//
-// line <line>:<charPositionInLine> <msg>
-func (c *ConsoleErrorListener) SyntaxError(_ Recognizer, _ interface{}, line, column int, msg string, _ RecognitionException) {
- _, _ = fmt.Fprintln(os.Stderr, "line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg)
-}
-
-type ProxyErrorListener struct {
- *DefaultErrorListener
- delegates []ErrorListener
-}
-
-func NewProxyErrorListener(delegates []ErrorListener) *ProxyErrorListener {
- if delegates == nil {
- panic("delegates is not provided")
- }
- l := new(ProxyErrorListener)
- l.delegates = delegates
- return l
-}
-
-func (p *ProxyErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
- for _, d := range p.delegates {
- d.SyntaxError(recognizer, offendingSymbol, line, column, msg, e)
- }
-}
-
-func (p *ProxyErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
- for _, d := range p.delegates {
- d.ReportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
- }
-}
-
-func (p *ProxyErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) {
- for _, d := range p.delegates {
- d.ReportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)
- }
-}
-
-func (p *ProxyErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) {
- for _, d := range p.delegates {
- d.ReportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/error_strategy.go b/vendor/github.com/antlr4-go/antlr/v4/error_strategy.go
deleted file mode 100644
index 9db2be1c7..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/error_strategy.go
+++ /dev/null
@@ -1,702 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "reflect"
- "strconv"
- "strings"
-)
-
-type ErrorStrategy interface {
- reset(Parser)
- RecoverInline(Parser) Token
- Recover(Parser, RecognitionException)
- Sync(Parser)
- InErrorRecoveryMode(Parser) bool
- ReportError(Parser, RecognitionException)
- ReportMatch(Parser)
-}
-
-// DefaultErrorStrategy is the default implementation of ANTLRErrorStrategy used for
-// error reporting and recovery in ANTLR parsers.
-type DefaultErrorStrategy struct {
- errorRecoveryMode bool
- lastErrorIndex int
- lastErrorStates *IntervalSet
-}
-
-var _ ErrorStrategy = &DefaultErrorStrategy{}
-
-func NewDefaultErrorStrategy() *DefaultErrorStrategy {
-
- d := new(DefaultErrorStrategy)
-
- // Indicates whether the error strategy is currently "recovering from an
- // error". This is used to suppress Reporting multiple error messages while
- // attempting to recover from a detected syntax error.
- //
- // @see //InErrorRecoveryMode
- //
- d.errorRecoveryMode = false
-
- // The index into the input stream where the last error occurred.
- // This is used to prevent infinite loops where an error is found
- // but no token is consumed during recovery...another error is found,
- // ad nauseam. This is a failsafe mechanism to guarantee that at least
- // one token/tree node is consumed for two errors.
- //
- d.lastErrorIndex = -1
- d.lastErrorStates = nil
- return d
-}
-
-//
-// The default implementation simply calls {@link //endErrorCondition} to
-// ensure that the handler is not in error recovery mode.
-func (d *DefaultErrorStrategy) reset(recognizer Parser) {
- d.endErrorCondition(recognizer)
-}
-
-// This method is called to enter error recovery mode when a recognition
-// exception is Reported.
-func (d *DefaultErrorStrategy) beginErrorCondition(_ Parser) {
- d.errorRecoveryMode = true
-}
-
-func (d *DefaultErrorStrategy) InErrorRecoveryMode(_ Parser) bool {
- return d.errorRecoveryMode
-}
-
-// This method is called to leave error recovery mode after recovering from
-// a recognition exception.
-func (d *DefaultErrorStrategy) endErrorCondition(_ Parser) {
- d.errorRecoveryMode = false
- d.lastErrorStates = nil
- d.lastErrorIndex = -1
-}
-
-// ReportMatch is the default implementation of error matching and simply calls endErrorCondition.
-func (d *DefaultErrorStrategy) ReportMatch(recognizer Parser) {
- d.endErrorCondition(recognizer)
-}
-
-// ReportError is the default implementation of error reporting.
-// It returns immediately if the handler is already
-// in error recovery mode. Otherwise, it calls [beginErrorCondition]
-// and dispatches the Reporting task based on the runtime type of e
-// according to the following table.
-//
-// [NoViableAltException] : Dispatches the call to [ReportNoViableAlternative]
-// [InputMisMatchException] : Dispatches the call to [ReportInputMisMatch]
-// [FailedPredicateException] : Dispatches the call to [ReportFailedPredicate]
-// All other types : Calls [NotifyErrorListeners] to Report the exception
-func (d *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionException) {
- // if we've already Reported an error and have not Matched a token
- // yet successfully, don't Report any errors.
- if d.InErrorRecoveryMode(recognizer) {
- return // don't Report spurious errors
- }
- d.beginErrorCondition(recognizer)
-
- switch t := e.(type) {
- default:
- fmt.Println("unknown recognition error type: " + reflect.TypeOf(e).Name())
- // fmt.Println(e.stack)
- recognizer.NotifyErrorListeners(e.GetMessage(), e.GetOffendingToken(), e)
- case *NoViableAltException:
- d.ReportNoViableAlternative(recognizer, t)
- case *InputMisMatchException:
- d.ReportInputMisMatch(recognizer, t)
- case *FailedPredicateException:
- d.ReportFailedPredicate(recognizer, t)
- }
-}
-
-// Recover is the default recovery implementation.
-// It reSynchronizes the parser by consuming tokens until we find one in the reSynchronization set -
-// loosely the set of tokens that can follow the current rule.
-func (d *DefaultErrorStrategy) Recover(recognizer Parser, _ RecognitionException) {
-
- if d.lastErrorIndex == recognizer.GetInputStream().Index() &&
- d.lastErrorStates != nil && d.lastErrorStates.contains(recognizer.GetState()) {
- // uh oh, another error at same token index and previously-Visited
- // state in ATN must be a case where LT(1) is in the recovery
- // token set so nothing got consumed. Consume a single token
- // at least to prevent an infinite loop d is a failsafe.
- recognizer.Consume()
- }
- d.lastErrorIndex = recognizer.GetInputStream().Index()
- if d.lastErrorStates == nil {
- d.lastErrorStates = NewIntervalSet()
- }
- d.lastErrorStates.addOne(recognizer.GetState())
- followSet := d.GetErrorRecoverySet(recognizer)
- d.consumeUntil(recognizer, followSet)
-}
-
-// Sync is the default implementation of error strategy synchronization.
-//
-// This Sync makes sure that the current lookahead symbol is consistent with what were expecting
-// at this point in the [ATN]. You can call this anytime but ANTLR only
-// generates code to check before sub-rules/loops and each iteration.
-//
-// Implements [Jim Idle]'s magic Sync mechanism in closures and optional
-// sub-rules. E.g.:
-//
-// a : Sync ( stuff Sync )*
-// Sync : {consume to what can follow Sync}
-//
-// At the start of a sub-rule upon error, Sync performs single
-// token deletion, if possible. If it can't do that, it bails on the current
-// rule and uses the default error recovery, which consumes until the
-// reSynchronization set of the current rule.
-//
-// If the sub-rule is optional
-//
-// ({@code (...)?}, {@code (...)*},
-//
-// or a block with an empty alternative), then the expected set includes what follows
-// the sub-rule.
-//
-// During loop iteration, it consumes until it sees a token that can start a
-// sub-rule or what follows loop. Yes, that is pretty aggressive. We opt to
-// stay in the loop as long as possible.
-//
-// # Origins
-//
-// Previous versions of ANTLR did a poor job of their recovery within loops.
-// A single mismatch token or missing token would force the parser to bail
-// out of the entire rules surrounding the loop. So, for rule:
-//
-// classfunc : 'class' ID '{' member* '}'
-//
-// input with an extra token between members would force the parser to
-// consume until it found the next class definition rather than the next
-// member definition of the current class.
-//
-// This functionality cost a bit of effort because the parser has to
-// compare the token set at the start of the loop and at each iteration. If for
-// some reason speed is suffering for you, you can turn off this
-// functionality by simply overriding this method as empty:
-//
-// { }
-//
-// [Jim Idle]: https://github.com/jimidle
-func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
- // If already recovering, don't try to Sync
- if d.InErrorRecoveryMode(recognizer) {
- return
- }
-
- s := recognizer.GetInterpreter().atn.states[recognizer.GetState()]
- la := recognizer.GetTokenStream().LA(1)
-
- // try cheaper subset first might get lucky. seems to shave a wee bit off
- nextTokens := recognizer.GetATN().NextTokens(s, nil)
- if nextTokens.contains(TokenEpsilon) || nextTokens.contains(la) {
- return
- }
-
- switch s.GetStateType() {
- case ATNStateBlockStart, ATNStateStarBlockStart, ATNStatePlusBlockStart, ATNStateStarLoopEntry:
- // Report error and recover if possible
- if d.SingleTokenDeletion(recognizer) != nil {
- return
- }
- recognizer.SetError(NewInputMisMatchException(recognizer))
- case ATNStatePlusLoopBack, ATNStateStarLoopBack:
- d.ReportUnwantedToken(recognizer)
- expecting := NewIntervalSet()
- expecting.addSet(recognizer.GetExpectedTokens())
- whatFollowsLoopIterationOrRule := expecting.addSet(d.GetErrorRecoverySet(recognizer))
- d.consumeUntil(recognizer, whatFollowsLoopIterationOrRule)
- default:
- // do nothing if we can't identify the exact kind of ATN state
- }
-}
-
-// ReportNoViableAlternative is called by [ReportError] when the exception is a [NoViableAltException].
-//
-// See also [ReportError]
-func (d *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *NoViableAltException) {
- tokens := recognizer.GetTokenStream()
- var input string
- if tokens != nil {
- if e.startToken.GetTokenType() == TokenEOF {
- input = ""
- } else {
- input = tokens.GetTextFromTokens(e.startToken, e.offendingToken)
- }
- } else {
- input = ""
- }
- msg := "no viable alternative at input " + d.escapeWSAndQuote(input)
- recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
-}
-
-// ReportInputMisMatch is called by [ReportError] when the exception is an [InputMisMatchException]
-//
-// See also: [ReportError]
-func (d *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *InputMisMatchException) {
- msg := "mismatched input " + d.GetTokenErrorDisplay(e.offendingToken) +
- " expecting " + e.getExpectedTokens().StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
- recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
-}
-
-// ReportFailedPredicate is called by [ReportError] when the exception is a [FailedPredicateException].
-//
-// See also: [ReportError]
-func (d *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *FailedPredicateException) {
- ruleName := recognizer.GetRuleNames()[recognizer.GetParserRuleContext().GetRuleIndex()]
- msg := "rule " + ruleName + " " + e.message
- recognizer.NotifyErrorListeners(msg, e.offendingToken, e)
-}
-
-// ReportUnwantedToken is called to report a syntax error that requires the removal
-// of a token from the input stream. At the time d method is called, the
-// erroneous symbol is the current LT(1) symbol and has not yet been
-// removed from the input stream. When this method returns,
-// recognizer is in error recovery mode.
-//
-// This method is called when singleTokenDeletion identifies
-// single-token deletion as a viable recovery strategy for a mismatched
-// input error.
-//
-// The default implementation simply returns if the handler is already in
-// error recovery mode. Otherwise, it calls beginErrorCondition to
-// enter error recovery mode, followed by calling
-// [NotifyErrorListeners]
-func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) {
- if d.InErrorRecoveryMode(recognizer) {
- return
- }
- d.beginErrorCondition(recognizer)
- t := recognizer.GetCurrentToken()
- tokenName := d.GetTokenErrorDisplay(t)
- expecting := d.GetExpectedTokens(recognizer)
- msg := "extraneous input " + tokenName + " expecting " +
- expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false)
- recognizer.NotifyErrorListeners(msg, t, nil)
-}
-
-// ReportMissingToken is called to report a syntax error which requires the
-// insertion of a missing token into the input stream. At the time this
-// method is called, the missing token has not yet been inserted. When this
-// method returns, recognizer is in error recovery mode.
-//
-// This method is called when singleTokenInsertion identifies
-// single-token insertion as a viable recovery strategy for a mismatched
-// input error.
-//
-// The default implementation simply returns if the handler is already in
-// error recovery mode. Otherwise, it calls beginErrorCondition to
-// enter error recovery mode, followed by calling [NotifyErrorListeners]
-func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) {
- if d.InErrorRecoveryMode(recognizer) {
- return
- }
- d.beginErrorCondition(recognizer)
- t := recognizer.GetCurrentToken()
- expecting := d.GetExpectedTokens(recognizer)
- msg := "missing " + expecting.StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false) +
- " at " + d.GetTokenErrorDisplay(t)
- recognizer.NotifyErrorListeners(msg, t, nil)
-}
-
-// The RecoverInline default implementation attempts to recover from the mismatched input
-// by using single token insertion and deletion as described below. If the
-// recovery attempt fails, this method panics with [InputMisMatchException}.
-// TODO: Not sure that panic() is the right thing to do here - JI
-//
-// # EXTRA TOKEN (single token deletion)
-//
-// LA(1) is not what we are looking for. If LA(2) has the
-// right token, however, then assume LA(1) is some extra spurious
-// token and delete it. Then consume and return the next token (which was
-// the LA(2) token) as the successful result of the Match operation.
-//
-// # This recovery strategy is implemented by singleTokenDeletion
-//
-// # MISSING TOKEN (single token insertion)
-//
-// If current token -at LA(1) - is consistent with what could come
-// after the expected LA(1) token, then assume the token is missing
-// and use the parser's [TokenFactory] to create it on the fly. The
-// “insertion” is performed by returning the created token as the successful
-// result of the Match operation.
-//
-// This recovery strategy is implemented by [SingleTokenInsertion].
-//
-// # Example
-//
-// For example, Input i=(3 is clearly missing the ')'. When
-// the parser returns from the nested call to expr, it will have
-// call the chain:
-//
-// stat → expr → atom
-//
-// and it will be trying to Match the ')' at this point in the
-// derivation:
-//
-// : ID '=' '(' INT ')' ('+' atom)* ';'
-// ^
-//
-// The attempt to [Match] ')' will fail when it sees ';' and
-// call [RecoverInline]. To recover, it sees that LA(1)==';'
-// is in the set of tokens that can follow the ')' token reference
-// in rule atom. It can assume that you forgot the ')'.
-func (d *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token {
- // SINGLE TOKEN DELETION
- MatchedSymbol := d.SingleTokenDeletion(recognizer)
- if MatchedSymbol != nil {
- // we have deleted the extra token.
- // now, move past ttype token as if all were ok
- recognizer.Consume()
- return MatchedSymbol
- }
- // SINGLE TOKEN INSERTION
- if d.SingleTokenInsertion(recognizer) {
- return d.GetMissingSymbol(recognizer)
- }
- // even that didn't work must panic the exception
- recognizer.SetError(NewInputMisMatchException(recognizer))
- return nil
-}
-
-// SingleTokenInsertion implements the single-token insertion inline error recovery
-// strategy. It is called by [RecoverInline] if the single-token
-// deletion strategy fails to recover from the mismatched input. If this
-// method returns {@code true}, {@code recognizer} will be in error recovery
-// mode.
-//
-// This method determines whether single-token insertion is viable by
-// checking if the LA(1) input symbol could be successfully Matched
-// if it were instead the LA(2) symbol. If this method returns
-// {@code true}, the caller is responsible for creating and inserting a
-// token with the correct type to produce this behavior.
-//
-// This func returns true if single-token insertion is a viable recovery
-// strategy for the current mismatched input.
-func (d *DefaultErrorStrategy) SingleTokenInsertion(recognizer Parser) bool {
- currentSymbolType := recognizer.GetTokenStream().LA(1)
- // if current token is consistent with what could come after current
- // ATN state, then we know we're missing a token error recovery
- // is free to conjure up and insert the missing token
- atn := recognizer.GetInterpreter().atn
- currentState := atn.states[recognizer.GetState()]
- next := currentState.GetTransitions()[0].getTarget()
- expectingAtLL2 := atn.NextTokens(next, recognizer.GetParserRuleContext())
- if expectingAtLL2.contains(currentSymbolType) {
- d.ReportMissingToken(recognizer)
- return true
- }
-
- return false
-}
-
-// SingleTokenDeletion implements the single-token deletion inline error recovery
-// strategy. It is called by [RecoverInline] to attempt to recover
-// from mismatched input. If this method returns nil, the parser and error
-// handler state will not have changed. If this method returns non-nil,
-// recognizer will not be in error recovery mode since the
-// returned token was a successful Match.
-//
-// If the single-token deletion is successful, this method calls
-// [ReportUnwantedToken] to Report the error, followed by
-// [Consume] to actually “delete” the extraneous token. Then,
-// before returning, [ReportMatch] is called to signal a successful
-// Match.
-//
-// The func returns the successfully Matched [Token] instance if single-token
-// deletion successfully recovers from the mismatched input, otherwise nil.
-func (d *DefaultErrorStrategy) SingleTokenDeletion(recognizer Parser) Token {
- NextTokenType := recognizer.GetTokenStream().LA(2)
- expecting := d.GetExpectedTokens(recognizer)
- if expecting.contains(NextTokenType) {
- d.ReportUnwantedToken(recognizer)
- // print("recoverFromMisMatchedToken deleting " \
- // + str(recognizer.GetTokenStream().LT(1)) \
- // + " since " + str(recognizer.GetTokenStream().LT(2)) \
- // + " is what we want", file=sys.stderr)
- recognizer.Consume() // simply delete extra token
- // we want to return the token we're actually Matching
- MatchedSymbol := recognizer.GetCurrentToken()
- d.ReportMatch(recognizer) // we know current token is correct
- return MatchedSymbol
- }
-
- return nil
-}
-
-// GetMissingSymbol conjures up a missing token during error recovery.
-//
-// The recognizer attempts to recover from single missing
-// symbols. But, actions might refer to that missing symbol.
-// For example:
-//
-// x=ID {f($x)}.
-//
-// The action clearly assumes
-// that there has been an identifier Matched previously and that
-// $x points at that token. If that token is missing, but
-// the next token in the stream is what we want we assume that
-// this token is missing, and we keep going. Because we
-// have to return some token to replace the missing token,
-// we have to conjure one up. This method gives the user control
-// over the tokens returned for missing tokens. Mostly,
-// you will want to create something special for identifier
-// tokens. For literals such as '{' and ',', the default
-// action in the parser or tree parser works. It simply creates
-// a [CommonToken] of the appropriate type. The text will be the token name.
-// If you need to change which tokens must be created by the lexer,
-// override this method to create the appropriate tokens.
-func (d *DefaultErrorStrategy) GetMissingSymbol(recognizer Parser) Token {
- currentSymbol := recognizer.GetCurrentToken()
- expecting := d.GetExpectedTokens(recognizer)
- expectedTokenType := expecting.first()
- var tokenText string
-
- if expectedTokenType == TokenEOF {
- tokenText = ""
- } else {
- ln := recognizer.GetLiteralNames()
- if expectedTokenType > 0 && expectedTokenType < len(ln) {
- tokenText = ""
- } else {
- tokenText = "" // TODO: matches the JS impl
- }
- }
- current := currentSymbol
- lookback := recognizer.GetTokenStream().LT(-1)
- if current.GetTokenType() == TokenEOF && lookback != nil {
- current = lookback
- }
-
- tf := recognizer.GetTokenFactory()
-
- return tf.Create(current.GetSource(), expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.GetLine(), current.GetColumn())
-}
-
-func (d *DefaultErrorStrategy) GetExpectedTokens(recognizer Parser) *IntervalSet {
- return recognizer.GetExpectedTokens()
-}
-
-// GetTokenErrorDisplay determines how a token should be displayed in an error message.
-// The default is to display just the text, but during development you might
-// want to have a lot of information spit out. Override this func in that case
-// to use t.String() (which, for [CommonToken], dumps everything about
-// the token). This is better than forcing you to override a method in
-// your token objects because you don't have to go modify your lexer
-// so that it creates a new type.
-func (d *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string {
- if t == nil {
- return ""
- }
- s := t.GetText()
- if s == "" {
- if t.GetTokenType() == TokenEOF {
- s = ""
- } else {
- s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
- }
- }
- return d.escapeWSAndQuote(s)
-}
-
-func (d *DefaultErrorStrategy) escapeWSAndQuote(s string) string {
- s = strings.Replace(s, "\t", "\\t", -1)
- s = strings.Replace(s, "\n", "\\n", -1)
- s = strings.Replace(s, "\r", "\\r", -1)
- return "'" + s + "'"
-}
-
-// GetErrorRecoverySet computes the error recovery set for the current rule. During
-// rule invocation, the parser pushes the set of tokens that can
-// follow that rule reference on the stack. This amounts to
-// computing FIRST of what follows the rule reference in the
-// enclosing rule. See LinearApproximator.FIRST().
-//
-// This local follow set only includes tokens
-// from within the rule i.e., the FIRST computation done by
-// ANTLR stops at the end of a rule.
-//
-// # Example
-//
-// When you find a "no viable alt exception", the input is not
-// consistent with any of the alternatives for rule r. The best
-// thing to do is to consume tokens until you see something that
-// can legally follow a call to r or any rule that called r.
-// You don't want the exact set of viable next tokens because the
-// input might just be missing a token--you might consume the
-// rest of the input looking for one of the missing tokens.
-//
-// Consider the grammar:
-//
-// a : '[' b ']'
-// | '(' b ')'
-// ;
-//
-// b : c '^' INT
-// ;
-//
-// c : ID
-// | INT
-// ;
-//
-// At each rule invocation, the set of tokens that could follow
-// that rule is pushed on a stack. Here are the various
-// context-sensitive follow sets:
-//
-// FOLLOW(b1_in_a) = FIRST(']') = ']'
-// FOLLOW(b2_in_a) = FIRST(')') = ')'
-// FOLLOW(c_in_b) = FIRST('^') = '^'
-//
-// Upon erroneous input “[]”, the call chain is
-//
-// a → b → c
-//
-// and, hence, the follow context stack is:
-//
-// Depth Follow set Start of rule execution
-// 0 a (from main())
-// 1 ']' b
-// 2 '^' c
-//
-// Notice that ')' is not included, because b would have to have
-// been called from a different context in rule a for ')' to be
-// included.
-//
-// For error recovery, we cannot consider FOLLOW(c)
-// (context-sensitive or otherwise). We need the combined set of
-// all context-sensitive FOLLOW sets - the set of all tokens that
-// could follow any reference in the call chain. We need to
-// reSync to one of those tokens. Note that FOLLOW(c)='^' and if
-// we reSync'd to that token, we'd consume until EOF. We need to
-// Sync to context-sensitive FOLLOWs for a, b, and c:
-//
-// {']','^'}
-//
-// In this case, for input "[]", LA(1) is ']' and in the set, so we would
-// not consume anything. After printing an error, rule c would
-// return normally. Rule b would not find the required '^' though.
-// At this point, it gets a mismatched token error and panics an
-// exception (since LA(1) is not in the viable following token
-// set). The rule exception handler tries to recover, but finds
-// the same recovery set and doesn't consume anything. Rule b
-// exits normally returning to rule a. Now it finds the ']' (and
-// with the successful Match exits errorRecovery mode).
-//
-// So, you can see that the parser walks up the call chain looking
-// for the token that was a member of the recovery set.
-//
-// Errors are not generated in errorRecovery mode.
-//
-// ANTLR's error recovery mechanism is based upon original ideas:
-//
-// [Algorithms + Data Structures = Programs] by Niklaus Wirth and
-// [A note on error recovery in recursive descent parsers].
-//
-// Later, Josef Grosch had some good ideas in [Efficient and Comfortable Error Recovery in Recursive Descent
-// Parsers]
-//
-// Like Grosch I implement context-sensitive FOLLOW sets that are combined at run-time upon error to avoid overhead
-// during parsing. Later, the runtime Sync was improved for loops/sub-rules see [Sync] docs
-//
-// [A note on error recovery in recursive descent parsers]: http://portal.acm.org/citation.cfm?id=947902.947905
-// [Algorithms + Data Structures = Programs]: https://t.ly/5QzgE
-// [Efficient and Comfortable Error Recovery in Recursive Descent Parsers]: ftp://www.cocolab.com/products/cocktail/doca4.ps/ell.ps.zip
-func (d *DefaultErrorStrategy) GetErrorRecoverySet(recognizer Parser) *IntervalSet {
- atn := recognizer.GetInterpreter().atn
- ctx := recognizer.GetParserRuleContext()
- recoverSet := NewIntervalSet()
- for ctx != nil && ctx.GetInvokingState() >= 0 {
- // compute what follows who invoked us
- invokingState := atn.states[ctx.GetInvokingState()]
- rt := invokingState.GetTransitions()[0]
- follow := atn.NextTokens(rt.(*RuleTransition).followState, nil)
- recoverSet.addSet(follow)
- ctx = ctx.GetParent().(ParserRuleContext)
- }
- recoverSet.removeOne(TokenEpsilon)
- return recoverSet
-}
-
-// Consume tokens until one Matches the given token set.//
-func (d *DefaultErrorStrategy) consumeUntil(recognizer Parser, set *IntervalSet) {
- ttype := recognizer.GetTokenStream().LA(1)
- for ttype != TokenEOF && !set.contains(ttype) {
- recognizer.Consume()
- ttype = recognizer.GetTokenStream().LA(1)
- }
-}
-
-// The BailErrorStrategy implementation of ANTLRErrorStrategy responds to syntax errors
-// by immediately canceling the parse operation with a
-// [ParseCancellationException]. The implementation ensures that the
-// [ParserRuleContext//exception] field is set for all parse tree nodes
-// that were not completed prior to encountering the error.
-//
-// This error strategy is useful in the following scenarios.
-//
-// - Two-stage parsing: This error strategy allows the first
-// stage of two-stage parsing to immediately terminate if an error is
-// encountered, and immediately fall back to the second stage. In addition to
-// avoiding wasted work by attempting to recover from errors here, the empty
-// implementation of [BailErrorStrategy.Sync] improves the performance of
-// the first stage.
-//
-// - Silent validation: When syntax errors are not being
-// Reported or logged, and the parse result is simply ignored if errors occur,
-// the [BailErrorStrategy] avoids wasting work on recovering from errors
-// when the result will be ignored either way.
-//
-// myparser.SetErrorHandler(NewBailErrorStrategy())
-//
-// See also: [Parser.SetErrorHandler(ANTLRErrorStrategy)]
-type BailErrorStrategy struct {
- *DefaultErrorStrategy
-}
-
-var _ ErrorStrategy = &BailErrorStrategy{}
-
-//goland:noinspection GoUnusedExportedFunction
-func NewBailErrorStrategy() *BailErrorStrategy {
-
- b := new(BailErrorStrategy)
-
- b.DefaultErrorStrategy = NewDefaultErrorStrategy()
-
- return b
-}
-
-// Recover Instead of recovering from exception e, re-panic it wrapped
-// in a [ParseCancellationException] so it is not caught by the
-// rule func catches. Use Exception.GetCause() to get the
-// original [RecognitionException].
-func (b *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) {
- context := recognizer.GetParserRuleContext()
- for context != nil {
- context.SetException(e)
- if parent, ok := context.GetParent().(ParserRuleContext); ok {
- context = parent
- } else {
- context = nil
- }
- }
- recognizer.SetError(NewParseCancellationException()) // TODO: we don't emit e properly
-}
-
-// RecoverInline makes sure we don't attempt to recover inline if the parser
-// successfully recovers, it won't panic an exception.
-func (b *BailErrorStrategy) RecoverInline(recognizer Parser) Token {
- b.Recover(recognizer, NewInputMisMatchException(recognizer))
-
- return nil
-}
-
-// Sync makes sure we don't attempt to recover from problems in sub-rules.
-func (b *BailErrorStrategy) Sync(_ Parser) {
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/errors.go b/vendor/github.com/antlr4-go/antlr/v4/errors.go
deleted file mode 100644
index 8f0f2f601..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/errors.go
+++ /dev/null
@@ -1,259 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-// The root of the ANTLR exception hierarchy. In general, ANTLR tracks just
-// 3 kinds of errors: prediction errors, failed predicate errors, and
-// mismatched input errors. In each case, the parser knows where it is
-// in the input, where it is in the ATN, the rule invocation stack,
-// and what kind of problem occurred.
-
-type RecognitionException interface {
- GetOffendingToken() Token
- GetMessage() string
- GetInputStream() IntStream
-}
-
-type BaseRecognitionException struct {
- message string
- recognizer Recognizer
- offendingToken Token
- offendingState int
- ctx RuleContext
- input IntStream
-}
-
-func NewBaseRecognitionException(message string, recognizer Recognizer, input IntStream, ctx RuleContext) *BaseRecognitionException {
-
- // todo
- // Error.call(this)
- //
- // if (!!Error.captureStackTrace) {
- // Error.captureStackTrace(this, RecognitionException)
- // } else {
- // stack := NewError().stack
- // }
- // TODO: may be able to use - "runtime" func Stack(buf []byte, all bool) int
-
- t := new(BaseRecognitionException)
-
- t.message = message
- t.recognizer = recognizer
- t.input = input
- t.ctx = ctx
-
- // The current Token when an error occurred. Since not all streams
- // support accessing symbols by index, we have to track the {@link Token}
- // instance itself.
- //
- t.offendingToken = nil
-
- // Get the ATN state number the parser was in at the time the error
- // occurred. For NoViableAltException and LexerNoViableAltException exceptions, this is the
- // DecisionState number. For others, it is the state whose outgoing edge we couldn't Match.
- //
- t.offendingState = -1
- if t.recognizer != nil {
- t.offendingState = t.recognizer.GetState()
- }
-
- return t
-}
-
-func (b *BaseRecognitionException) GetMessage() string {
- return b.message
-}
-
-func (b *BaseRecognitionException) GetOffendingToken() Token {
- return b.offendingToken
-}
-
-func (b *BaseRecognitionException) GetInputStream() IntStream {
- return b.input
-}
-
-//
-// If the state number is not known, b method returns -1.
-
-// getExpectedTokens gets the set of input symbols which could potentially follow the
-// previously Matched symbol at the time this exception was raised.
-//
-// If the set of expected tokens is not known and could not be computed,
-// this method returns nil.
-//
-// The func returns the set of token types that could potentially follow the current
-// state in the {ATN}, or nil if the information is not available.
-
-func (b *BaseRecognitionException) getExpectedTokens() *IntervalSet {
- if b.recognizer != nil {
- return b.recognizer.GetATN().getExpectedTokens(b.offendingState, b.ctx)
- }
-
- return nil
-}
-
-func (b *BaseRecognitionException) String() string {
- return b.message
-}
-
-type LexerNoViableAltException struct {
- *BaseRecognitionException
-
- startIndex int
- deadEndConfigs *ATNConfigSet
-}
-
-func NewLexerNoViableAltException(lexer Lexer, input CharStream, startIndex int, deadEndConfigs *ATNConfigSet) *LexerNoViableAltException {
-
- l := new(LexerNoViableAltException)
-
- l.BaseRecognitionException = NewBaseRecognitionException("", lexer, input, nil)
-
- l.startIndex = startIndex
- l.deadEndConfigs = deadEndConfigs
-
- return l
-}
-
-func (l *LexerNoViableAltException) String() string {
- symbol := ""
- if l.startIndex >= 0 && l.startIndex < l.input.Size() {
- symbol = l.input.(CharStream).GetTextFromInterval(NewInterval(l.startIndex, l.startIndex))
- }
- return "LexerNoViableAltException" + symbol
-}
-
-type NoViableAltException struct {
- *BaseRecognitionException
-
- startToken Token
- offendingToken Token
- ctx ParserRuleContext
- deadEndConfigs *ATNConfigSet
-}
-
-// NewNoViableAltException creates an exception indicating that the parser could not decide which of two or more paths
-// to take based upon the remaining input. It tracks the starting token
-// of the offending input and also knows where the parser was
-// in the various paths when the error.
-//
-// Reported by [ReportNoViableAlternative]
-func NewNoViableAltException(recognizer Parser, input TokenStream, startToken Token, offendingToken Token, deadEndConfigs *ATNConfigSet, ctx ParserRuleContext) *NoViableAltException {
-
- if ctx == nil {
- ctx = recognizer.GetParserRuleContext()
- }
-
- if offendingToken == nil {
- offendingToken = recognizer.GetCurrentToken()
- }
-
- if startToken == nil {
- startToken = recognizer.GetCurrentToken()
- }
-
- if input == nil {
- input = recognizer.GetInputStream().(TokenStream)
- }
-
- n := new(NoViableAltException)
- n.BaseRecognitionException = NewBaseRecognitionException("", recognizer, input, ctx)
-
- // Which configurations did we try at input.Index() that couldn't Match
- // input.LT(1)
- n.deadEndConfigs = deadEndConfigs
-
- // The token object at the start index the input stream might
- // not be buffering tokens so get a reference to it.
- //
- // At the time the error occurred, of course the stream needs to keep a
- // buffer of all the tokens, but later we might not have access to those.
- n.startToken = startToken
- n.offendingToken = offendingToken
-
- return n
-}
-
-type InputMisMatchException struct {
- *BaseRecognitionException
-}
-
-// NewInputMisMatchException creates an exception that signifies any kind of mismatched input exceptions such as
-// when the current input does not Match the expected token.
-func NewInputMisMatchException(recognizer Parser) *InputMisMatchException {
-
- i := new(InputMisMatchException)
- i.BaseRecognitionException = NewBaseRecognitionException("", recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
-
- i.offendingToken = recognizer.GetCurrentToken()
-
- return i
-
-}
-
-// FailedPredicateException indicates that a semantic predicate failed during validation. Validation of predicates
-// occurs when normally parsing the alternative just like Matching a token.
-// Disambiguating predicate evaluation occurs when we test a predicate during
-// prediction.
-type FailedPredicateException struct {
- *BaseRecognitionException
-
- ruleIndex int
- predicateIndex int
- predicate string
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func NewFailedPredicateException(recognizer Parser, predicate string, message string) *FailedPredicateException {
-
- f := new(FailedPredicateException)
-
- f.BaseRecognitionException = NewBaseRecognitionException(f.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
-
- s := recognizer.GetInterpreter().atn.states[recognizer.GetState()]
- trans := s.GetTransitions()[0]
- if trans2, ok := trans.(*PredicateTransition); ok {
- f.ruleIndex = trans2.ruleIndex
- f.predicateIndex = trans2.predIndex
- } else {
- f.ruleIndex = 0
- f.predicateIndex = 0
- }
- f.predicate = predicate
- f.offendingToken = recognizer.GetCurrentToken()
-
- return f
-}
-
-func (f *FailedPredicateException) formatMessage(predicate, message string) string {
- if message != "" {
- return message
- }
-
- return "failed predicate: {" + predicate + "}?"
-}
-
-type ParseCancellationException struct {
-}
-
-func (p ParseCancellationException) GetOffendingToken() Token {
- //TODO implement me
- panic("implement me")
-}
-
-func (p ParseCancellationException) GetMessage() string {
- //TODO implement me
- panic("implement me")
-}
-
-func (p ParseCancellationException) GetInputStream() IntStream {
- //TODO implement me
- panic("implement me")
-}
-
-func NewParseCancellationException() *ParseCancellationException {
- // Error.call(this)
- // Error.captureStackTrace(this, ParseCancellationException)
- return new(ParseCancellationException)
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/file_stream.go b/vendor/github.com/antlr4-go/antlr/v4/file_stream.go
deleted file mode 100644
index 5f65f809b..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/file_stream.go
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "bufio"
- "os"
-)
-
-// This is an InputStream that is loaded from a file all at once
-// when you construct the object.
-
-type FileStream struct {
- InputStream
- filename string
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func NewFileStream(fileName string) (*FileStream, error) {
-
- f, err := os.Open(fileName)
- if err != nil {
- return nil, err
- }
-
- defer func(f *os.File) {
- errF := f.Close()
- if errF != nil {
- }
- }(f)
-
- reader := bufio.NewReader(f)
- fInfo, err := f.Stat()
- if err != nil {
- return nil, err
- }
-
- fs := &FileStream{
- InputStream: InputStream{
- index: 0,
- name: fileName,
- },
- filename: fileName,
- }
-
- // Pre-build the buffer and read runes efficiently
- //
- fs.data = make([]rune, 0, fInfo.Size())
- for {
- r, _, err := reader.ReadRune()
- if err != nil {
- break
- }
- fs.data = append(fs.data, r)
- }
- fs.size = len(fs.data) // Size in runes
-
- // All done.
- //
- return fs, nil
-}
-
-func (f *FileStream) GetSourceName() string {
- return f.filename
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/input_stream.go b/vendor/github.com/antlr4-go/antlr/v4/input_stream.go
deleted file mode 100644
index b737fe85f..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/input_stream.go
+++ /dev/null
@@ -1,157 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "bufio"
- "io"
-)
-
-type InputStream struct {
- name string
- index int
- data []rune
- size int
-}
-
-// NewIoStream creates a new input stream from the given io.Reader reader.
-// Note that the reader is read completely into memory and so it must actually
-// have a stopping point - you cannot pass in a reader on an open-ended source such
-// as a socket for instance.
-func NewIoStream(reader io.Reader) *InputStream {
-
- rReader := bufio.NewReader(reader)
-
- is := &InputStream{
- name: "",
- index: 0,
- }
-
- // Pre-build the buffer and read runes reasonably efficiently given that
- // we don't exactly know how big the input is.
- //
- is.data = make([]rune, 0, 512)
- for {
- r, _, err := rReader.ReadRune()
- if err != nil {
- break
- }
- is.data = append(is.data, r)
- }
- is.size = len(is.data) // number of runes
- return is
-}
-
-// NewInputStream creates a new input stream from the given string
-func NewInputStream(data string) *InputStream {
-
- is := &InputStream{
- name: "",
- index: 0,
- data: []rune(data), // This is actually the most efficient way
- }
- is.size = len(is.data) // number of runes, but we could also use len(data), which is efficient too
- return is
-}
-
-func (is *InputStream) reset() {
- is.index = 0
-}
-
-// Consume moves the input pointer to the next character in the input stream
-func (is *InputStream) Consume() {
- if is.index >= is.size {
- // assert is.LA(1) == TokenEOF
- panic("cannot consume EOF")
- }
- is.index++
-}
-
-// LA returns the character at the given offset from the start of the input stream
-func (is *InputStream) LA(offset int) int {
-
- if offset == 0 {
- return 0 // nil
- }
- if offset < 0 {
- offset++ // e.g., translate LA(-1) to use offset=0
- }
- pos := is.index + offset - 1
-
- if pos < 0 || pos >= is.size { // invalid
- return TokenEOF
- }
-
- return int(is.data[pos])
-}
-
-// LT returns the character at the given offset from the start of the input stream
-func (is *InputStream) LT(offset int) int {
- return is.LA(offset)
-}
-
-// Index returns the current offset in to the input stream
-func (is *InputStream) Index() int {
- return is.index
-}
-
-// Size returns the total number of characters in the input stream
-func (is *InputStream) Size() int {
- return is.size
-}
-
-// Mark does nothing here as we have entire buffer
-func (is *InputStream) Mark() int {
- return -1
-}
-
-// Release does nothing here as we have entire buffer
-func (is *InputStream) Release(_ int) {
-}
-
-// Seek the input point to the provided index offset
-func (is *InputStream) Seek(index int) {
- if index <= is.index {
- is.index = index // just jump don't update stream state (line,...)
- return
- }
- // seek forward
- is.index = intMin(index, is.size)
-}
-
-// GetText returns the text from the input stream from the start to the stop index
-func (is *InputStream) GetText(start int, stop int) string {
- if stop >= is.size {
- stop = is.size - 1
- }
- if start >= is.size {
- return ""
- }
-
- return string(is.data[start : stop+1])
-}
-
-// GetTextFromTokens returns the text from the input stream from the first character of the start token to the last
-// character of the stop token
-func (is *InputStream) GetTextFromTokens(start, stop Token) string {
- if start != nil && stop != nil {
- return is.GetTextFromInterval(NewInterval(start.GetTokenIndex(), stop.GetTokenIndex()))
- }
-
- return ""
-}
-
-func (is *InputStream) GetTextFromInterval(i Interval) string {
- return is.GetText(i.Start, i.Stop)
-}
-
-func (*InputStream) GetSourceName() string {
- return ""
-}
-
-// String returns the entire input stream as a string
-func (is *InputStream) String() string {
- return string(is.data)
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/int_stream.go b/vendor/github.com/antlr4-go/antlr/v4/int_stream.go
deleted file mode 100644
index 4778878bd..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/int_stream.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-type IntStream interface {
- Consume()
- LA(int) int
- Mark() int
- Release(marker int)
- Index() int
- Seek(index int)
- Size() int
- GetSourceName() string
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/interval_set.go b/vendor/github.com/antlr4-go/antlr/v4/interval_set.go
deleted file mode 100644
index cc5066067..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/interval_set.go
+++ /dev/null
@@ -1,330 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "strconv"
- "strings"
-)
-
-type Interval struct {
- Start int
- Stop int
-}
-
-// NewInterval creates a new interval with the given start and stop values.
-func NewInterval(start, stop int) Interval {
- return Interval{
- Start: start,
- Stop: stop,
- }
-}
-
-// Contains returns true if the given item is contained within the interval.
-func (i Interval) Contains(item int) bool {
- return item >= i.Start && item < i.Stop
-}
-
-// String generates a string representation of the interval.
-func (i Interval) String() string {
- if i.Start == i.Stop-1 {
- return strconv.Itoa(i.Start)
- }
-
- return strconv.Itoa(i.Start) + ".." + strconv.Itoa(i.Stop-1)
-}
-
-// Length returns the length of the interval.
-func (i Interval) Length() int {
- return i.Stop - i.Start
-}
-
-// IntervalSet represents a collection of [Intervals], which may be read-only.
-type IntervalSet struct {
- intervals []Interval
- readOnly bool
-}
-
-// NewIntervalSet creates a new empty, writable, interval set.
-func NewIntervalSet() *IntervalSet {
-
- i := new(IntervalSet)
-
- i.intervals = nil
- i.readOnly = false
-
- return i
-}
-
-func (i *IntervalSet) Equals(other *IntervalSet) bool {
- if len(i.intervals) != len(other.intervals) {
- return false
- }
-
- for k, v := range i.intervals {
- if v.Start != other.intervals[k].Start || v.Stop != other.intervals[k].Stop {
- return false
- }
- }
-
- return true
-}
-
-func (i *IntervalSet) first() int {
- if len(i.intervals) == 0 {
- return TokenInvalidType
- }
-
- return i.intervals[0].Start
-}
-
-func (i *IntervalSet) addOne(v int) {
- i.addInterval(NewInterval(v, v+1))
-}
-
-func (i *IntervalSet) addRange(l, h int) {
- i.addInterval(NewInterval(l, h+1))
-}
-
-func (i *IntervalSet) addInterval(v Interval) {
- if i.intervals == nil {
- i.intervals = make([]Interval, 0)
- i.intervals = append(i.intervals, v)
- } else {
- // find insert pos
- for k, interval := range i.intervals {
- // distinct range -> insert
- if v.Stop < interval.Start {
- i.intervals = append(i.intervals[0:k], append([]Interval{v}, i.intervals[k:]...)...)
- return
- } else if v.Stop == interval.Start {
- i.intervals[k].Start = v.Start
- return
- } else if v.Start <= interval.Stop {
- i.intervals[k] = NewInterval(intMin(interval.Start, v.Start), intMax(interval.Stop, v.Stop))
-
- // if not applying to end, merge potential overlaps
- if k < len(i.intervals)-1 {
- l := i.intervals[k]
- r := i.intervals[k+1]
- // if r contained in l
- if l.Stop >= r.Stop {
- i.intervals = append(i.intervals[0:k+1], i.intervals[k+2:]...)
- } else if l.Stop >= r.Start { // partial overlap
- i.intervals[k] = NewInterval(l.Start, r.Stop)
- i.intervals = append(i.intervals[0:k+1], i.intervals[k+2:]...)
- }
- }
- return
- }
- }
- // greater than any exiting
- i.intervals = append(i.intervals, v)
- }
-}
-
-func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
- if other.intervals != nil {
- for k := 0; k < len(other.intervals); k++ {
- i2 := other.intervals[k]
- i.addInterval(NewInterval(i2.Start, i2.Stop))
- }
- }
- return i
-}
-
-func (i *IntervalSet) complement(start int, stop int) *IntervalSet {
- result := NewIntervalSet()
- result.addInterval(NewInterval(start, stop+1))
- for j := 0; j < len(i.intervals); j++ {
- result.removeRange(i.intervals[j])
- }
- return result
-}
-
-func (i *IntervalSet) contains(item int) bool {
- if i.intervals == nil {
- return false
- }
- for k := 0; k < len(i.intervals); k++ {
- if i.intervals[k].Contains(item) {
- return true
- }
- }
- return false
-}
-
-func (i *IntervalSet) length() int {
- iLen := 0
-
- for _, v := range i.intervals {
- iLen += v.Length()
- }
-
- return iLen
-}
-
-func (i *IntervalSet) removeRange(v Interval) {
- if v.Start == v.Stop-1 {
- i.removeOne(v.Start)
- } else if i.intervals != nil {
- k := 0
- for n := 0; n < len(i.intervals); n++ {
- ni := i.intervals[k]
- // intervals are ordered
- if v.Stop <= ni.Start {
- return
- } else if v.Start > ni.Start && v.Stop < ni.Stop {
- i.intervals[k] = NewInterval(ni.Start, v.Start)
- x := NewInterval(v.Stop, ni.Stop)
- // i.intervals.splice(k, 0, x)
- i.intervals = append(i.intervals[0:k], append([]Interval{x}, i.intervals[k:]...)...)
- return
- } else if v.Start <= ni.Start && v.Stop >= ni.Stop {
- // i.intervals.splice(k, 1)
- i.intervals = append(i.intervals[0:k], i.intervals[k+1:]...)
- k = k - 1 // need another pass
- } else if v.Start < ni.Stop {
- i.intervals[k] = NewInterval(ni.Start, v.Start)
- } else if v.Stop < ni.Stop {
- i.intervals[k] = NewInterval(v.Stop, ni.Stop)
- }
- k++
- }
- }
-}
-
-func (i *IntervalSet) removeOne(v int) {
- if i.intervals != nil {
- for k := 0; k < len(i.intervals); k++ {
- ki := i.intervals[k]
- // intervals i ordered
- if v < ki.Start {
- return
- } else if v == ki.Start && v == ki.Stop-1 {
- // i.intervals.splice(k, 1)
- i.intervals = append(i.intervals[0:k], i.intervals[k+1:]...)
- return
- } else if v == ki.Start {
- i.intervals[k] = NewInterval(ki.Start+1, ki.Stop)
- return
- } else if v == ki.Stop-1 {
- i.intervals[k] = NewInterval(ki.Start, ki.Stop-1)
- return
- } else if v < ki.Stop-1 {
- x := NewInterval(ki.Start, v)
- ki.Start = v + 1
- // i.intervals.splice(k, 0, x)
- i.intervals = append(i.intervals[0:k], append([]Interval{x}, i.intervals[k:]...)...)
- return
- }
- }
- }
-}
-
-func (i *IntervalSet) String() string {
- return i.StringVerbose(nil, nil, false)
-}
-
-func (i *IntervalSet) StringVerbose(literalNames []string, symbolicNames []string, elemsAreChar bool) string {
-
- if i.intervals == nil {
- return "{}"
- } else if literalNames != nil || symbolicNames != nil {
- return i.toTokenString(literalNames, symbolicNames)
- } else if elemsAreChar {
- return i.toCharString()
- }
-
- return i.toIndexString()
-}
-
-func (i *IntervalSet) GetIntervals() []Interval {
- return i.intervals
-}
-
-func (i *IntervalSet) toCharString() string {
- names := make([]string, len(i.intervals))
-
- var sb strings.Builder
-
- for j := 0; j < len(i.intervals); j++ {
- v := i.intervals[j]
- if v.Stop == v.Start+1 {
- if v.Start == TokenEOF {
- names = append(names, "")
- } else {
- sb.WriteByte('\'')
- sb.WriteRune(rune(v.Start))
- sb.WriteByte('\'')
- names = append(names, sb.String())
- sb.Reset()
- }
- } else {
- sb.WriteByte('\'')
- sb.WriteRune(rune(v.Start))
- sb.WriteString("'..'")
- sb.WriteRune(rune(v.Stop - 1))
- sb.WriteByte('\'')
- names = append(names, sb.String())
- sb.Reset()
- }
- }
- if len(names) > 1 {
- return "{" + strings.Join(names, ", ") + "}"
- }
-
- return names[0]
-}
-
-func (i *IntervalSet) toIndexString() string {
-
- names := make([]string, 0)
- for j := 0; j < len(i.intervals); j++ {
- v := i.intervals[j]
- if v.Stop == v.Start+1 {
- if v.Start == TokenEOF {
- names = append(names, "")
- } else {
- names = append(names, strconv.Itoa(v.Start))
- }
- } else {
- names = append(names, strconv.Itoa(v.Start)+".."+strconv.Itoa(v.Stop-1))
- }
- }
- if len(names) > 1 {
- return "{" + strings.Join(names, ", ") + "}"
- }
-
- return names[0]
-}
-
-func (i *IntervalSet) toTokenString(literalNames []string, symbolicNames []string) string {
- names := make([]string, 0)
- for _, v := range i.intervals {
- for j := v.Start; j < v.Stop; j++ {
- names = append(names, i.elementName(literalNames, symbolicNames, j))
- }
- }
- if len(names) > 1 {
- return "{" + strings.Join(names, ", ") + "}"
- }
-
- return names[0]
-}
-
-func (i *IntervalSet) elementName(literalNames []string, symbolicNames []string, a int) string {
- if a == TokenEOF {
- return ""
- } else if a == TokenEpsilon {
- return ""
- } else {
- if a < len(literalNames) && literalNames[a] != "" {
- return literalNames[a]
- }
-
- return symbolicNames[a]
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/jcollect.go b/vendor/github.com/antlr4-go/antlr/v4/jcollect.go
deleted file mode 100644
index ceccd96d2..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/jcollect.go
+++ /dev/null
@@ -1,685 +0,0 @@
-package antlr
-
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-import (
- "container/list"
- "runtime/debug"
- "sort"
- "sync"
-)
-
-// Collectable is an interface that a struct should implement if it is to be
-// usable as a key in these collections.
-type Collectable[T any] interface {
- Hash() int
- Equals(other Collectable[T]) bool
-}
-
-type Comparator[T any] interface {
- Hash1(o T) int
- Equals2(T, T) bool
-}
-
-type CollectionSource int
-type CollectionDescriptor struct {
- SybolicName string
- Description string
-}
-
-const (
- UnknownCollection CollectionSource = iota
- ATNConfigLookupCollection
- ATNStateCollection
- DFAStateCollection
- ATNConfigCollection
- PredictionContextCollection
- SemanticContextCollection
- ClosureBusyCollection
- PredictionVisitedCollection
- MergeCacheCollection
- PredictionContextCacheCollection
- AltSetCollection
- ReachSetCollection
-)
-
-var CollectionDescriptors = map[CollectionSource]CollectionDescriptor{
- UnknownCollection: {
- SybolicName: "UnknownCollection",
- Description: "Unknown collection type. Only used if the target author thought it was an unimportant collection.",
- },
- ATNConfigCollection: {
- SybolicName: "ATNConfigCollection",
- Description: "ATNConfig collection. Used to store the ATNConfigs for a particular state in the ATN." +
- "For instance, it is used to store the results of the closure() operation in the ATN.",
- },
- ATNConfigLookupCollection: {
- SybolicName: "ATNConfigLookupCollection",
- Description: "ATNConfigLookup collection. Used to store the ATNConfigs for a particular state in the ATN." +
- "This is used to prevent duplicating equivalent states in an ATNConfigurationSet.",
- },
- ATNStateCollection: {
- SybolicName: "ATNStateCollection",
- Description: "ATNState collection. This is used to store the states of the ATN.",
- },
- DFAStateCollection: {
- SybolicName: "DFAStateCollection",
- Description: "DFAState collection. This is used to store the states of the DFA.",
- },
- PredictionContextCollection: {
- SybolicName: "PredictionContextCollection",
- Description: "PredictionContext collection. This is used to store the prediction contexts of the ATN and cache computes.",
- },
- SemanticContextCollection: {
- SybolicName: "SemanticContextCollection",
- Description: "SemanticContext collection. This is used to store the semantic contexts of the ATN.",
- },
- ClosureBusyCollection: {
- SybolicName: "ClosureBusyCollection",
- Description: "ClosureBusy collection. This is used to check and prevent infinite recursion right recursive rules." +
- "It stores ATNConfigs that are currently being processed in the closure() operation.",
- },
- PredictionVisitedCollection: {
- SybolicName: "PredictionVisitedCollection",
- Description: "A map that records whether we have visited a particular context when searching through cached entries.",
- },
- MergeCacheCollection: {
- SybolicName: "MergeCacheCollection",
- Description: "A map that records whether we have already merged two particular contexts and can save effort by not repeating it.",
- },
- PredictionContextCacheCollection: {
- SybolicName: "PredictionContextCacheCollection",
- Description: "A map that records whether we have already created a particular context and can save effort by not computing it again.",
- },
- AltSetCollection: {
- SybolicName: "AltSetCollection",
- Description: "Used to eliminate duplicate alternatives in an ATN config set.",
- },
- ReachSetCollection: {
- SybolicName: "ReachSetCollection",
- Description: "Used as merge cache to prevent us needing to compute the merge of two states if we have already done it.",
- },
-}
-
-// JStore implements a container that allows the use of a struct to calculate the key
-// for a collection of values akin to map. This is not meant to be a full-blown HashMap but just
-// serve the needs of the ANTLR Go runtime.
-//
-// For ease of porting the logic of the runtime from the master target (Java), this collection
-// operates in a similar way to Java, in that it can use any struct that supplies a Hash() and Equals()
-// function as the key. The values are stored in a standard go map which internally is a form of hashmap
-// itself, the key for the go map is the hash supplied by the key object. The collection is able to deal with
-// hash conflicts by using a simple slice of values associated with the hash code indexed bucket. That isn't
-// particularly efficient, but it is simple, and it works. As this is specifically for the ANTLR runtime, and
-// we understand the requirements, then this is fine - this is not a general purpose collection.
-type JStore[T any, C Comparator[T]] struct {
- store map[int][]T
- len int
- comparator Comparator[T]
- stats *JStatRec
-}
-
-func NewJStore[T any, C Comparator[T]](comparator Comparator[T], cType CollectionSource, desc string) *JStore[T, C] {
-
- if comparator == nil {
- panic("comparator cannot be nil")
- }
-
- s := &JStore[T, C]{
- store: make(map[int][]T, 1),
- comparator: comparator,
- }
- if collectStats {
- s.stats = &JStatRec{
- Source: cType,
- Description: desc,
- }
-
- // Track where we created it from if we are being asked to do so
- if runtimeConfig.statsTraceStacks {
- s.stats.CreateStack = debug.Stack()
- }
- Statistics.AddJStatRec(s.stats)
- }
- return s
-}
-
-// Put will store given value in the collection. Note that the key for storage is generated from
-// the value itself - this is specifically because that is what ANTLR needs - this would not be useful
-// as any kind of general collection.
-//
-// If the key has a hash conflict, then the value will be added to the slice of values associated with the
-// hash, unless the value is already in the slice, in which case the existing value is returned. Value equivalence is
-// tested by calling the equals() method on the key.
-//
-// # If the given value is already present in the store, then the existing value is returned as v and exists is set to true
-//
-// If the given value is not present in the store, then the value is added to the store and returned as v and exists is set to false.
-func (s *JStore[T, C]) Put(value T) (v T, exists bool) {
-
- if collectStats {
- s.stats.Puts++
- }
- kh := s.comparator.Hash1(value)
-
- var hClash bool
- for _, v1 := range s.store[kh] {
- hClash = true
- if s.comparator.Equals2(value, v1) {
- if collectStats {
- s.stats.PutHits++
- s.stats.PutHashConflicts++
- }
- return v1, true
- }
- if collectStats {
- s.stats.PutMisses++
- }
- }
- if collectStats && hClash {
- s.stats.PutHashConflicts++
- }
- s.store[kh] = append(s.store[kh], value)
-
- if collectStats {
- if len(s.store[kh]) > s.stats.MaxSlotSize {
- s.stats.MaxSlotSize = len(s.store[kh])
- }
- }
- s.len++
- if collectStats {
- s.stats.CurSize = s.len
- if s.len > s.stats.MaxSize {
- s.stats.MaxSize = s.len
- }
- }
- return value, false
-}
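
For reference, the bucket handling that the JStore and Put comments above describe can be sketched independently of the ANTLR types. The following is a minimal, hypothetical illustration only (the names smallStore/put and the deliberately weak length-based hash are not part of the vendored runtime): values land in a bucket chosen by a caller-supplied hash, and collisions are resolved by scanning the bucket with a caller-supplied equality callback.

    package main

    import "fmt"

    // smallStore mirrors the JStore idea: buckets are keyed by a caller-supplied
    // hash, and each bucket is a slice scanned with an equality callback so that
    // hash collisions never lose values.
    type smallStore[T any] struct {
        buckets map[int][]T
        hash    func(T) int
        equals  func(T, T) bool
    }

    // put returns the already-stored equal value and true, or stores v and returns false.
    func (s *smallStore[T]) put(v T) (T, bool) {
        h := s.hash(v)
        for _, old := range s.buckets[h] {
            if s.equals(old, v) {
                return old, true // reuse the canonical instance
            }
        }
        s.buckets[h] = append(s.buckets[h], v)
        return v, false
    }

    func main() {
        s := &smallStore[string]{
            buckets: map[int][]string{},
            hash:    func(v string) int { return len(v) }, // weak hash, chosen to force collisions
            equals:  func(a, b string) bool { return a == b },
        }
        fmt.Println(s.put("ab")) // ab false
        fmt.Println(s.put("cd")) // cd false: same bucket as "ab", resolved by equals
        fmt.Println(s.put("ab")) // ab true: canonical value returned
    }
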
-
-// Get will return the value associated with the key - the type of the key is the same type as the value
-// which would not generally be useful, but this is a specific thing for ANTLR where the key is
-// generated using the object we are going to store.
-func (s *JStore[T, C]) Get(key T) (T, bool) {
- if collectStats {
- s.stats.Gets++
- }
- kh := s.comparator.Hash1(key)
- var hClash bool
- for _, v := range s.store[kh] {
- hClash = true
- if s.comparator.Equals2(key, v) {
- if collectStats {
- s.stats.GetHits++
- s.stats.GetHashConflicts++
- }
- return v, true
- }
- if collectStats {
- s.stats.GetMisses++
- }
- }
- if collectStats {
- if hClash {
- s.stats.GetHashConflicts++
- }
- s.stats.GetNoEnt++
- }
- return key, false
-}
-
-// Contains returns true if the given key is present in the store
-func (s *JStore[T, C]) Contains(key T) bool {
- _, present := s.Get(key)
- return present
-}
-
-func (s *JStore[T, C]) SortedSlice(less func(i, j T) bool) []T {
- vs := make([]T, 0, len(s.store))
- for _, v := range s.store {
- vs = append(vs, v...)
- }
- sort.Slice(vs, func(i, j int) bool {
- return less(vs[i], vs[j])
- })
-
- return vs
-}
-
-func (s *JStore[T, C]) Each(f func(T) bool) {
- for _, e := range s.store {
- for _, v := range e {
- f(v)
- }
- }
-}
-
-func (s *JStore[T, C]) Len() int {
- return s.len
-}
-
-func (s *JStore[T, C]) Values() []T {
- vs := make([]T, 0, len(s.store))
- for _, e := range s.store {
- vs = append(vs, e...)
- }
- return vs
-}
-
-type entry[K, V any] struct {
- key K
- val V
-}
-
-type JMap[K, V any, C Comparator[K]] struct {
- store map[int][]*entry[K, V]
- len int
- comparator Comparator[K]
- stats *JStatRec
-}
-
-func NewJMap[K, V any, C Comparator[K]](comparator Comparator[K], cType CollectionSource, desc string) *JMap[K, V, C] {
- m := &JMap[K, V, C]{
- store: make(map[int][]*entry[K, V], 1),
- comparator: comparator,
- }
- if collectStats {
- m.stats = &JStatRec{
- Source: cType,
- Description: desc,
- }
- // Track where we created it from if we are being asked to do so
- if runtimeConfig.statsTraceStacks {
- m.stats.CreateStack = debug.Stack()
- }
- Statistics.AddJStatRec(m.stats)
- }
- return m
-}
-
-func (m *JMap[K, V, C]) Put(key K, val V) (V, bool) {
- if collectStats {
- m.stats.Puts++
- }
- kh := m.comparator.Hash1(key)
-
- var hClash bool
- for _, e := range m.store[kh] {
- hClash = true
- if m.comparator.Equals2(e.key, key) {
- if collectStats {
- m.stats.PutHits++
- m.stats.PutHashConflicts++
- }
- return e.val, true
- }
- if collectStats {
- m.stats.PutMisses++
- }
- }
- if collectStats {
- if hClash {
- m.stats.PutHashConflicts++
- }
- }
- m.store[kh] = append(m.store[kh], &entry[K, V]{key, val})
- if collectStats {
- if len(m.store[kh]) > m.stats.MaxSlotSize {
- m.stats.MaxSlotSize = len(m.store[kh])
- }
- }
- m.len++
- if collectStats {
- m.stats.CurSize = m.len
- if m.len > m.stats.MaxSize {
- m.stats.MaxSize = m.len
- }
- }
- return val, false
-}
-
-func (m *JMap[K, V, C]) Values() []V {
- vs := make([]V, 0, len(m.store))
- for _, e := range m.store {
- for _, v := range e {
- vs = append(vs, v.val)
- }
- }
- return vs
-}
-
-func (m *JMap[K, V, C]) Get(key K) (V, bool) {
- if collectStats {
- m.stats.Gets++
- }
- var none V
- kh := m.comparator.Hash1(key)
- var hClash bool
- for _, e := range m.store[kh] {
- hClash = true
- if m.comparator.Equals2(e.key, key) {
- if collectStats {
- m.stats.GetHits++
- m.stats.GetHashConflicts++
- }
- return e.val, true
- }
- if collectStats {
- m.stats.GetMisses++
- }
- }
- if collectStats {
- if hClash {
- m.stats.GetHashConflicts++
- }
- m.stats.GetNoEnt++
- }
- return none, false
-}
-
-func (m *JMap[K, V, C]) Len() int {
- return m.len
-}
-
-func (m *JMap[K, V, C]) Delete(key K) {
- kh := m.comparator.Hash1(key)
- for i, e := range m.store[kh] {
- if m.comparator.Equals2(e.key, key) {
- m.store[kh] = append(m.store[kh][:i], m.store[kh][i+1:]...)
- m.len--
- return
- }
- }
-}
-
-func (m *JMap[K, V, C]) Clear() {
- m.store = make(map[int][]*entry[K, V])
-}
-
-type JPCMap struct {
- store *JMap[*PredictionContext, *JMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]], *ObjEqComparator[*PredictionContext]]
- size int
- stats *JStatRec
-}
-
-func NewJPCMap(cType CollectionSource, desc string) *JPCMap {
- m := &JPCMap{
- store: NewJMap[*PredictionContext, *JMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]], *ObjEqComparator[*PredictionContext]](pContextEqInst, cType, desc),
- }
- if collectStats {
- m.stats = &JStatRec{
- Source: cType,
- Description: desc,
- }
- // Track where we created it from if we are being asked to do so
- if runtimeConfig.statsTraceStacks {
- m.stats.CreateStack = debug.Stack()
- }
- Statistics.AddJStatRec(m.stats)
- }
- return m
-}
-
-func (pcm *JPCMap) Get(k1, k2 *PredictionContext) (*PredictionContext, bool) {
- if collectStats {
- pcm.stats.Gets++
- }
- // Do we have a map stored by k1?
- //
- m2, present := pcm.store.Get(k1)
- if present {
- if collectStats {
- pcm.stats.GetHits++
- }
- // We found a map of values corresponding to k1, so now we need to look up k2 in that map
- //
- return m2.Get(k2)
- }
- if collectStats {
- pcm.stats.GetMisses++
- }
- return nil, false
-}
-
-func (pcm *JPCMap) Put(k1, k2, v *PredictionContext) {
-
- if collectStats {
- pcm.stats.Puts++
- }
- // First does a map already exist for k1?
- //
- if m2, present := pcm.store.Get(k1); present {
- if collectStats {
- pcm.stats.PutHits++
- }
- _, present = m2.Put(k2, v)
- if !present {
- pcm.size++
- if collectStats {
- pcm.stats.CurSize = pcm.size
- if pcm.size > pcm.stats.MaxSize {
- pcm.stats.MaxSize = pcm.size
- }
- }
- }
- } else {
- // No map found for k1, so we create it, add in our value, then store is
- //
- if collectStats {
- pcm.stats.PutMisses++
- m2 = NewJMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]](pContextEqInst, pcm.stats.Source, pcm.stats.Description+" map entry")
- } else {
- m2 = NewJMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]](pContextEqInst, PredictionContextCacheCollection, "map entry")
- }
-
- m2.Put(k2, v)
- pcm.store.Put(k1, m2)
- pcm.size++
- }
-}
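
Stripped of the statistics branches and the custom hashing, JPCMap's Put and Get above amount to a two-level map: an outer map keyed by k1 whose values are inner maps keyed by k2, created lazily. The generic pairCache below is a hypothetical sketch of just that shape (not the vendored type), keeping the "first value stored for a pair wins" behaviour of JMap.Put.

    package main

    import "fmt"

    // pairCache is a two-level map: an outer map keyed by k1 whose values are
    // inner maps keyed by k2, created lazily on first insert for a given k1.
    type pairCache[K comparable, V any] struct {
        store map[K]map[K]V
        size  int
    }

    func newPairCache[K comparable, V any]() *pairCache[K, V] {
        return &pairCache[K, V]{store: map[K]map[K]V{}}
    }

    func (c *pairCache[K, V]) put(k1, k2 K, v V) {
        inner, ok := c.store[k1]
        if !ok {
            inner = map[K]V{}
            c.store[k1] = inner
        }
        if _, exists := inner[k2]; exists {
            return // first value stored for a pair wins, as in JMap.Put
        }
        inner[k2] = v
        c.size++ // count distinct (k1, k2) pairs, as JPCMap does
    }

    func (c *pairCache[K, V]) get(k1, k2 K) (V, bool) {
        var zero V
        inner, ok := c.store[k1]
        if !ok {
            return zero, false
        }
        v, ok := inner[k2]
        if !ok {
            return zero, false
        }
        return v, true
    }

    func main() {
        c := newPairCache[string, int]()
        c.put("a", "b", 1)
        c.put("a", "b", 2)           // ignored: pair already present
        fmt.Println(c.get("a", "b")) // 1 true
        fmt.Println(c.size)          // 1
    }
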
-
-type JPCMap2 struct {
- store map[int][]JPCEntry
- size int
- stats *JStatRec
-}
-
-type JPCEntry struct {
- k1, k2, v *PredictionContext
-}
-
-func NewJPCMap2(cType CollectionSource, desc string) *JPCMap2 {
- m := &JPCMap2{
- store: make(map[int][]JPCEntry, 1000),
- }
- if collectStats {
- m.stats = &JStatRec{
- Source: cType,
- Description: desc,
- }
- // Track where we created it from if we are being asked to do so
- if runtimeConfig.statsTraceStacks {
- m.stats.CreateStack = debug.Stack()
- }
- Statistics.AddJStatRec(m.stats)
- }
- return m
-}
-
-func dHash(k1, k2 *PredictionContext) int {
- return k1.cachedHash*31 + k2.cachedHash
-}
-
-func (pcm *JPCMap2) Get(k1, k2 *PredictionContext) (*PredictionContext, bool) {
- if collectStats {
- pcm.stats.Gets++
- }
-
- h := dHash(k1, k2)
- var hClash bool
- for _, e := range pcm.store[h] {
- hClash = true
- if e.k1.Equals(k1) && e.k2.Equals(k2) {
- if collectStats {
- pcm.stats.GetHits++
- pcm.stats.GetHashConflicts++
- }
- return e.v, true
- }
- if collectStats {
- pcm.stats.GetMisses++
- }
- }
- if collectStats {
- if hClash {
- pcm.stats.GetHashConflicts++
- }
- pcm.stats.GetNoEnt++
- }
- return nil, false
-}
-
-func (pcm *JPCMap2) Put(k1, k2, v *PredictionContext) (*PredictionContext, bool) {
- if collectStats {
- pcm.stats.Puts++
- }
- h := dHash(k1, k2)
- var hClash bool
- for _, e := range pcm.store[h] {
- hClash = true
- if e.k1.Equals(k1) && e.k2.Equals(k2) {
- if collectStats {
- pcm.stats.PutHits++
- pcm.stats.PutHashConflicts++
- }
- return e.v, true
- }
- if collectStats {
- pcm.stats.PutMisses++
- }
- }
- if collectStats {
- if hClash {
- pcm.stats.PutHashConflicts++
- }
- }
- pcm.store[h] = append(pcm.store[h], JPCEntry{k1, k2, v})
- pcm.size++
- if collectStats {
- pcm.stats.CurSize = pcm.size
- if pcm.size > pcm.stats.MaxSize {
- pcm.stats.MaxSize = pcm.size
- }
- }
- return nil, false
-}
-
-type VisitEntry struct {
- k *PredictionContext
- v *PredictionContext
-}
-type VisitRecord struct {
- store map[*PredictionContext]*PredictionContext
- len int
- stats *JStatRec
-}
-
-type VisitList struct {
- cache *list.List
- lock sync.RWMutex
-}
-
-var visitListPool = VisitList{
- cache: list.New(),
- lock: sync.RWMutex{},
-}
-
-// NewVisitRecord returns a new VisitRecord instance from the pool if available.
-// Note that this "map" uses a pointer as a key because we are emulating the behavior of
-// IdentityHashMap in Java, which uses the `==` operator to compare whether the keys are equal,
-// which means is the key the same reference to an object rather than is it .equals() to another
-// object.
-func NewVisitRecord() *VisitRecord {
- visitListPool.lock.Lock()
- el := visitListPool.cache.Front()
- defer visitListPool.lock.Unlock()
- var vr *VisitRecord
- if el == nil {
- vr = &VisitRecord{
- store: make(map[*PredictionContext]*PredictionContext),
- }
- if collectStats {
- vr.stats = &JStatRec{
- Source: PredictionContextCacheCollection,
- Description: "VisitRecord",
- }
- // Track where we created it from if we are being asked to do so
- if runtimeConfig.statsTraceStacks {
- vr.stats.CreateStack = debug.Stack()
- }
- }
- } else {
- vr = el.Value.(*VisitRecord)
- visitListPool.cache.Remove(el)
- vr.store = make(map[*PredictionContext]*PredictionContext)
- }
- if collectStats {
- Statistics.AddJStatRec(vr.stats)
- }
- return vr
-}
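
As the comment above notes, keying the map by *PredictionContext pointers gives Java IdentityHashMap semantics: two structurally equal contexts are still distinct keys unless they are the same allocation. A small, hypothetical demonstration of that property with an unrelated type:

    package main

    import "fmt"

    type node struct{ id int }

    func main() {
        a := &node{id: 1}
        b := &node{id: 1} // equal contents, different allocation

        seen := map[*node]bool{} // pointer key => identity semantics
        seen[a] = true

        fmt.Println(seen[a]) // true: same reference
        fmt.Println(seen[b]) // false: equal value, different identity
    }
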
-
-func (vr *VisitRecord) Release() {
- vr.len = 0
- vr.store = nil
- if collectStats {
- vr.stats.MaxSize = 0
- vr.stats.CurSize = 0
- vr.stats.Gets = 0
- vr.stats.GetHits = 0
- vr.stats.GetMisses = 0
- vr.stats.GetHashConflicts = 0
- vr.stats.GetNoEnt = 0
- vr.stats.Puts = 0
- vr.stats.PutHits = 0
- vr.stats.PutMisses = 0
- vr.stats.PutHashConflicts = 0
- vr.stats.MaxSlotSize = 0
- }
- visitListPool.lock.Lock()
- visitListPool.cache.PushBack(vr)
- visitListPool.lock.Unlock()
-}
-
-func (vr *VisitRecord) Get(k *PredictionContext) (*PredictionContext, bool) {
- if collectStats {
- vr.stats.Gets++
- }
- v := vr.store[k]
- if v != nil {
- if collectStats {
- vr.stats.GetHits++
- }
- return v, true
- }
- if collectStats {
- vr.stats.GetNoEnt++
- }
- return nil, false
-}
-
-func (vr *VisitRecord) Put(k, v *PredictionContext) (*PredictionContext, bool) {
- if collectStats {
- vr.stats.Puts++
- }
- vr.store[k] = v
- vr.len++
- if collectStats {
- vr.stats.CurSize = vr.len
- if vr.len > vr.stats.MaxSize {
- vr.stats.MaxSize = vr.len
- }
- }
- return v, false
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/lexer.go b/vendor/github.com/antlr4-go/antlr/v4/lexer.go
deleted file mode 100644
index 3c7896a91..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/lexer.go
+++ /dev/null
@@ -1,426 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "strconv"
-)
-
-// A lexer is recognizer that draws input symbols from a character stream.
-// lexer grammars result in a subclass of this object. A Lexer object
-// uses simplified Match() and error recovery mechanisms in the interest
-// of speed.
-///
-
-type Lexer interface {
- TokenSource
- Recognizer
-
- Emit() Token
-
- SetChannel(int)
- PushMode(int)
- PopMode() int
- SetType(int)
- SetMode(int)
-}
-
-type BaseLexer struct {
- *BaseRecognizer
-
- Interpreter ILexerATNSimulator
- TokenStartCharIndex int
- TokenStartLine int
- TokenStartColumn int
- ActionType int
- Virt Lexer // The most derived lexer implementation. Allows virtual method calls.
-
- input CharStream
- factory TokenFactory
- tokenFactorySourcePair *TokenSourceCharStreamPair
- token Token
- hitEOF bool
- channel int
- thetype int
- modeStack IntStack
- mode int
- text string
-}
-
-func NewBaseLexer(input CharStream) *BaseLexer {
-
- lexer := new(BaseLexer)
-
- lexer.BaseRecognizer = NewBaseRecognizer()
-
- lexer.input = input
- lexer.factory = CommonTokenFactoryDEFAULT
- lexer.tokenFactorySourcePair = &TokenSourceCharStreamPair{lexer, input}
-
- lexer.Virt = lexer
-
- lexer.Interpreter = nil // child classes must populate it
-
- // The goal of all lexer rules/methods is to create a token object.
- // l is an instance variable as multiple rules may collaborate to
- // create a single token. NextToken will return l object after
- // Matching lexer rule(s). If you subclass to allow multiple token
- // emissions, then set l to the last token to be Matched or
- // something non nil so that the auto token emit mechanism will not
- // emit another token.
- lexer.token = nil
-
- // What character index in the stream did the current token start at?
- // Needed, for example, to get the text for current token. Set at
- // the start of NextToken.
- lexer.TokenStartCharIndex = -1
-
- // The line on which the first character of the token resides///
- lexer.TokenStartLine = -1
-
- // The character position of first character within the line///
- lexer.TokenStartColumn = -1
-
- // Once we see EOF on char stream, next token will be EOF.
- // If you have DONE : EOF then you see DONE EOF.
- lexer.hitEOF = false
-
- // The channel number for the current token///
- lexer.channel = TokenDefaultChannel
-
- // The token type for the current token///
- lexer.thetype = TokenInvalidType
-
- lexer.modeStack = make([]int, 0)
- lexer.mode = LexerDefaultMode
-
- // You can set the text for the current token to override what is in
- // the input char buffer. Use setText() or can set l instance var.
- // /
- lexer.text = ""
-
- return lexer
-}
-
-const (
- LexerDefaultMode = 0
- LexerMore = -2
- LexerSkip = -3
-)
-
-//goland:noinspection GoUnusedConst
-const (
- LexerDefaultTokenChannel = TokenDefaultChannel
- LexerHidden = TokenHiddenChannel
- LexerMinCharValue = 0x0000
- LexerMaxCharValue = 0x10FFFF
-)
-
-func (b *BaseLexer) Reset() {
- // wack Lexer state variables
- if b.input != nil {
- b.input.Seek(0) // rewind the input
- }
- b.token = nil
- b.thetype = TokenInvalidType
- b.channel = TokenDefaultChannel
- b.TokenStartCharIndex = -1
- b.TokenStartColumn = -1
- b.TokenStartLine = -1
- b.text = ""
-
- b.hitEOF = false
- b.mode = LexerDefaultMode
- b.modeStack = make([]int, 0)
-
- b.Interpreter.reset()
-}
-
-func (b *BaseLexer) GetInterpreter() ILexerATNSimulator {
- return b.Interpreter
-}
-
-func (b *BaseLexer) GetInputStream() CharStream {
- return b.input
-}
-
-func (b *BaseLexer) GetSourceName() string {
- return b.GrammarFileName
-}
-
-func (b *BaseLexer) SetChannel(v int) {
- b.channel = v
-}
-
-func (b *BaseLexer) GetTokenFactory() TokenFactory {
- return b.factory
-}
-
-func (b *BaseLexer) setTokenFactory(f TokenFactory) {
- b.factory = f
-}
-
-func (b *BaseLexer) safeMatch() (ret int) {
- defer func() {
- if e := recover(); e != nil {
- if re, ok := e.(RecognitionException); ok {
- b.notifyListeners(re) // Report error
- b.Recover(re)
- ret = LexerSkip // default
- }
- }
- }()
-
- return b.Interpreter.Match(b.input, b.mode)
-}
-
-// NextToken returns a token from the lexer input source i.e., Match a token on the source char stream.
-func (b *BaseLexer) NextToken() Token {
- if b.input == nil {
- panic("NextToken requires a non-nil input stream.")
- }
-
- tokenStartMarker := b.input.Mark()
-
- // previously in finally block
- defer func() {
- // make sure we release marker after Match or
- // unbuffered char stream will keep buffering
- b.input.Release(tokenStartMarker)
- }()
-
- for {
- if b.hitEOF {
- b.EmitEOF()
- return b.token
- }
- b.token = nil
- b.channel = TokenDefaultChannel
- b.TokenStartCharIndex = b.input.Index()
- b.TokenStartColumn = b.Interpreter.GetCharPositionInLine()
- b.TokenStartLine = b.Interpreter.GetLine()
- b.text = ""
- continueOuter := false
- for {
- b.thetype = TokenInvalidType
-
- ttype := b.safeMatch()
-
- if b.input.LA(1) == TokenEOF {
- b.hitEOF = true
- }
- if b.thetype == TokenInvalidType {
- b.thetype = ttype
- }
- if b.thetype == LexerSkip {
- continueOuter = true
- break
- }
- if b.thetype != LexerMore {
- break
- }
- }
-
- if continueOuter {
- continue
- }
- if b.token == nil {
- b.Virt.Emit()
- }
- return b.token
- }
-}
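
NextToken is normally driven in a loop until it yields the EOF token. The sketch below is a hedged usage illustration, not part of the vendored code: it assumes the import path of this module and that, in real use, the Lexer value comes from an ANTLR-generated, grammar-specific constructor (shown only in a comment, since no such lexer exists in the runtime itself).

    package main

    import (
        "fmt"

        "github.com/antlr4-go/antlr/v4"
    )

    // dumpTokens drains a lexer with NextToken until the EOF token appears.
    func dumpTokens(lex antlr.Lexer) {
        for tok := lex.NextToken(); tok.GetTokenType() != antlr.TokenEOF; tok = lex.NextToken() {
            fmt.Printf("type=%d text=%q\n", tok.GetTokenType(), tok.GetText())
        }
    }

    func main() {
        // A generated lexer (hypothetical, produced by ANTLR from a grammar)
        // would be passed in roughly like:
        //   dumpTokens(parser.NewMyLexer(antlr.NewInputStream("1 + 2")))
        _ = dumpTokens // placeholder so this sketch compiles on its own
    }
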
-
-// Skip instructs the lexer to Skip creating a token for current lexer rule
-// and look for another token. [NextToken] knows to keep looking when
-// a lexer rule finishes with token set to [SKIPTOKEN]. Recall that
-// if token==nil at end of any token rule, it creates one for you
-// and emits it.
-func (b *BaseLexer) Skip() {
- b.thetype = LexerSkip
-}
-
-func (b *BaseLexer) More() {
- b.thetype = LexerMore
-}
-
-// SetMode changes the lexer to a new mode. The lexer will use this mode from hereon in and the rules for that mode
-// will be in force.
-func (b *BaseLexer) SetMode(m int) {
- b.mode = m
-}
-
-// PushMode saves the current lexer mode so that it can be restored later. See [PopMode], then sets the
-// current lexer mode to the supplied mode m.
-func (b *BaseLexer) PushMode(m int) {
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Println("pushMode " + strconv.Itoa(m))
- }
- b.modeStack.Push(b.mode)
- b.mode = m
-}
-
-// PopMode restores the lexer mode saved by a call to [PushMode]. It is a panic error if there is no saved mode to
-// return to.
-func (b *BaseLexer) PopMode() int {
- if len(b.modeStack) == 0 {
- panic("Empty Stack")
- }
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Println("popMode back to " + fmt.Sprint(b.modeStack[0:len(b.modeStack)-1]))
- }
- i, _ := b.modeStack.Pop()
- b.mode = i
- return b.mode
-}
-
-func (b *BaseLexer) inputStream() CharStream {
- return b.input
-}
-
-// SetInputStream resets the lexer input stream and associated lexer state.
-func (b *BaseLexer) SetInputStream(input CharStream) {
- b.input = nil
- b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
- b.Reset()
- b.input = input
- b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
-}
-
-func (b *BaseLexer) GetTokenSourceCharStreamPair() *TokenSourceCharStreamPair {
- return b.tokenFactorySourcePair
-}
-
-// EmitToken by default does not support multiple emits per [NextToken] invocation
-// for efficiency reasons. Subclass and override this func, [NextToken],
-// and [GetToken] (to push tokens into a list and pull from that list
-// rather than a single variable as this implementation does).
-func (b *BaseLexer) EmitToken(token Token) {
- b.token = token
-}
-
-// Emit is the standard method called to automatically emit a token at the
-// outermost lexical rule. The token object should point into the
-// char buffer start..stop. If there is a text override in 'text',
-// use that to set the token's text. Override this method to emit
-// custom [Token] objects or provide a new factory.
-// /
-func (b *BaseLexer) Emit() Token {
- t := b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.GetCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
- b.EmitToken(t)
- return t
-}
-
-// EmitEOF emits an EOF token. By default, this is the last token emitted
-func (b *BaseLexer) EmitEOF() Token {
- cpos := b.GetCharPositionInLine()
- lpos := b.GetLine()
- eof := b.factory.Create(b.tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b.input.Index(), b.input.Index()-1, lpos, cpos)
- b.EmitToken(eof)
- return eof
-}
-
-// GetCharPositionInLine returns the current position in the current line as far as the lexer is concerned.
-func (b *BaseLexer) GetCharPositionInLine() int {
- return b.Interpreter.GetCharPositionInLine()
-}
-
-func (b *BaseLexer) GetLine() int {
- return b.Interpreter.GetLine()
-}
-
-func (b *BaseLexer) GetType() int {
- return b.thetype
-}
-
-func (b *BaseLexer) SetType(t int) {
- b.thetype = t
-}
-
-// GetCharIndex returns the index of the current character of lookahead
-func (b *BaseLexer) GetCharIndex() int {
- return b.input.Index()
-}
-
-// GetText returns the text Matched so far for the current token or any text override.
-func (b *BaseLexer) GetText() string {
- if b.text != "" {
- return b.text
- }
-
- return b.Interpreter.GetText(b.input)
-}
-
-// SetText sets the complete text of this token; it wipes any previous changes to the text.
-func (b *BaseLexer) SetText(text string) {
- b.text = text
-}
-
-// GetATN returns the ATN used by the lexer.
-func (b *BaseLexer) GetATN() *ATN {
- return b.Interpreter.ATN()
-}
-
-// GetAllTokens returns a list of all [Token] objects in input char stream.
-// Forces a load of all tokens that can be made from the input char stream.
-//
-// Does not include EOF token.
-func (b *BaseLexer) GetAllTokens() []Token {
- vl := b.Virt
- tokens := make([]Token, 0)
- t := vl.NextToken()
- for t.GetTokenType() != TokenEOF {
- tokens = append(tokens, t)
- t = vl.NextToken()
- }
- return tokens
-}
-
-func (b *BaseLexer) notifyListeners(e RecognitionException) {
- start := b.TokenStartCharIndex
- stop := b.input.Index()
- text := b.input.GetTextFromInterval(NewInterval(start, stop))
- msg := "token recognition error at: '" + text + "'"
- listener := b.GetErrorListenerDispatch()
- listener.SyntaxError(b, nil, b.TokenStartLine, b.TokenStartColumn, msg, e)
-}
-
-func (b *BaseLexer) getErrorDisplayForChar(c rune) string {
- if c == TokenEOF {
- return ""
- } else if c == '\n' {
- return "\\n"
- } else if c == '\t' {
- return "\\t"
- } else if c == '\r' {
- return "\\r"
- } else {
- return string(c)
- }
-}
-
-func (b *BaseLexer) getCharErrorDisplay(c rune) string {
- return "'" + b.getErrorDisplayForChar(c) + "'"
-}
-
-// Recover can normally Match any char in its vocabulary after Matching
-// a token, so here we do the easy thing and just kill a character and hope
-// it all works out. You can instead use the rule invocation stack
-// to do sophisticated error recovery if you are in a fragment rule.
-//
-// In general, lexers should not need to recover and should have rules that cover any eventuality, such as
-// a character that makes no sense to the recognizer.
-func (b *BaseLexer) Recover(re RecognitionException) {
- if b.input.LA(1) != TokenEOF {
- if _, ok := re.(*LexerNoViableAltException); ok {
- // Skip a char and try again
- b.Interpreter.Consume(b.input)
- } else {
- // TODO: Do we lose character or line position information?
- b.input.Consume()
- }
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/lexer_action.go b/vendor/github.com/antlr4-go/antlr/v4/lexer_action.go
deleted file mode 100644
index eaa7393e0..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/lexer_action.go
+++ /dev/null
@@ -1,452 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import "strconv"
-
-const (
- // LexerActionTypeChannel represents a [LexerChannelAction] action.
- LexerActionTypeChannel = 0
-
- // LexerActionTypeCustom represents a [LexerCustomAction] action.
- LexerActionTypeCustom = 1
-
- // LexerActionTypeMode represents a [LexerModeAction] action.
- LexerActionTypeMode = 2
-
- // LexerActionTypeMore represents a [LexerMoreAction] action.
- LexerActionTypeMore = 3
-
- // LexerActionTypePopMode represents a [LexerPopModeAction] action.
- LexerActionTypePopMode = 4
-
- // LexerActionTypePushMode represents a [LexerPushModeAction] action.
- LexerActionTypePushMode = 5
-
- // LexerActionTypeSkip represents a [LexerSkipAction] action.
- LexerActionTypeSkip = 6
-
- // LexerActionTypeType represents a [LexerTypeAction] action.
- LexerActionTypeType = 7
-)
-
-type LexerAction interface {
- getActionType() int
- getIsPositionDependent() bool
- execute(lexer Lexer)
- Hash() int
- Equals(other LexerAction) bool
-}
-
-type BaseLexerAction struct {
- actionType int
- isPositionDependent bool
-}
-
-func NewBaseLexerAction(action int) *BaseLexerAction {
- la := new(BaseLexerAction)
-
- la.actionType = action
- la.isPositionDependent = false
-
- return la
-}
-
-func (b *BaseLexerAction) execute(_ Lexer) {
- panic("Not implemented")
-}
-
-func (b *BaseLexerAction) getActionType() int {
- return b.actionType
-}
-
-func (b *BaseLexerAction) getIsPositionDependent() bool {
- return b.isPositionDependent
-}
-
-func (b *BaseLexerAction) Hash() int {
- h := murmurInit(0)
- h = murmurUpdate(h, b.actionType)
- return murmurFinish(h, 1)
-}
-
-func (b *BaseLexerAction) Equals(other LexerAction) bool {
- return b.actionType == other.getActionType()
-}
-
-// LexerSkipAction implements the [BaseLexerAction.Skip] lexer action by calling [Lexer.Skip].
-//
-// The Skip command does not have any parameters, so this action is
-// implemented as a singleton instance exposed by the [LexerSkipActionINSTANCE].
-type LexerSkipAction struct {
- *BaseLexerAction
-}
-
-func NewLexerSkipAction() *LexerSkipAction {
- la := new(LexerSkipAction)
- la.BaseLexerAction = NewBaseLexerAction(LexerActionTypeSkip)
- return la
-}
-
-// LexerSkipActionINSTANCE provides a singleton instance of this parameterless lexer action.
-var LexerSkipActionINSTANCE = NewLexerSkipAction()
-
-func (l *LexerSkipAction) execute(lexer Lexer) {
- lexer.Skip()
-}
-
-// String returns a string representation of the current [LexerSkipAction].
-func (l *LexerSkipAction) String() string {
- return "skip"
-}
-
-func (b *LexerSkipAction) Equals(other LexerAction) bool {
- return other.getActionType() == LexerActionTypeSkip
-}
-
-// Implements the {@code type} lexer action by calling {@link Lexer//setType}
-//
-// with the assigned type.
-type LexerTypeAction struct {
- *BaseLexerAction
-
- thetype int
-}
-
-func NewLexerTypeAction(thetype int) *LexerTypeAction {
- l := new(LexerTypeAction)
- l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeType)
- l.thetype = thetype
- return l
-}
-
-func (l *LexerTypeAction) execute(lexer Lexer) {
- lexer.SetType(l.thetype)
-}
-
-func (l *LexerTypeAction) Hash() int {
- h := murmurInit(0)
- h = murmurUpdate(h, l.actionType)
- h = murmurUpdate(h, l.thetype)
- return murmurFinish(h, 2)
-}
-
-func (l *LexerTypeAction) Equals(other LexerAction) bool {
- if l == other {
- return true
- } else if _, ok := other.(*LexerTypeAction); !ok {
- return false
- } else {
- return l.thetype == other.(*LexerTypeAction).thetype
- }
-}
-
-func (l *LexerTypeAction) String() string {
- return "actionType(" + strconv.Itoa(l.thetype) + ")"
-}
-
-// LexerPushModeAction implements the pushMode lexer action by calling
-// [Lexer.pushMode] with the assigned mode.
-type LexerPushModeAction struct {
- *BaseLexerAction
- mode int
-}
-
-func NewLexerPushModeAction(mode int) *LexerPushModeAction {
-
- l := new(LexerPushModeAction)
- l.BaseLexerAction = NewBaseLexerAction(LexerActionTypePushMode)
-
- l.mode = mode
- return l
-}
-
-//
-// This action is implemented by calling {@link Lexer//pushMode} with the
-// value provided by {@link //getMode}.
-func (l *LexerPushModeAction) execute(lexer Lexer) {
- lexer.PushMode(l.mode)
-}
-
-func (l *LexerPushModeAction) Hash() int {
- h := murmurInit(0)
- h = murmurUpdate(h, l.actionType)
- h = murmurUpdate(h, l.mode)
- return murmurFinish(h, 2)
-}
-
-func (l *LexerPushModeAction) Equals(other LexerAction) bool {
- if l == other {
- return true
- } else if _, ok := other.(*LexerPushModeAction); !ok {
- return false
- } else {
- return l.mode == other.(*LexerPushModeAction).mode
- }
-}
-
-func (l *LexerPushModeAction) String() string {
- return "pushMode(" + strconv.Itoa(l.mode) + ")"
-}
-
-// LexerPopModeAction implements the popMode lexer action by calling [Lexer.popMode].
-//
-// The popMode command does not have any parameters, so this action is
-// implemented as a singleton instance exposed by [LexerPopModeActionINSTANCE]
-type LexerPopModeAction struct {
- *BaseLexerAction
-}
-
-func NewLexerPopModeAction() *LexerPopModeAction {
-
- l := new(LexerPopModeAction)
-
- l.BaseLexerAction = NewBaseLexerAction(LexerActionTypePopMode)
-
- return l
-}
-
-var LexerPopModeActionINSTANCE = NewLexerPopModeAction()
-
-//
-// This action is implemented by calling {@link Lexer//more}.
-func (l *LexerMoreAction) execute(lexer Lexer) {
- lexer.More()
-}
-
-func (l *LexerMoreAction) String() string {
- return "more"
-}
-
-// LexerModeAction implements the mode lexer action by calling [Lexer.mode] with
-// the assigned mode.
-type LexerModeAction struct {
- *BaseLexerAction
- mode int
-}
-
-func NewLexerModeAction(mode int) *LexerModeAction {
- l := new(LexerModeAction)
- l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeMode)
- l.mode = mode
- return l
-}
-
-//
-// This action is implemented by calling {@link Lexer//mode} with the
-// value provided by {@link //getMode}.
-func (l *LexerModeAction) execute(lexer Lexer) {
- lexer.SetMode(l.mode)
-}
-
-func (l *LexerModeAction) Hash() int {
- h := murmurInit(0)
- h = murmurUpdate(h, l.actionType)
- h = murmurUpdate(h, l.mode)
- return murmurFinish(h, 2)
-}
-
-func (l *LexerModeAction) Equals(other LexerAction) bool {
- if l == other {
- return true
- } else if _, ok := other.(*LexerModeAction); !ok {
- return false
- } else {
- return l.mode == other.(*LexerModeAction).mode
- }
-}
-
-func (l *LexerModeAction) String() string {
- return "mode(" + strconv.Itoa(l.mode) + ")"
-}
-
-// Executes a custom lexer action by calling {@link Recognizer//action} with the
-// rule and action indexes assigned to the custom action. The implementation of
-// a custom action is added to the generated code for the lexer in an override
-// of {@link Recognizer//action} when the grammar is compiled.
-//
-//
-// This class may represent embedded actions created with the {...}
-// syntax in ANTLR 4, as well as actions created for lexer commands where the
-// command argument could not be evaluated when the grammar was compiled.
-
-// Constructs a custom lexer action with the specified rule and action
-// indexes.
-//
-// @param ruleIndex The rule index to use for calls to
-// {@link Recognizer//action}.
-// @param actionIndex The action index to use for calls to
-// {@link Recognizer//action}.
-
-type LexerCustomAction struct {
- *BaseLexerAction
- ruleIndex, actionIndex int
-}
-
-func NewLexerCustomAction(ruleIndex, actionIndex int) *LexerCustomAction {
- l := new(LexerCustomAction)
- l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeCustom)
- l.ruleIndex = ruleIndex
- l.actionIndex = actionIndex
- l.isPositionDependent = true
- return l
-}
-
-//
-// Custom actions are implemented by calling {@link Lexer//action} with the
-// appropriate rule and action indexes.
-func (l *LexerCustomAction) execute(lexer Lexer) {
- lexer.Action(nil, l.ruleIndex, l.actionIndex)
-}
-
-func (l *LexerCustomAction) Hash() int {
- h := murmurInit(0)
- h = murmurUpdate(h, l.actionType)
- h = murmurUpdate(h, l.ruleIndex)
- h = murmurUpdate(h, l.actionIndex)
- return murmurFinish(h, 3)
-}
-
-func (l *LexerCustomAction) Equals(other LexerAction) bool {
- if l == other {
- return true
- } else if _, ok := other.(*LexerCustomAction); !ok {
- return false
- } else {
- return l.ruleIndex == other.(*LexerCustomAction).ruleIndex &&
- l.actionIndex == other.(*LexerCustomAction).actionIndex
- }
-}
-
-// LexerChannelAction implements the channel lexer action by calling
-// [Lexer.setChannel] with the assigned channel.
-//
-// Constructs a new channel action with the specified channel value.
-type LexerChannelAction struct {
- *BaseLexerAction
- channel int
-}
-
-// NewLexerChannelAction creates a channel lexer action by calling
-// [Lexer.setChannel] with the assigned channel.
-//
-// Constructs a new channel action with the specified channel value.
-func NewLexerChannelAction(channel int) *LexerChannelAction {
- l := new(LexerChannelAction)
- l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeChannel)
- l.channel = channel
- return l
-}
-
-//
-// This action is implemented by calling {@link Lexer//setChannel} with the
-// value provided by {@link //getChannel}.
-func (l *LexerChannelAction) execute(lexer Lexer) {
- lexer.SetChannel(l.channel)
-}
-
-func (l *LexerChannelAction) Hash() int {
- h := murmurInit(0)
- h = murmurUpdate(h, l.actionType)
- h = murmurUpdate(h, l.channel)
- return murmurFinish(h, 2)
-}
-
-func (l *LexerChannelAction) Equals(other LexerAction) bool {
- if l == other {
- return true
- } else if _, ok := other.(*LexerChannelAction); !ok {
- return false
- } else {
- return l.channel == other.(*LexerChannelAction).channel
- }
-}
-
-func (l *LexerChannelAction) String() string {
- return "channel(" + strconv.Itoa(l.channel) + ")"
-}
-
-// This implementation of {@link LexerAction} is used for tracking input offsets
-// for position-dependent actions within a {@link LexerActionExecutor}.
-//
-//
-// This action is not serialized as part of the ATN, and is only required for
-// position-dependent lexer actions which appear at a location other than the
-// end of a rule. For more information about DFA optimizations employed for
-// lexer actions, see {@link LexerActionExecutor//append} and
-// {@link LexerActionExecutor//fixOffsetBeforeMatch}.
-
-type LexerIndexedCustomAction struct {
- *BaseLexerAction
- offset int
- lexerAction LexerAction
- isPositionDependent bool
-}
-
-// NewLexerIndexedCustomAction constructs a new indexed custom action by associating a character offset
-// with a [LexerAction].
-//
-// Note: This class is only required for lexer actions for which
-// [LexerAction.isPositionDependent] returns true.
-//
-// The offset points into the input [CharStream], relative to
-// the token start index, at which the specified lexerAction should be
-// executed.
-func NewLexerIndexedCustomAction(offset int, lexerAction LexerAction) *LexerIndexedCustomAction {
-
- l := new(LexerIndexedCustomAction)
- l.BaseLexerAction = NewBaseLexerAction(lexerAction.getActionType())
-
- l.offset = offset
- l.lexerAction = lexerAction
- l.isPositionDependent = true
-
- return l
-}
-
-//
-// This method calls {@link //execute} on the result of {@link //getAction}
-// using the provided {@code lexer}.
-func (l *LexerIndexedCustomAction) execute(lexer Lexer) {
- // assume the input stream position was properly set by the calling code
- l.lexerAction.execute(lexer)
-}
-
-func (l *LexerIndexedCustomAction) Hash() int {
- h := murmurInit(0)
- h = murmurUpdate(h, l.offset)
- h = murmurUpdate(h, l.lexerAction.Hash())
- return murmurFinish(h, 2)
-}
-
-func (l *LexerIndexedCustomAction) equals(other LexerAction) bool {
- if l == other {
- return true
- } else if _, ok := other.(*LexerIndexedCustomAction); !ok {
- return false
- } else {
- return l.offset == other.(*LexerIndexedCustomAction).offset &&
- l.lexerAction.Equals(other.(*LexerIndexedCustomAction).lexerAction)
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go b/vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go
deleted file mode 100644
index dfc28c32b..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go
+++ /dev/null
@@ -1,173 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import "golang.org/x/exp/slices"
-
-// Represents an executor for a sequence of lexer actions which traversed during
-// the Matching operation of a lexer rule (token).
-//
-//
-// The executor tracks position information for position-dependent lexer actions
-// efficiently, ensuring that actions appearing only at the end of the rule do
-// not cause bloating of the {@link DFA} created for the lexer.
-
-type LexerActionExecutor struct {
- lexerActions []LexerAction
- cachedHash int
-}
-
-func NewLexerActionExecutor(lexerActions []LexerAction) *LexerActionExecutor {
-
- if lexerActions == nil {
- lexerActions = make([]LexerAction, 0)
- }
-
- l := new(LexerActionExecutor)
-
- l.lexerActions = lexerActions
-
- // Caches the result of {@link //hashCode} since the hash code is an element
- // of the performance-critical {@link ATNConfig//hashCode} operation.
- l.cachedHash = murmurInit(0)
- for _, a := range lexerActions {
- l.cachedHash = murmurUpdate(l.cachedHash, a.Hash())
- }
- l.cachedHash = murmurFinish(l.cachedHash, len(lexerActions))
-
- return l
-}
-
-// LexerActionExecutorappend creates a [LexerActionExecutor] which executes the actions for
-// the input [LexerActionExecutor] followed by a specified
-// [LexerAction].
-// TODO: This does not match the Java code
-func LexerActionExecutorappend(lexerActionExecutor *LexerActionExecutor, lexerAction LexerAction) *LexerActionExecutor {
- if lexerActionExecutor == nil {
- return NewLexerActionExecutor([]LexerAction{lexerAction})
- }
-
- return NewLexerActionExecutor(append(lexerActionExecutor.lexerActions, lexerAction))
-}
-
-// fixOffsetBeforeMatch creates a [LexerActionExecutor] which encodes the current offset
-// for position-dependent lexer actions.
-//
-// Normally, when the executor encounters lexer actions where
-// [LexerAction.isPositionDependent] returns true, it calls
-// [IntStream.Seek] on the input [CharStream] to set the input
-// position to the end of the current token. This behavior provides
-// for efficient [DFA] representation of lexer actions which appear at the end
-// of a lexer rule, even when the lexer rule Matches a variable number of
-// characters.
-//
-// Prior to traversing a Match transition in the [ATN], the current offset
-// from the token start index is assigned to all position-dependent lexer
-// actions which have not already been assigned a fixed offset. By storing
-// the offsets relative to the token start index, the [DFA] representation of
-// lexer actions which appear in the middle of tokens remains efficient due
-// to sharing among tokens of the same Length, regardless of their absolute
-// position in the input stream.
-//
-// If the current executor already has offsets assigned to all
-// position-dependent lexer actions, the method returns this instance.
-//
-// The offset is assigned to all position-dependent
-// lexer actions which do not already have offsets assigned.
-//
-// The func returns a [LexerActionExecutor] that stores input stream offsets
-// for all position-dependent lexer actions.
-func (l *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecutor {
- var updatedLexerActions []LexerAction
- for i := 0; i < len(l.lexerActions); i++ {
- _, ok := l.lexerActions[i].(*LexerIndexedCustomAction)
- if l.lexerActions[i].getIsPositionDependent() && !ok {
- if updatedLexerActions == nil {
- updatedLexerActions = make([]LexerAction, 0, len(l.lexerActions))
- updatedLexerActions = append(updatedLexerActions, l.lexerActions...)
- }
- updatedLexerActions[i] = NewLexerIndexedCustomAction(offset, l.lexerActions[i])
- }
- }
- if updatedLexerActions == nil {
- return l
- }
-
- return NewLexerActionExecutor(updatedLexerActions)
-}
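
fixOffsetBeforeMatch above is a copy-on-write pass: the action slice is duplicated only when the first position-dependent action actually needs to be rewrapped; otherwise the executor returns itself unchanged. The standalone sketch below shows that same pattern with plain ints (names are illustrative, not from the runtime).

    package main

    import "fmt"

    // rewriteNegatives returns the original slice untouched when nothing needs to
    // change, and a patched copy otherwise - the same copy-on-write shape as
    // fixOffsetBeforeMatch.
    func rewriteNegatives(in []int, replacement int) []int {
        var out []int
        for i, v := range in {
            if v < 0 {
                if out == nil {
                    out = make([]int, len(in))
                    copy(out, in) // first change: copy everything, then patch in place
                }
                out[i] = replacement
            }
        }
        if out == nil {
            return in // unchanged: caller keeps the shared original
        }
        return out
    }

    func main() {
        a := []int{1, 2, 3}
        fmt.Println(rewriteNegatives(a, 0)) // [1 2 3]: same slice handed back
        b := []int{1, -2, 3}
        fmt.Println(rewriteNegatives(b, 0)) // [1 0 3]: b itself is left untouched
    }
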
-
-// Execute the actions encapsulated by l executor within the context of a
-// particular {@link Lexer}.
-//
-//
-// This method calls {@link IntStream//seek} to set the position of the
-// {@code input} {@link CharStream} prior to calling
-// {@link LexerAction//execute} on a position-dependent action. Before the
-// method returns, the input position will be restored to the same position
-// it was in when the method was invoked.
-//
-// @param lexer The lexer instance.
-// @param input The input stream which is the source for the current token.
-// When l method is called, the current {@link IntStream//index} for
-// {@code input} should be the start of the following token, i.e. 1
-// character past the end of the current token.
-// @param startIndex The token start index. This value may be passed to
-// {@link IntStream//seek} to set the {@code input} position to the beginning
-// of the token.
-// /
-func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex int) {
- requiresSeek := false
- stopIndex := input.Index()
-
- defer func() {
- if requiresSeek {
- input.Seek(stopIndex)
- }
- }()
-
- for i := 0; i < len(l.lexerActions); i++ {
- lexerAction := l.lexerActions[i]
- if la, ok := lexerAction.(*LexerIndexedCustomAction); ok {
- offset := la.offset
- input.Seek(startIndex + offset)
- lexerAction = la.lexerAction
- requiresSeek = (startIndex + offset) != stopIndex
- } else if lexerAction.getIsPositionDependent() {
- input.Seek(stopIndex)
- requiresSeek = false
- }
- lexerAction.execute(lexer)
- }
-}
-
-func (l *LexerActionExecutor) Hash() int {
- if l == nil {
- // TODO: Why is this here? l should not be nil
- return 61
- }
-
- // TODO: This is created from the action itself when the struct is created - will this be an issue at some point? Java uses the runtime assign hashcode
- return l.cachedHash
-}
-
-func (l *LexerActionExecutor) Equals(other interface{}) bool {
- if l == other {
- return true
- }
- othert, ok := other.(*LexerActionExecutor)
- if !ok {
- return false
- }
- if othert == nil {
- return false
- }
- if l.cachedHash != othert.cachedHash {
- return false
- }
- if len(l.lexerActions) != len(othert.lexerActions) {
- return false
- }
- return slices.EqualFunc(l.lexerActions, othert.lexerActions, func(i, j LexerAction) bool {
- return i.Equals(j)
- })
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go b/vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go
deleted file mode 100644
index fe938b025..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go
+++ /dev/null
@@ -1,677 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "strconv"
- "strings"
-)
-
-//goland:noinspection GoUnusedGlobalVariable
-var (
- LexerATNSimulatorMinDFAEdge = 0
- LexerATNSimulatorMaxDFAEdge = 127 // forces unicode to stay in ATN
-
- LexerATNSimulatorMatchCalls = 0
-)
-
-type ILexerATNSimulator interface {
- IATNSimulator
-
- reset()
- Match(input CharStream, mode int) int
- GetCharPositionInLine() int
- GetLine() int
- GetText(input CharStream) string
- Consume(input CharStream)
-}
-
-type LexerATNSimulator struct {
- BaseATNSimulator
-
- recog Lexer
- predictionMode int
- mergeCache *JPCMap2
- startIndex int
- Line int
- CharPositionInLine int
- mode int
- prevAccept *SimState
- MatchCalls int
-}
-
-func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *LexerATNSimulator {
- l := &LexerATNSimulator{
- BaseATNSimulator: BaseATNSimulator{
- atn: atn,
- sharedContextCache: sharedContextCache,
- },
- }
-
- l.decisionToDFA = decisionToDFA
- l.recog = recog
-
- // The current token's starting index into the character stream.
- // Shared across DFA to ATN simulation in case the ATN fails and the
- // DFA did not have a previous accept state. In l case, we use the
- // ATN-generated exception object.
- l.startIndex = -1
-
- // line number 1..n within the input
- l.Line = 1
-
- // The index of the character relative to the beginning of the line
- // 0..n-1
- l.CharPositionInLine = 0
-
- l.mode = LexerDefaultMode
-
- // Used during DFA/ATN exec to record the most recent accept configuration
- // info
- l.prevAccept = NewSimState()
-
- return l
-}
-
-func (l *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
- l.CharPositionInLine = simulator.CharPositionInLine
- l.Line = simulator.Line
- l.mode = simulator.mode
- l.startIndex = simulator.startIndex
-}
-
-func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
- l.MatchCalls++
- l.mode = mode
- mark := input.Mark()
-
- defer func() {
- input.Release(mark)
- }()
-
- l.startIndex = input.Index()
- l.prevAccept.reset()
-
- dfa := l.decisionToDFA[mode]
-
- var s0 *DFAState
- l.atn.stateMu.RLock()
- s0 = dfa.getS0()
- l.atn.stateMu.RUnlock()
-
- if s0 == nil {
- return l.MatchATN(input)
- }
-
- return l.execATN(input, s0)
-}
-
-func (l *LexerATNSimulator) reset() {
- l.prevAccept.reset()
- l.startIndex = -1
- l.Line = 1
- l.CharPositionInLine = 0
- l.mode = LexerDefaultMode
-}
-
-func (l *LexerATNSimulator) MatchATN(input CharStream) int {
- startState := l.atn.modeToStartState[l.mode]
-
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Println("MatchATN mode " + strconv.Itoa(l.mode) + " start: " + startState.String())
- }
- oldMode := l.mode
- s0Closure := l.computeStartState(input, startState)
- suppressEdge := s0Closure.hasSemanticContext
- s0Closure.hasSemanticContext = false
-
- next := l.addDFAState(s0Closure, suppressEdge)
-
- predict := l.execATN(input, next)
-
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Println("DFA after MatchATN: " + l.decisionToDFA[oldMode].ToLexerString())
- }
- return predict
-}
-
-func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
-
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Println("start state closure=" + ds0.configs.String())
- }
- if ds0.isAcceptState {
- // allow zero-Length tokens
- l.captureSimState(l.prevAccept, input, ds0)
- }
- t := input.LA(1)
- s := ds0 // s is current/from DFA state
-
- for { // while more work
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Println("execATN loop starting closure: " + s.configs.String())
- }
-
- // As we move src->trg, src->trg, we keep track of the previous trg to
- // avoid looking up the DFA state again, which is expensive.
- // If the previous target was already part of the DFA, we might
- // be able to avoid doing a reach operation upon t. If s!=nil,
- // it means that semantic predicates didn't prevent us from
- // creating a DFA state. Once we know s!=nil, we check to see if
- // the DFA state has an edge already for t. If so, we can just reuse
- // it's configuration set there's no point in re-computing it.
- // This is kind of like doing DFA simulation within the ATN
- // simulation because DFA simulation is really just a way to avoid
- // computing reach/closure sets. Technically, once we know that
- // we have a previously added DFA state, we could jump over to
- // the DFA simulator. But, that would mean popping back and forth
- // a lot and making things more complicated algorithmically.
- // This optimization makes a lot of sense for loops within DFA.
- // A character will take us back to an existing DFA state
- // that already has lots of edges out of it. e.g., .* in comments.
- target := l.getExistingTargetState(s, t)
- if target == nil {
- target = l.computeTargetState(input, s, t)
- // print("Computed:" + str(target))
- }
- if target == ATNSimulatorError {
- break
- }
- // If l is a consumable input element, make sure to consume before
- // capturing the accept state so the input index, line, and char
- // position accurately reflect the state of the interpreter at the
- // end of the token.
- if t != TokenEOF {
- l.Consume(input)
- }
- if target.isAcceptState {
- l.captureSimState(l.prevAccept, input, target)
- if t == TokenEOF {
- break
- }
- }
- t = input.LA(1)
- s = target // flip current DFA target becomes new src/from state
- }
-
- return l.failOrAccept(l.prevAccept, input, s.configs, t)
-}
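
The long comment inside execATN above describes the central speed trick: once a (DFA state, input symbol) edge has been computed, later input reuses it instead of recomputing the ATN reach/closure. The following is a hypothetical, much-simplified memoisation sketch of that idea; computeTarget merely stands in for the expensive closure computation.

    package main

    import "fmt"

    type edgeKey struct {
        state  int
        symbol rune
    }

    // simulator caches computed transitions so repeated (state, symbol) pairs -
    // think of the looping .* inside a comment rule - cost one map lookup
    // instead of a full closure computation.
    type simulator struct {
        edges    map[edgeKey]int
        computed int // how many times the expensive path actually ran
    }

    func (s *simulator) target(state int, symbol rune) int {
        k := edgeKey{state, symbol}
        if t, ok := s.edges[k]; ok {
            return t // reuse a previously computed edge
        }
        t := s.computeTarget(state, symbol)
        s.edges[k] = t
        return t
    }

    func (s *simulator) computeTarget(state int, symbol rune) int {
        s.computed++
        return state + int(symbol)%7 // placeholder for the real reach/closure work
    }

    func main() {
        sim := &simulator{edges: map[edgeKey]int{}}
        for _, r := range "aaaa" {
            sim.target(0, r)
        }
        fmt.Println(sim.computed) // 1: the edge for (state 0, 'a') was computed once
    }
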
-
-// Get an existing target state for an edge in the DFA. If the target state
-// for the edge has not yet been computed or is otherwise not available,
-// l method returns {@code nil}.
-//
-// @param s The current DFA state
-// @param t The next input symbol
-// @return The existing target DFA state for the given input symbol
-// {@code t}, or {@code nil} if the target state for l edge is not
-// already cached
-func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
- if t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
- return nil
- }
-
- l.atn.edgeMu.RLock()
- defer l.atn.edgeMu.RUnlock()
- if s.getEdges() == nil {
- return nil
- }
- target := s.getIthEdge(t - LexerATNSimulatorMinDFAEdge)
- if runtimeConfig.lexerATNSimulatorDebug && target != nil {
- fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
- }
- return target
-}
-
-// computeTargetState computes a target state for an edge in the [DFA], and attempt to add the
-// computed state and corresponding edge to the [DFA].
-//
-// The func returns the computed target [DFA] state for the given input symbol t.
-// If this does not lead to a valid [DFA] state, this method
-// returns ATNSimulatorError.
-func (l *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t int) *DFAState {
- reach := NewOrderedATNConfigSet()
-
- // if we don't find an existing DFA state
- // Fill reach starting from closure, following t transitions
- l.getReachableConfigSet(input, s.configs, reach, t)
-
- if len(reach.configs) == 0 { // we got nowhere on t from s
- if !reach.hasSemanticContext {
- // we got nowhere on t, don't panic out l knowledge it'd
- // cause a fail-over from DFA later.
- l.addDFAEdge(s, t, ATNSimulatorError, nil)
- }
- // stop when we can't Match any more char
- return ATNSimulatorError
- }
- // Add an edge from s to target DFA found/created for reach
- return l.addDFAEdge(s, t, nil, reach)
-}
-
-func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach *ATNConfigSet, t int) int {
- if l.prevAccept.dfaState != nil {
- lexerActionExecutor := prevAccept.dfaState.lexerActionExecutor
- l.accept(input, lexerActionExecutor, l.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
- return prevAccept.dfaState.prediction
- }
-
- // if no accept and EOF is first char, return EOF
- if t == TokenEOF && input.Index() == l.startIndex {
- return TokenEOF
- }
-
- panic(NewLexerNoViableAltException(l.recog, input, l.startIndex, reach))
-}
-
-// getReachableConfigSet when given a starting configuration set, figures out all [ATN] configurations
-// we can reach upon input t.
-//
-// Parameter reach is a return parameter.
-func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *ATNConfigSet, reach *ATNConfigSet, t int) {
- // this is used to Skip processing for configs which have a lower priority
- // than a config that already reached an accept state for the same rule
- SkipAlt := ATNInvalidAltNumber
-
- for _, cfg := range closure.configs {
- currentAltReachedAcceptState := cfg.GetAlt() == SkipAlt
- if currentAltReachedAcceptState && cfg.passedThroughNonGreedyDecision {
- continue
- }
-
- if runtimeConfig.lexerATNSimulatorDebug {
-
- fmt.Printf("testing %s at %s\n", l.GetTokenName(t), cfg.String())
- }
-
- for _, trans := range cfg.GetState().GetTransitions() {
- target := l.getReachableTarget(trans, t)
- if target != nil {
- lexerActionExecutor := cfg.lexerActionExecutor
- if lexerActionExecutor != nil {
- lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.Index() - l.startIndex)
- }
- treatEOFAsEpsilon := t == TokenEOF
- config := NewLexerATNConfig3(cfg, target, lexerActionExecutor)
- if l.closure(input, config, reach,
- currentAltReachedAcceptState, true, treatEOFAsEpsilon) {
- // any remaining configs for this alt have a lower priority
- // than the one that just reached an accept state.
- SkipAlt = cfg.GetAlt()
- }
- }
- }
- }
-}
-
-func (l *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Printf("ACTION %v\n", lexerActionExecutor)
- }
- // seek to after last char in token
- input.Seek(index)
- l.Line = line
- l.CharPositionInLine = charPos
- if lexerActionExecutor != nil && l.recog != nil {
- lexerActionExecutor.execute(l.recog, input, startIndex)
- }
-}
-
-func (l *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNState {
- if trans.Matches(t, 0, LexerMaxCharValue) {
- return trans.getTarget()
- }
-
- return nil
-}
-
-func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *ATNConfigSet {
- configs := NewOrderedATNConfigSet()
- for i := 0; i < len(p.GetTransitions()); i++ {
- target := p.GetTransitions()[i].getTarget()
- cfg := NewLexerATNConfig6(target, i+1, BasePredictionContextEMPTY)
- l.closure(input, cfg, configs, false, false, false)
- }
-
- return configs
-}
-
-// closure since the alternatives within any lexer decision are ordered by
-// preference, this method stops pursuing the closure as soon as an accept
-// state is reached. After the first accept state is reached by depth-first
-// search from this config, all other (potentially reachable) states for
-// this rule would have a lower priority.
-//
-// The func returns true if an accept state is reached.
-func (l *LexerATNSimulator) closure(input CharStream, config *ATNConfig, configs *ATNConfigSet,
- currentAltReachedAcceptState, speculative, treatEOFAsEpsilon bool) bool {
-
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Println("closure(" + config.String() + ")")
- }
-
- _, ok := config.state.(*RuleStopState)
- if ok {
-
- if runtimeConfig.lexerATNSimulatorDebug {
- if l.recog != nil {
- fmt.Printf("closure at %s rule stop %s\n", l.recog.GetRuleNames()[config.state.GetRuleIndex()], config)
- } else {
- fmt.Printf("closure at rule stop %s\n", config)
- }
- }
-
- if config.context == nil || config.context.hasEmptyPath() {
- if config.context == nil || config.context.isEmpty() {
- configs.Add(config, nil)
- return true
- }
-
- configs.Add(NewLexerATNConfig2(config, config.state, BasePredictionContextEMPTY), nil)
- currentAltReachedAcceptState = true
- }
- if config.context != nil && !config.context.isEmpty() {
- for i := 0; i < config.context.length(); i++ {
- if config.context.getReturnState(i) != BasePredictionContextEmptyReturnState {
- newContext := config.context.GetParent(i) // "pop" return state
- returnState := l.atn.states[config.context.getReturnState(i)]
- cfg := NewLexerATNConfig2(config, returnState, newContext)
- currentAltReachedAcceptState = l.closure(input, cfg, configs, currentAltReachedAcceptState, speculative, treatEOFAsEpsilon)
- }
- }
- }
- return currentAltReachedAcceptState
- }
- // optimization
- if !config.state.GetEpsilonOnlyTransitions() {
- if !currentAltReachedAcceptState || !config.passedThroughNonGreedyDecision {
- configs.Add(config, nil)
- }
- }
- for j := 0; j < len(config.state.GetTransitions()); j++ {
- trans := config.state.GetTransitions()[j]
- cfg := l.getEpsilonTarget(input, config, trans, configs, speculative, treatEOFAsEpsilon)
- if cfg != nil {
- currentAltReachedAcceptState = l.closure(input, cfg, configs,
- currentAltReachedAcceptState, speculative, treatEOFAsEpsilon)
- }
- }
- return currentAltReachedAcceptState
-}
-
-// side-effect: can alter configs.hasSemanticContext
-func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *ATNConfig, trans Transition,
- configs *ATNConfigSet, speculative, treatEOFAsEpsilon bool) *ATNConfig {
-
- var cfg *ATNConfig
-
- if trans.getSerializationType() == TransitionRULE {
-
- rt := trans.(*RuleTransition)
- newContext := SingletonBasePredictionContextCreate(config.context, rt.followState.GetStateNumber())
- cfg = NewLexerATNConfig2(config, trans.getTarget(), newContext)
-
- } else if trans.getSerializationType() == TransitionPRECEDENCE {
- panic("Precedence predicates are not supported in lexers.")
- } else if trans.getSerializationType() == TransitionPREDICATE {
- // Track traversing semantic predicates. If we traverse,
- // we cannot add a DFA state for this "reach" computation
- // because the DFA would not test the predicate again in the
- // future. Rather than creating collections of semantic predicates
- // like v3 and testing them on prediction, v4 will test them on the
- // fly all the time using the ATN not the DFA. This is slower but
- // semantically it's not used that often. One of the key elements to
- // this predicate mechanism is not adding DFA states that see
- // predicates immediately afterwards in the ATN. For example,
-
- // a : ID {p1}? | ID {p2}?
-
- // should create the start state for rule 'a' (to save start state
- // competition), but should not create target of ID state. The
- // collection of ATN states the following ID references includes
- // states reached by traversing predicates. Since this is when we
- // test them, we cannot cache the DFA state target of ID.
-
- pt := trans.(*PredicateTransition)
-
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Println("EVAL rule " + strconv.Itoa(trans.(*PredicateTransition).ruleIndex) + ":" + strconv.Itoa(pt.predIndex))
- }
- configs.hasSemanticContext = true
- if l.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative) {
- cfg = NewLexerATNConfig4(config, trans.getTarget())
- }
- } else if trans.getSerializationType() == TransitionACTION {
- if config.context == nil || config.context.hasEmptyPath() {
- // execute actions anywhere in the start rule for a token.
- //
- // TODO: if the entry rule is invoked recursively, some
- // actions may be executed during the recursive call. The
- // problem can appear when hasEmptyPath() is true but
- // isEmpty() is false. In this case, the config needs to be
- // split into two contexts - one with just the empty path
- // and another with everything but the empty path.
- // Unfortunately, the current algorithm does not allow
- // getEpsilonTarget to return two configurations, so
- // additional modifications are needed before we can support
- // the split operation.
- lexerActionExecutor := LexerActionExecutorappend(config.lexerActionExecutor, l.atn.lexerActions[trans.(*ActionTransition).actionIndex])
- cfg = NewLexerATNConfig3(config, trans.getTarget(), lexerActionExecutor)
- } else {
- // ignore actions in referenced rules
- cfg = NewLexerATNConfig4(config, trans.getTarget())
- }
- } else if trans.getSerializationType() == TransitionEPSILON {
- cfg = NewLexerATNConfig4(config, trans.getTarget())
- } else if trans.getSerializationType() == TransitionATOM ||
- trans.getSerializationType() == TransitionRANGE ||
- trans.getSerializationType() == TransitionSET {
- if treatEOFAsEpsilon {
- if trans.Matches(TokenEOF, 0, LexerMaxCharValue) {
- cfg = NewLexerATNConfig4(config, trans.getTarget())
- }
- }
- }
- return cfg
-}
-
-// evaluatePredicate evaluates a predicate specified in the lexer.
-//
-// If speculative is true, this method was called before
-// [consume] for the Matched character. This method should call
-// [consume] before evaluating the predicate to ensure position
-// sensitive values, including [GetText], [GetLine],
-// and [GetColumn], properly reflect the current
-// lexer state. This method should restore input and the simulator
-// to the original state before returning, i.e. undo the actions made by the
-// call to [Consume].
-//
-// The func returns true if the specified predicate evaluates to true.
-func (l *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predIndex int, speculative bool) bool {
- // assume true if no recognizer was provided
- if l.recog == nil {
- return true
- }
- if !speculative {
- return l.recog.Sempred(nil, ruleIndex, predIndex)
- }
- savedcolumn := l.CharPositionInLine
- savedLine := l.Line
- index := input.Index()
- marker := input.Mark()
-
- defer func() {
- l.CharPositionInLine = savedcolumn
- l.Line = savedLine
- input.Seek(index)
- input.Release(marker)
- }()
-
- l.Consume(input)
- return l.recog.Sempred(nil, ruleIndex, predIndex)
-}
-
-func (l *LexerATNSimulator) captureSimState(settings *SimState, input CharStream, dfaState *DFAState) {
- settings.index = input.Index()
- settings.line = l.Line
- settings.column = l.CharPositionInLine
- settings.dfaState = dfaState
-}
-
-func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfgs *ATNConfigSet) *DFAState {
- if to == nil && cfgs != nil {
- // leading to this call, ATNConfigSet.hasSemanticContext is used as a
- // marker indicating dynamic predicate evaluation makes this edge
- // dependent on the specific input sequence, so the static edge in the
- // DFA should be omitted. The target DFAState is still created since
- // execATN has the ability to reSynchronize with the DFA state cache
- // following the predicate evaluation step.
- //
- // TJP notes: next time through the DFA, we see a pred again and eval.
- // If that gets us to a previously created (but dangling) DFA
- // state, we can continue in pure DFA mode from there.
- //
- suppressEdge := cfgs.hasSemanticContext
- cfgs.hasSemanticContext = false
- to = l.addDFAState(cfgs, true)
-
- if suppressEdge {
- return to
- }
- }
- // add the edge
- if tk < LexerATNSimulatorMinDFAEdge || tk > LexerATNSimulatorMaxDFAEdge {
- // Only track edges within the DFA bounds
- return to
- }
- if runtimeConfig.lexerATNSimulatorDebug {
- fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
- }
- l.atn.edgeMu.Lock()
- defer l.atn.edgeMu.Unlock()
- if from.getEdges() == nil {
- // make room for tokens 1..n and -1 masquerading as index 0
- from.setEdges(make([]*DFAState, LexerATNSimulatorMaxDFAEdge-LexerATNSimulatorMinDFAEdge+1))
- }
- from.setIthEdge(tk-LexerATNSimulatorMinDFAEdge, to) // connect
-
- return to
-}
-
-// Add a new DFA state if there isn't one with this set of
-// configurations already. This method also detects the first
-// configuration containing an ATN rule stop state. Later, when
-// traversing the DFA, we will know which rule to accept.
-func (l *LexerATNSimulator) addDFAState(configs *ATNConfigSet, suppressEdge bool) *DFAState {
-
- proposed := NewDFAState(-1, configs)
- var firstConfigWithRuleStopState *ATNConfig
-
- for _, cfg := range configs.configs {
- _, ok := cfg.GetState().(*RuleStopState)
-
- if ok {
- firstConfigWithRuleStopState = cfg
- break
- }
- }
- if firstConfigWithRuleStopState != nil {
- proposed.isAcceptState = true
- proposed.lexerActionExecutor = firstConfigWithRuleStopState.lexerActionExecutor
- proposed.setPrediction(l.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()])
- }
- dfa := l.decisionToDFA[l.mode]
-
- l.atn.stateMu.Lock()
- defer l.atn.stateMu.Unlock()
- existing, present := dfa.Get(proposed)
- if present {
-
- // This state was already present, so just return it.
- //
- proposed = existing
- } else {
-
- // We need to add the new state
- //
- proposed.stateNumber = dfa.Len()
- configs.readOnly = true
- configs.configLookup = nil // Not needed now
- proposed.configs = configs
- dfa.Put(proposed)
- }
- if !suppressEdge {
- dfa.setS0(proposed)
- }
- return proposed
-}
-
-func (l *LexerATNSimulator) getDFA(mode int) *DFA {
- return l.decisionToDFA[mode]
-}
-
-// GetText returns the text [Match]ed so far for the current token.
-func (l *LexerATNSimulator) GetText(input CharStream) string {
- // index is first lookahead char, don't include.
- return input.GetTextFromInterval(NewInterval(l.startIndex, input.Index()-1))
-}
-
-func (l *LexerATNSimulator) Consume(input CharStream) {
- curChar := input.LA(1)
- if curChar == int('\n') {
- l.Line++
- l.CharPositionInLine = 0
- } else {
- l.CharPositionInLine++
- }
- input.Consume()
-}
-
-func (l *LexerATNSimulator) GetCharPositionInLine() int {
- return l.CharPositionInLine
-}
-
-func (l *LexerATNSimulator) GetLine() int {
- return l.Line
-}
-
-func (l *LexerATNSimulator) GetTokenName(tt int) string {
- if tt == -1 {
- return "EOF"
- }
-
- var sb strings.Builder
- sb.Grow(6)
- sb.WriteByte('\'')
- sb.WriteRune(rune(tt))
- sb.WriteByte('\'')
-
- return sb.String()
-}
-
-func resetSimState(sim *SimState) {
- sim.index = -1
- sim.line = 0
- sim.column = -1
- sim.dfaState = nil
-}
-
-type SimState struct {
- index int
- line int
- column int
- dfaState *DFAState
-}
-
-func NewSimState() *SimState {
- s := new(SimState)
- resetSimState(s)
- return s
-}
-
-func (s *SimState) reset() {
- resetSimState(s)
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go b/vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go
deleted file mode 100644
index 4955ac876..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go
+++ /dev/null
@@ -1,218 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-type LL1Analyzer struct {
- atn *ATN
-}
-
-func NewLL1Analyzer(atn *ATN) *LL1Analyzer {
- la := new(LL1Analyzer)
- la.atn = atn
- return la
-}
-
-const (
- // LL1AnalyzerHitPred is a special value added to the lookahead sets to indicate that we hit
- // a predicate during analysis if
- //
- // seeThruPreds==false
- LL1AnalyzerHitPred = TokenInvalidType
-)
-
-// *
-// Calculates the SLL(1) expected lookahead set for each outgoing transition
-// of an {@link ATNState}. The returned array has one element for each
-// outgoing transition in {@code s}. If the closure from transition
-// i leads to a semantic predicate before Matching a symbol, the
-// element at index i of the result will be {@code nil}.
-//
-// @param s the ATN state
-// @return the expected symbols for each outgoing transition of {@code s}.
-func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
- if s == nil {
- return nil
- }
- count := len(s.GetTransitions())
- look := make([]*IntervalSet, count)
- for alt := 0; alt < count; alt++ {
-
- look[alt] = NewIntervalSet()
- lookBusy := NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ClosureBusyCollection, "LL1Analyzer.getDecisionLookahead for lookBusy")
- la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), false, false)
-
- // Wipe out lookahead for this alternative if we found nothing,
- // or we had a predicate when we !seeThruPreds
- if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHitPred) {
- look[alt] = nil
- }
- }
- return look
-}
-
-// Look computes the set of tokens that can follow s in the [ATN] in the
-// specified ctx.
-//
-// If ctx is nil and the end of the rule containing
-// s is reached, [EPSILON] is added to the result set.
-//
-// If ctx is not nil and the end of the outermost rule is
-// reached, [EOF] is added to the result set.
-//
-// Parameter s the ATN state, and stopState is the ATN state to stop at. This can be a
-// [BlockEndState] to detect epsilon paths through a closure.
-//
-// Parameter ctx is the complete parser context, or nil if the context
-// should be ignored
-//
-// The func returns the set of tokens that can follow s in the [ATN] in the
-// specified ctx.
-func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet {
- r := NewIntervalSet()
- var lookContext *PredictionContext
- if ctx != nil {
- lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
- }
- la.look1(s, stopState, lookContext, r, NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ClosureBusyCollection, "LL1Analyzer.Look for la.look1()"),
- NewBitSet(), true, true)
- return r
-}
-
-//*
-// Compute set of tokens that can follow {@code s} in the ATN in the
-// specified {@code ctx}.
-//
-//
-// If {@code ctx} is {@code nil} and {@code stopState} or the end of the
-// rule containing {@code s} is reached, {@link Token//EPSILON} is added to
-// the result set. If {@code ctx} is not {@code nil} and {@code addEOF} is
-// {@code true} and {@code stopState} or the end of the outermost rule is
-// reached, {@link Token//EOF} is added to the result set.
-//
-// @param s the ATN state.
-// @param stopState the ATN state to stop at. This can be a
-// {@link BlockEndState} to detect epsilon paths through a closure.
-// @param ctx The outer context, or {@code nil} if the outer context should
-// not be used.
-// @param look The result lookahead set.
-// @param lookBusy A set used for preventing epsilon closures in the ATN
-// from causing a stack overflow. Outside code should pass
-// {@code NewSet} for this argument.
-// @param calledRuleStack A set used for preventing left recursion in the
-// ATN from causing a stack overflow. Outside code should pass
-// {@code NewBitSet()} for this argument.
-// @param seeThruPreds {@code true} to treat semantic predicates as
-// implicitly {@code true} and "see through them", otherwise {@code false}
-// to treat semantic predicates as opaque and add {@link //HitPred} to the
-// result if one is encountered.
-// @param addEOF Add {@link Token//EOF} to the result if the end of the
-// outermost context is reached. This parameter has no effect if {@code ctx}
-// is {@code nil}.
-
-func (la *LL1Analyzer) look2(_, stopState ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *JStore[*ATNConfig, Comparator[*ATNConfig]],
- calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) {
-
- returnState := la.atn.states[ctx.getReturnState(i)]
- la.look1(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
-
-}
-
-func (la *LL1Analyzer) look1(s, stopState ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *JStore[*ATNConfig, Comparator[*ATNConfig]], calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
-
- c := NewATNConfig6(s, 0, ctx)
-
- if lookBusy.Contains(c) {
- return
- }
-
- _, present := lookBusy.Put(c)
- if present {
- return
-
- }
- if s == stopState {
- if ctx == nil {
- look.addOne(TokenEpsilon)
- return
- } else if ctx.isEmpty() && addEOF {
- look.addOne(TokenEOF)
- return
- }
- }
-
- _, ok := s.(*RuleStopState)
-
- if ok {
- if ctx == nil {
- look.addOne(TokenEpsilon)
- return
- } else if ctx.isEmpty() && addEOF {
- look.addOne(TokenEOF)
- return
- }
-
- if ctx.pcType != PredictionContextEmpty {
- removed := calledRuleStack.contains(s.GetRuleIndex())
- defer func() {
- if removed {
- calledRuleStack.add(s.GetRuleIndex())
- }
- }()
- calledRuleStack.remove(s.GetRuleIndex())
- // run thru all possible stack tops in ctx
- for i := 0; i < ctx.length(); i++ {
- returnState := la.atn.states[ctx.getReturnState(i)]
- la.look2(returnState, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
- }
- return
- }
- }
-
- n := len(s.GetTransitions())
-
- for i := 0; i < n; i++ {
- t := s.GetTransitions()[i]
-
- if t1, ok := t.(*RuleTransition); ok {
- if calledRuleStack.contains(t1.getTarget().GetRuleIndex()) {
- continue
- }
-
- newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
- la.look3(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
- } else if t2, ok := t.(AbstractPredicateTransition); ok {
- if seeThruPreds {
- la.look1(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
- } else {
- look.addOne(LL1AnalyzerHitPred)
- }
- } else if t.getIsEpsilon() {
- la.look1(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
- } else if _, ok := t.(*WildcardTransition); ok {
- look.addRange(TokenMinUserTokenType, la.atn.maxTokenType)
- } else {
- set := t.getLabel()
- if set != nil {
- if _, ok := t.(*NotSetTransition); ok {
- set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType)
- }
- look.addSet(set)
- }
- }
- }
-}
-
-func (la *LL1Analyzer) look3(stopState ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *JStore[*ATNConfig, Comparator[*ATNConfig]],
- calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
-
- newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
-
- defer func() {
- calledRuleStack.remove(t1.getTarget().GetRuleIndex())
- }()
-
- calledRuleStack.add(t1.getTarget().GetRuleIndex())
- la.look1(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
-
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/nostatistics.go b/vendor/github.com/antlr4-go/antlr/v4/nostatistics.go
deleted file mode 100644
index 923c7b52c..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/nostatistics.go
+++ /dev/null
@@ -1,47 +0,0 @@
-//go:build !antlr.stats
-
-package antlr
-
-// This file is compiled when the build configuration antlr.stats is not enabled,
-// which then allows the compiler to optimize out all the code that is not used.
-const collectStats = false
-
-// goRunStats is a dummy struct used when build configuration antlr.stats is not enabled.
-type goRunStats struct {
-}
-
-var Statistics = &goRunStats{}
-
-func (s *goRunStats) AddJStatRec(_ *JStatRec) {
- // Do nothing - compiler will optimize this out (hopefully)
-}
-
-func (s *goRunStats) CollectionAnomalies() {
- // Do nothing - compiler will optimize this out (hopefully)
-}
-
-func (s *goRunStats) Reset() {
- // Do nothing - compiler will optimize this out (hopefully)
-}
-
-func (s *goRunStats) Report(dir string, prefix string) error {
- // Do nothing - compiler will optimize this out (hopefully)
- return nil
-}
-
-func (s *goRunStats) Analyze() {
- // Do nothing - compiler will optimize this out (hopefully)
-}
-
-type statsOption func(*goRunStats) error
-
-func (s *goRunStats) Configure(options ...statsOption) error {
- // Do nothing - compiler will optimize this out (hopefully)
- return nil
-}
-
-func WithTopN(topN int) statsOption {
- return func(s *goRunStats) error {
- return nil
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/parser.go b/vendor/github.com/antlr4-go/antlr/v4/parser.go
deleted file mode 100644
index fb57ac15d..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/parser.go
+++ /dev/null
@@ -1,700 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "strconv"
-)
-
-type Parser interface {
- Recognizer
-
- GetInterpreter() *ParserATNSimulator
-
- GetTokenStream() TokenStream
- GetTokenFactory() TokenFactory
- GetParserRuleContext() ParserRuleContext
- SetParserRuleContext(ParserRuleContext)
- Consume() Token
- GetParseListeners() []ParseTreeListener
-
- GetErrorHandler() ErrorStrategy
- SetErrorHandler(ErrorStrategy)
- GetInputStream() IntStream
- GetCurrentToken() Token
- GetExpectedTokens() *IntervalSet
- NotifyErrorListeners(string, Token, RecognitionException)
- IsExpectedToken(int) bool
- GetPrecedence() int
- GetRuleInvocationStack(ParserRuleContext) []string
-}
-
-type BaseParser struct {
- *BaseRecognizer
-
- Interpreter *ParserATNSimulator
- BuildParseTrees bool
-
- input TokenStream
- errHandler ErrorStrategy
- precedenceStack IntStack
- ctx ParserRuleContext
-
- tracer *TraceListener
- parseListeners []ParseTreeListener
- _SyntaxErrors int
-}
-
-// NewBaseParser contains all the parsing support code to embed in parsers. Essentially most of it is error
-// recovery stuff.
-//
-//goland:noinspection GoUnusedExportedFunction
-func NewBaseParser(input TokenStream) *BaseParser {
-
- p := new(BaseParser)
-
- p.BaseRecognizer = NewBaseRecognizer()
-
- // The input stream.
- p.input = nil
-
- // The error handling strategy for the parser. The default value is a new
- // instance of {@link DefaultErrorStrategy}.
- p.errHandler = NewDefaultErrorStrategy()
- p.precedenceStack = make([]int, 0)
- p.precedenceStack.Push(0)
-
- // The ParserRuleContext object for the currently executing rule.
- // This is always non-nil during the parsing process.
- p.ctx = nil
-
- // Specifies whether the parser should construct a parse tree during
- // the parsing process. The default value is {@code true}.
- p.BuildParseTrees = true
-
- // When setTrace(true) is called, a reference to the
- // TraceListener is stored here, so it can be easily removed in a
- // later call to setTrace(false). The listener itself is
- // implemented as a parser listener so this field is not directly used by
- // other parser methods.
- p.tracer = nil
-
- // The list of ParseTreeListener listeners registered to receive
- // events during the parse.
- p.parseListeners = nil
-
- // The number of syntax errors Reported during parsing. This value is
- // incremented each time NotifyErrorListeners is called.
- p._SyntaxErrors = 0
- p.SetInputStream(input)
-
- return p
-}
-
-// This field maps from the serialized ATN string to the deserialized [ATN] with
-// bypass alternatives.
-//
-// [ATNDeserializationOptions.isGenerateRuleBypassTransitions]
-//
-//goland:noinspection GoUnusedGlobalVariable
-var bypassAltsAtnCache = make(map[string]int)
-
-// reset the parser's state//
-func (p *BaseParser) reset() {
- if p.input != nil {
- p.input.Seek(0)
- }
- p.errHandler.reset(p)
- p.ctx = nil
- p._SyntaxErrors = 0
- p.SetTrace(nil)
- p.precedenceStack = make([]int, 0)
- p.precedenceStack.Push(0)
- if p.Interpreter != nil {
- p.Interpreter.reset()
- }
-}
-
-func (p *BaseParser) GetErrorHandler() ErrorStrategy {
- return p.errHandler
-}
-
-func (p *BaseParser) SetErrorHandler(e ErrorStrategy) {
- p.errHandler = e
-}
-
-// Match current input symbol against {@code ttype}. If the symbol type
-// Matches, {@link ANTLRErrorStrategy//ReportMatch} and {@link //consume} are
-// called to complete the Match process.
-//
-//
-// If the symbol type does not Match,
-// {@link ANTLRErrorStrategy//recoverInline} is called on the current error
-// strategy to attempt recovery. If {@link //getBuildParseTree} is
-// {@code true} and the token index of the symbol returned by
-// {@link ANTLRErrorStrategy//recoverInline} is -1, the symbol is added to
-// the parse tree by calling {@link ParserRuleContext//addErrorNode}.
-//
-// @param ttype the token type to Match
-// @return the Matched symbol
-// @panics RecognitionException if the current input symbol did not Match
-// {@code ttype} and the error strategy could not recover from the
-// mismatched symbol
-
-func (p *BaseParser) Match(ttype int) Token {
-
- t := p.GetCurrentToken()
-
- if t.GetTokenType() == ttype {
- p.errHandler.ReportMatch(p)
- p.Consume()
- } else {
- t = p.errHandler.RecoverInline(p)
- if p.HasError() {
- return nil
- }
- if p.BuildParseTrees && t.GetTokenIndex() == -1 {
-
- // we must have conjured up a new token during single token
- // insertion if it's not the current symbol
- p.ctx.AddErrorNode(t)
- }
- }
-
- return t
-}
-
-// Match current input symbol as a wildcard. If the symbol type Matches
-// (i.e. has a value greater than 0), {@link ANTLRErrorStrategy//ReportMatch}
-// and {@link //consume} are called to complete the Match process.
-//
-//
-// If the symbol type does not Match,
-// {@link ANTLRErrorStrategy//recoverInline} is called on the current error
-// strategy to attempt recovery. If {@link //getBuildParseTree} is
-// {@code true} and the token index of the symbol returned by
-// {@link ANTLRErrorStrategy//recoverInline} is -1, the symbol is added to
-// the parse tree by calling {@link ParserRuleContext//addErrorNode}.
-//
-// @return the Matched symbol
-// @panics RecognitionException if the current input symbol did not Match
-// a wildcard and the error strategy could not recover from the mismatched
-// symbol
-
-func (p *BaseParser) MatchWildcard() Token {
- t := p.GetCurrentToken()
- if t.GetTokenType() > 0 {
- p.errHandler.ReportMatch(p)
- p.Consume()
- } else {
- t = p.errHandler.RecoverInline(p)
- if p.BuildParseTrees && t.GetTokenIndex() == -1 {
- // we must have conjured up a new token during single token
- // insertion if it's not the current symbol
- p.ctx.AddErrorNode(t)
- }
- }
- return t
-}
-
-func (p *BaseParser) GetParserRuleContext() ParserRuleContext {
- return p.ctx
-}
-
-func (p *BaseParser) SetParserRuleContext(v ParserRuleContext) {
- p.ctx = v
-}
-
-func (p *BaseParser) GetParseListeners() []ParseTreeListener {
- if p.parseListeners == nil {
- return make([]ParseTreeListener, 0)
- }
- return p.parseListeners
-}
-
-// AddParseListener registers listener to receive events during the parsing process.
-//
-// To support output-preserving grammar transformations (including but not
-// limited to left-recursion removal, automated left-factoring, and
-// optimized code generation), calls to listener methods during the parse
-// may differ substantially from calls made by
-// [ParseTreeWalker.DEFAULT] used after the parse is complete. In
-// particular, rule entry and exit events may occur in a different order
-// during the parse than after the parser. In addition, calls to certain
-// rule entry methods may be omitted.
-//
-// With the following specific exceptions, calls to listener events are
-// deterministic, i.e. for identical input the calls to listener
-// methods will be the same.
-//
-// - Alterations to the grammar used to generate code may change the
-// behavior of the listener calls.
-// - Alterations to the command line options passed to ANTLR 4 when
-// generating the parser may change the behavior of the listener calls.
-// - Changing the version of the ANTLR Tool used to generate the parser
-// may change the behavior of the listener calls.
-func (p *BaseParser) AddParseListener(listener ParseTreeListener) {
- if listener == nil {
- panic("listener")
- }
- if p.parseListeners == nil {
- p.parseListeners = make([]ParseTreeListener, 0)
- }
- p.parseListeners = append(p.parseListeners, listener)
-}
-
-// RemoveParseListener removes listener from the list of parse listeners.
-//
-// If listener is nil or has not been added as a parse
-// listener, this func does nothing.
-func (p *BaseParser) RemoveParseListener(listener ParseTreeListener) {
-
- if p.parseListeners != nil {
-
- idx := -1
- for i, v := range p.parseListeners {
- if v == listener {
- idx = i
- break
- }
- }
-
- if idx == -1 {
- return
- }
-
- // remove the listener from the slice
- p.parseListeners = append(p.parseListeners[0:idx], p.parseListeners[idx+1:]...)
-
- if len(p.parseListeners) == 0 {
- p.parseListeners = nil
- }
- }
-}
-
-// Remove all parse listeners.
-func (p *BaseParser) removeParseListeners() {
- p.parseListeners = nil
-}
-
-// TriggerEnterRuleEvent notifies all parse listeners of an enter rule event.
-func (p *BaseParser) TriggerEnterRuleEvent() {
- if p.parseListeners != nil {
- ctx := p.ctx
- for _, listener := range p.parseListeners {
- listener.EnterEveryRule(ctx)
- ctx.EnterRule(listener)
- }
- }
-}
-
-// TriggerExitRuleEvent notifies any parse listeners of an exit rule event.
-func (p *BaseParser) TriggerExitRuleEvent() {
- if p.parseListeners != nil {
- // reverse order walk of listeners
- ctx := p.ctx
- l := len(p.parseListeners) - 1
-
- for i := range p.parseListeners {
- listener := p.parseListeners[l-i]
- ctx.ExitRule(listener)
- listener.ExitEveryRule(ctx)
- }
- }
-}
-
-func (p *BaseParser) GetInterpreter() *ParserATNSimulator {
- return p.Interpreter
-}
-
-func (p *BaseParser) GetATN() *ATN {
- return p.Interpreter.atn
-}
-
-func (p *BaseParser) GetTokenFactory() TokenFactory {
- return p.input.GetTokenSource().GetTokenFactory()
-}
-
-// setTokenFactory is used to tell our token source and error strategy about a new way to create tokens.
-func (p *BaseParser) setTokenFactory(factory TokenFactory) {
- p.input.GetTokenSource().setTokenFactory(factory)
-}
-
-// GetATNWithBypassAlts - the ATN with bypass alternatives is expensive to create, so we create it
-// lazily.
-func (p *BaseParser) GetATNWithBypassAlts() {
-
- // TODO - Implement this?
- panic("Not implemented!")
-
- // serializedAtn := p.getSerializedATN()
- // if (serializedAtn == nil) {
- // panic("The current parser does not support an ATN with bypass alternatives.")
- // }
- // result := p.bypassAltsAtnCache[serializedAtn]
- // if (result == nil) {
- // deserializationOptions := NewATNDeserializationOptions(nil)
- // deserializationOptions.generateRuleBypassTransitions = true
- // result = NewATNDeserializer(deserializationOptions).deserialize(serializedAtn)
- // p.bypassAltsAtnCache[serializedAtn] = result
- // }
- // return result
-}
-
-// The preferred method of getting a tree pattern. For example, here's a
-// sample use:
-//
-//
-// ParseTree t = parser.expr()
-// ParseTreePattern p = parser.compileParseTreePattern("<ID>+0",
-// MyParser.RULE_expr)
-// ParseTreeMatch m = p.Match(t)
-// String id = m.Get("ID")
-//
-
-//goland:noinspection GoUnusedParameter
-func (p *BaseParser) compileParseTreePattern(pattern, patternRuleIndex, lexer Lexer) {
-
- panic("NewParseTreePatternMatcher not implemented!")
- //
- // if (lexer == nil) {
- // if (p.GetTokenStream() != nil) {
- // tokenSource := p.GetTokenStream().GetTokenSource()
- // if _, ok := tokenSource.(ILexer); ok {
- // lexer = tokenSource
- // }
- // }
- // }
- // if (lexer == nil) {
- // panic("Parser can't discover a lexer to use")
- // }
-
- // m := NewParseTreePatternMatcher(lexer, p)
- // return m.compile(pattern, patternRuleIndex)
-}
-
-func (p *BaseParser) GetInputStream() IntStream {
- return p.GetTokenStream()
-}
-
-func (p *BaseParser) SetInputStream(input TokenStream) {
- p.SetTokenStream(input)
-}
-
-func (p *BaseParser) GetTokenStream() TokenStream {
- return p.input
-}
-
-// SetTokenStream installs input as the token stream and resets the parser.
-func (p *BaseParser) SetTokenStream(input TokenStream) {
- p.input = nil
- p.reset()
- p.input = input
-}
-
-// GetCurrentToken returns the current token at LT(1).
-//
-// [Match] needs to return the current input symbol, which gets put
-// into the label for the associated token ref e.g., x=ID.
-func (p *BaseParser) GetCurrentToken() Token {
- return p.input.LT(1)
-}
-
-func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken Token, err RecognitionException) {
- if offendingToken == nil {
- offendingToken = p.GetCurrentToken()
- }
- p._SyntaxErrors++
- line := offendingToken.GetLine()
- column := offendingToken.GetColumn()
- listener := p.GetErrorListenerDispatch()
- listener.SyntaxError(p, offendingToken, line, column, msg, err)
-}
-
-func (p *BaseParser) Consume() Token {
- o := p.GetCurrentToken()
- if o.GetTokenType() != TokenEOF {
- p.GetInputStream().Consume()
- }
- hasListener := p.parseListeners != nil && len(p.parseListeners) > 0
- if p.BuildParseTrees || hasListener {
- if p.errHandler.InErrorRecoveryMode(p) {
- node := p.ctx.AddErrorNode(o)
- if p.parseListeners != nil {
- for _, l := range p.parseListeners {
- l.VisitErrorNode(node)
- }
- }
-
- } else {
- node := p.ctx.AddTokenNode(o)
- if p.parseListeners != nil {
- for _, l := range p.parseListeners {
- l.VisitTerminal(node)
- }
- }
- }
- // node.invokingState = p.state
- }
-
- return o
-}
-
-func (p *BaseParser) addContextToParseTree() {
- // add current context to parent if we have a parent
- if p.ctx.GetParent() != nil {
- p.ctx.GetParent().(ParserRuleContext).AddChild(p.ctx)
- }
-}
-
-func (p *BaseParser) EnterRule(localctx ParserRuleContext, state, _ int) {
- p.SetState(state)
- p.ctx = localctx
- p.ctx.SetStart(p.input.LT(1))
- if p.BuildParseTrees {
- p.addContextToParseTree()
- }
- if p.parseListeners != nil {
- p.TriggerEnterRuleEvent()
- }
-}
-
-func (p *BaseParser) ExitRule() {
- p.ctx.SetStop(p.input.LT(-1))
- // trigger event on ctx, before it reverts to parent
- if p.parseListeners != nil {
- p.TriggerExitRuleEvent()
- }
- p.SetState(p.ctx.GetInvokingState())
- if p.ctx.GetParent() != nil {
- p.ctx = p.ctx.GetParent().(ParserRuleContext)
- } else {
- p.ctx = nil
- }
-}
-
-func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
- localctx.SetAltNumber(altNum)
- // if we have a new localctx, make sure we replace existing ctx
- // that is previous child of parse tree
- if p.BuildParseTrees && p.ctx != localctx {
- if p.ctx.GetParent() != nil {
- p.ctx.GetParent().(ParserRuleContext).RemoveLastChild()
- p.ctx.GetParent().(ParserRuleContext).AddChild(localctx)
- }
- }
- p.ctx = localctx
-}
-
-// Get the precedence level for the top-most precedence rule.
-//
-// @return The precedence level for the top-most precedence rule, or -1 if
-// the parser context is not nested within a precedence rule.
-
-func (p *BaseParser) GetPrecedence() int {
- if len(p.precedenceStack) == 0 {
- return -1
- }
-
- return p.precedenceStack[len(p.precedenceStack)-1]
-}
-
-func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, _, precedence int) {
- p.SetState(state)
- p.precedenceStack.Push(precedence)
- p.ctx = localctx
- p.ctx.SetStart(p.input.LT(1))
- if p.parseListeners != nil {
- p.TriggerEnterRuleEvent() // simulates rule entry for
- // left-recursive rules
- }
-}
-
-//
-// Like {@link //EnterRule} but for recursive rules.
-
-func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state, _ int) {
- previous := p.ctx
- previous.SetParent(localctx)
- previous.SetInvokingState(state)
- previous.SetStop(p.input.LT(-1))
-
- p.ctx = localctx
- p.ctx.SetStart(previous.GetStart())
- if p.BuildParseTrees {
- p.ctx.AddChild(previous)
- }
- if p.parseListeners != nil {
- p.TriggerEnterRuleEvent() // simulates rule entry for
- // left-recursive rules
- }
-}
-
-func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
- _, _ = p.precedenceStack.Pop()
- p.ctx.SetStop(p.input.LT(-1))
- retCtx := p.ctx // save current ctx (return value)
- // unroll so ctx is as it was before call to recursive method
- if p.parseListeners != nil {
- for p.ctx != parentCtx {
- p.TriggerExitRuleEvent()
- p.ctx = p.ctx.GetParent().(ParserRuleContext)
- }
- } else {
- p.ctx = parentCtx
- }
- // hook into tree
- retCtx.SetParent(parentCtx)
- if p.BuildParseTrees && parentCtx != nil {
- // add return ctx into invoking rule's tree
- parentCtx.AddChild(retCtx)
- }
-}
-
-func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext {
- ctx := p.ctx
- for ctx != nil {
- if ctx.GetRuleIndex() == ruleIndex {
- return ctx
- }
- ctx = ctx.GetParent().(ParserRuleContext)
- }
- return nil
-}
-
-func (p *BaseParser) Precpred(_ RuleContext, precedence int) bool {
- return precedence >= p.precedenceStack[len(p.precedenceStack)-1]
-}
-
-//goland:noinspection GoUnusedParameter
-func (p *BaseParser) inContext(context ParserRuleContext) bool {
- // TODO: useful in parser?
- return false
-}
-
-// IsExpectedToken checks whether symbol can follow the current state in the
-// {ATN}. The behavior of this method is equivalent to the following, but is
-// implemented such that the complete context-sensitive follow set does not
-// need to be explicitly constructed.
-//
-// return getExpectedTokens().contains(symbol)
-func (p *BaseParser) IsExpectedToken(symbol int) bool {
- atn := p.Interpreter.atn
- ctx := p.ctx
- s := atn.states[p.state]
- following := atn.NextTokens(s, nil)
- if following.contains(symbol) {
- return true
- }
- if !following.contains(TokenEpsilon) {
- return false
- }
- for ctx != nil && ctx.GetInvokingState() >= 0 && following.contains(TokenEpsilon) {
- invokingState := atn.states[ctx.GetInvokingState()]
- rt := invokingState.GetTransitions()[0]
- following = atn.NextTokens(rt.(*RuleTransition).followState, nil)
- if following.contains(symbol) {
- return true
- }
- ctx = ctx.GetParent().(ParserRuleContext)
- }
- if following.contains(TokenEpsilon) && symbol == TokenEOF {
- return true
- }
-
- return false
-}
-
-// GetExpectedTokens and returns the set of input symbols which could follow the current parser
-// state and context, as given by [GetState] and [GetContext],
-// respectively.
-func (p *BaseParser) GetExpectedTokens() *IntervalSet {
- return p.Interpreter.atn.getExpectedTokens(p.state, p.ctx)
-}
-
-func (p *BaseParser) GetExpectedTokensWithinCurrentRule() *IntervalSet {
- atn := p.Interpreter.atn
- s := atn.states[p.state]
- return atn.NextTokens(s, nil)
-}
-
-// GetRuleIndex get a rule's index (i.e., RULE_ruleName field) or -1 if not found.
-func (p *BaseParser) GetRuleIndex(ruleName string) int {
- var ruleIndex, ok = p.GetRuleIndexMap()[ruleName]
- if ok {
- return ruleIndex
- }
-
- return -1
-}
-
-// GetRuleInvocationStack returns a list of the rule names in your parser instance
-// leading up to a call to the current rule. You could override if
-// you want more details such as the file/line info of where
-// in the ATN a rule is invoked.
-func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string {
- if c == nil {
- c = p.ctx
- }
- stack := make([]string, 0)
- for c != nil {
- // compute what follows who invoked us
- ruleIndex := c.GetRuleIndex()
- if ruleIndex < 0 {
- stack = append(stack, "n/a")
- } else {
- stack = append(stack, p.GetRuleNames()[ruleIndex])
- }
-
- vp := c.GetParent()
-
- if vp == nil {
- break
- }
-
- c = vp.(ParserRuleContext)
- }
- return stack
-}
-
-// GetDFAStrings returns a list of all DFA states used for debugging purposes
-func (p *BaseParser) GetDFAStrings() string {
- return fmt.Sprint(p.Interpreter.decisionToDFA)
-}
-
-// DumpDFA prints the whole of the DFA for debugging
-func (p *BaseParser) DumpDFA() {
- seenOne := false
- for _, dfa := range p.Interpreter.decisionToDFA {
- if dfa.Len() > 0 {
- if seenOne {
- fmt.Println()
- }
- fmt.Println("Decision " + strconv.Itoa(dfa.decision) + ":")
- fmt.Print(dfa.String(p.LiteralNames, p.SymbolicNames))
- seenOne = true
- }
- }
-}
-
-func (p *BaseParser) GetSourceName() string {
- return p.GrammarFileName
-}
-
-// SetTrace installs a trace listener for the parse.
-//
-// During a parse it is sometimes useful to listen in on the rule entry and exit
-// events as well as token Matches. This is for quick and dirty debugging.
-func (p *BaseParser) SetTrace(trace *TraceListener) {
- if trace == nil {
- p.RemoveParseListener(p.tracer)
- p.tracer = nil
- } else {
- if p.tracer != nil {
- p.RemoveParseListener(p.tracer)
- }
- p.tracer = NewTraceListener(p)
- p.AddParseListener(p.tracer)
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go b/vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go
deleted file mode 100644
index ae2869692..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go
+++ /dev/null
@@ -1,1668 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "strconv"
- "strings"
-)
-
-var ()
-
-// ClosureBusy is a store of ATNConfigs and is a tiny abstraction layer over
-// a standard JStore so that we can use Lazy instantiation of the JStore, mostly
-// to avoid polluting the stats module with a ton of JStore instances with nothing in them.
-type ClosureBusy struct {
- bMap *JStore[*ATNConfig, Comparator[*ATNConfig]]
- desc string
-}
-
-// NewClosureBusy creates a new ClosureBusy instance used to avoid infinite recursion for right-recursive rules
-func NewClosureBusy(desc string) *ClosureBusy {
- return &ClosureBusy{
- desc: desc,
- }
-}
-
-func (c *ClosureBusy) Put(config *ATNConfig) (*ATNConfig, bool) {
- if c.bMap == nil {
- c.bMap = NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ClosureBusyCollection, c.desc)
- }
- return c.bMap.Put(config)
-}
-
-type ParserATNSimulator struct {
- BaseATNSimulator
-
- parser Parser
- predictionMode int
- input TokenStream
- startIndex int
- dfa *DFA
- mergeCache *JPCMap
- outerContext ParserRuleContext
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *ParserATNSimulator {
-
- p := &ParserATNSimulator{
- BaseATNSimulator: BaseATNSimulator{
- atn: atn,
- sharedContextCache: sharedContextCache,
- },
- }
-
- p.parser = parser
- p.decisionToDFA = decisionToDFA
- // SLL, LL, or LL + exact ambig detection?//
- p.predictionMode = PredictionModeLL
- // LAME globals to avoid parameters!!!!! I need these down deep in predTransition
- p.input = nil
- p.startIndex = 0
- p.outerContext = nil
- p.dfa = nil
- // Each prediction operation uses a cache for merge of prediction contexts.
- // Don't keep around as it wastes huge amounts of memory. [JPCMap]
- // isn't Synchronized, but we're ok since two threads shouldn't reuse same
- // parser/atn-simulator object because it can only handle one input at a time.
- // This maps graphs a and b to merged result c. (a,b) -> c. We can avoid
- // the merge if we ever see a and b again. Note that (b,a) -> c should
- // also be examined during cache lookup.
- //
- p.mergeCache = nil
-
- return p
-}
-
-func (p *ParserATNSimulator) GetPredictionMode() int {
- return p.predictionMode
-}
-
-func (p *ParserATNSimulator) SetPredictionMode(v int) {
- p.predictionMode = v
-}
-
-func (p *ParserATNSimulator) reset() {
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) AdaptivePredict(parser *BaseParser, input TokenStream, decision int, outerContext ParserRuleContext) int {
- if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("adaptivePredict decision " + strconv.Itoa(decision) +
- " exec LA(1)==" + p.getLookaheadName(input) +
- " line " + strconv.Itoa(input.LT(1).GetLine()) + ":" +
- strconv.Itoa(input.LT(1).GetColumn()))
- }
- p.input = input
- p.startIndex = input.Index()
- p.outerContext = outerContext
-
- dfa := p.decisionToDFA[decision]
- p.dfa = dfa
- m := input.Mark()
- index := input.Index()
-
- defer func() {
- p.dfa = nil
- p.mergeCache = nil // whack cache after each prediction
- // Do not attempt to run a GC now that we're done with the cache as it makes the
- // GC overhead terrible for badly formed grammars and has little effect on well formed
- // grammars.
- // I have made some extra effort to try and reduce memory pressure by reusing allocations when
- // possible. However, it can only have a limited effect. The real solution is to encourage grammar
- // authors to think more carefully about their grammar and to use the new antlr.stats tag to inspect
- // what is happening at runtime, along with using the error listener to report ambiguities.
-
- input.Seek(index)
- input.Release(m)
- }()
-
- // Now we are certain to have a specific decision's DFA
- // But, do we still need an initial state?
- var s0 *DFAState
- p.atn.stateMu.RLock()
- if dfa.getPrecedenceDfa() {
- p.atn.edgeMu.RLock()
- // the start state for a precedence DFA depends on the current
- // parser precedence, and is provided by a DFA method.
- s0 = dfa.getPrecedenceStartState(p.parser.GetPrecedence())
- p.atn.edgeMu.RUnlock()
- } else {
- // the start state for a "regular" DFA is just s0
- s0 = dfa.getS0()
- }
- p.atn.stateMu.RUnlock()
-
- if s0 == nil {
- if outerContext == nil {
- outerContext = ParserRuleContextEmpty
- }
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("predictATN decision " + strconv.Itoa(dfa.decision) +
- " exec LA(1)==" + p.getLookaheadName(input) +
- ", outerContext=" + outerContext.String(p.parser.GetRuleNames(), nil))
- }
- fullCtx := false
- s0Closure := p.computeStartState(dfa.atnStartState, ParserRuleContextEmpty, fullCtx)
-
- p.atn.stateMu.Lock()
- if dfa.getPrecedenceDfa() {
- // If this is a precedence DFA, we use applyPrecedenceFilter
- // to convert the computed start state to a precedence start
- // state. We then use DFA.setPrecedenceStartState to set the
- // appropriate start state for the precedence level rather
- // than simply setting DFA.s0.
- //
- dfa.s0.configs = s0Closure
- s0Closure = p.applyPrecedenceFilter(s0Closure)
- s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
- p.atn.edgeMu.Lock()
- dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0)
- p.atn.edgeMu.Unlock()
- } else {
- s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
- dfa.setS0(s0)
- }
- p.atn.stateMu.Unlock()
- }
-
- alt, re := p.execATN(dfa, s0, input, index, outerContext)
- parser.SetError(re)
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("DFA after predictATN: " + dfa.String(p.parser.GetLiteralNames(), nil))
- }
- return alt
-
-}
-
-// execATN performs ATN simulation to compute a predicted alternative based
-// upon the remaining input, but also updates the DFA cache to avoid
-// having to traverse the ATN again for the same input sequence.
-//
-// There are some key conditions we're looking for after computing a new
-// set of ATN configs (proposed DFA state):
-//
-// - If the set is empty, there is no viable alternative for current symbol
-// - Does the state uniquely predict an alternative?
-// - Does the state have a conflict that would prevent us from
-// putting it on the work list?
-//
-// We also have some key operations to do:
-//
-// - Add an edge from previous DFA state to potentially NewDFA state, D,
-// - Upon current symbol but only if adding to work list, which means in all
-// cases except no viable alternative (and possibly non-greedy decisions?)
-// - Collecting predicates and adding semantic context to DFA accept states
-// - adding rule context to context-sensitive DFA accept states
-// - Consuming an input symbol
-// - Reporting a conflict
-// - Reporting an ambiguity
-// - Reporting a context sensitivity
-// - Reporting insufficient predicates
-//
-// Cover these cases:
-//
-// - dead end
-// - single alt
-// - single alt + predicates
-// - conflict
-// - conflict + predicates
-//
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, startIndex int, outerContext ParserRuleContext) (int, RecognitionException) {
-
- if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("execATN decision " + strconv.Itoa(dfa.decision) +
- ", DFA state " + s0.String() +
- ", LA(1)==" + p.getLookaheadName(input) +
- " line " + strconv.Itoa(input.LT(1).GetLine()) + ":" + strconv.Itoa(input.LT(1).GetColumn()))
- }
-
- previousD := s0
-
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("s0 = " + s0.String())
- }
- t := input.LA(1)
- for { // for more work
- D := p.getExistingTargetState(previousD, t)
- if D == nil {
- D = p.computeTargetState(dfa, previousD, t)
- }
- if D == ATNSimulatorError {
- // if any configs in previous dipped into outer context, that
- // means that input up to t actually finished entry rule
- // at least for SLL decision. Full LL doesn't dip into outer
- // so don't need special case.
- // We will get an error no matter what so delay until after
- // decision better error message. Also, no reachable target
- // ATN states in SLL implies LL will also get nowhere.
- // If conflict in states that dip out, choose min since we
- // will get error no matter what.
- e := p.noViableAlt(input, outerContext, previousD.configs, startIndex)
- input.Seek(startIndex)
- alt := p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configs, outerContext)
- if alt != ATNInvalidAltNumber {
- return alt, nil
- }
- p.parser.SetError(e)
- return ATNInvalidAltNumber, e
- }
- if D.requiresFullContext && p.predictionMode != PredictionModeSLL {
- // IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error)
- conflictingAlts := D.configs.conflictingAlts
- if D.predicates != nil {
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("DFA state has preds in DFA sim LL fail-over")
- }
- conflictIndex := input.Index()
- if conflictIndex != startIndex {
- input.Seek(startIndex)
- }
- conflictingAlts = p.evalSemanticContext(D.predicates, outerContext, true)
- if conflictingAlts.length() == 1 {
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("Full LL avoided")
- }
- return conflictingAlts.minValue(), nil
- }
- if conflictIndex != startIndex {
- // restore the index so Reporting the fallback to full
- // context occurs with the index at the correct spot
- input.Seek(conflictIndex)
- }
- }
- if runtimeConfig.parserATNSimulatorDFADebug {
- fmt.Println("ctx sensitive state " + outerContext.String(nil, nil) + " in " + D.String())
- }
- fullCtx := true
- s0Closure := p.computeStartState(dfa.atnStartState, outerContext, fullCtx)
- p.ReportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.Index())
- alt, re := p.execATNWithFullContext(dfa, D, s0Closure, input, startIndex, outerContext)
- return alt, re
- }
- if D.isAcceptState {
- if D.predicates == nil {
- return D.prediction, nil
- }
- stopIndex := input.Index()
- input.Seek(startIndex)
- alts := p.evalSemanticContext(D.predicates, outerContext, true)
-
- switch alts.length() {
- case 0:
- return ATNInvalidAltNumber, p.noViableAlt(input, outerContext, D.configs, startIndex)
- case 1:
- return alts.minValue(), nil
- default:
- // Report ambiguity after predicate evaluation to make sure the correct set of ambig alts is Reported.
- p.ReportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configs)
- return alts.minValue(), nil
- }
- }
- previousD = D
-
- if t != TokenEOF {
- input.Consume()
- t = input.LA(1)
- }
- }
-}
-
-// Get an existing target state for an edge in the DFA. If the target state
-// for the edge has not yet been computed or is otherwise not available,
-// this method returns {@code nil}.
-//
-// @param previousD The current DFA state
-// @param t The next input symbol
-// @return The existing target DFA state for the given input symbol
-// {@code t}, or {@code nil} if the target state for this edge is not
-// already cached
-
-func (p *ParserATNSimulator) getExistingTargetState(previousD *DFAState, t int) *DFAState {
- if t+1 < 0 {
- return nil
- }
-
- p.atn.edgeMu.RLock()
- defer p.atn.edgeMu.RUnlock()
- edges := previousD.getEdges()
- if edges == nil || t+1 >= len(edges) {
- return nil
- }
- return previousD.getIthEdge(t + 1)
-}
-
-// Compute a target state for an edge in the DFA, and attempt to add the
-// computed state and corresponding edge to the DFA.
-//
-// @param dfa The DFA
-// @param previousD The current DFA state
-// @param t The next input symbol
-//
-// @return The computed target DFA state for the given input symbol
-// {@code t}. If {@code t} does not lead to a valid DFA state, this method
-// returns {@link //ERROR}.
-//
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) computeTargetState(dfa *DFA, previousD *DFAState, t int) *DFAState {
- reach := p.computeReachSet(previousD.configs, t, false)
-
- if reach == nil {
- p.addDFAEdge(dfa, previousD, t, ATNSimulatorError)
- return ATNSimulatorError
- }
- // create new target state we'll add to DFA after it's complete
- D := NewDFAState(-1, reach)
-
- predictedAlt := p.getUniqueAlt(reach)
-
- if runtimeConfig.parserATNSimulatorDebug {
- altSubSets := PredictionModegetConflictingAltSubsets(reach)
- fmt.Println("SLL altSubSets=" + fmt.Sprint(altSubSets) +
- ", previous=" + previousD.configs.String() +
- ", configs=" + reach.String() +
- ", predict=" + strconv.Itoa(predictedAlt) +
- ", allSubsetsConflict=" +
- fmt.Sprint(PredictionModeallSubsetsConflict(altSubSets)) +
- ", conflictingAlts=" + p.getConflictingAlts(reach).String())
- }
- if predictedAlt != ATNInvalidAltNumber {
- // NO CONFLICT, UNIQUELY PREDICTED ALT
- D.isAcceptState = true
- D.configs.uniqueAlt = predictedAlt
- D.setPrediction(predictedAlt)
- } else if PredictionModehasSLLConflictTerminatingPrediction(p.predictionMode, reach) {
- // MORE THAN ONE VIABLE ALTERNATIVE
- D.configs.conflictingAlts = p.getConflictingAlts(reach)
- D.requiresFullContext = true
- // in SLL-only mode, we will stop at this state and return the minimum alt
- D.isAcceptState = true
- D.setPrediction(D.configs.conflictingAlts.minValue())
- }
- if D.isAcceptState && D.configs.hasSemanticContext {
- p.predicateDFAState(D, p.atn.getDecisionState(dfa.decision))
- if D.predicates != nil {
- D.setPrediction(ATNInvalidAltNumber)
- }
- }
- // all adds to dfa are done after we've created full D state
- D = p.addDFAEdge(dfa, previousD, t, D)
- return D
-}
-
-func (p *ParserATNSimulator) predicateDFAState(dfaState *DFAState, decisionState DecisionState) {
- // We need to test all predicates, even in DFA states that
- // uniquely predict alternative.
- nalts := len(decisionState.GetTransitions())
- // Update DFA so reach becomes accept state with (predicate,alt)
- // pairs if preds found for conflicting alts
- altsToCollectPredsFrom := p.getConflictingAltsOrUniqueAlt(dfaState.configs)
- altToPred := p.getPredsForAmbigAlts(altsToCollectPredsFrom, dfaState.configs, nalts)
- if altToPred != nil {
- dfaState.predicates = p.getPredicatePredictions(altsToCollectPredsFrom, altToPred)
- dfaState.setPrediction(ATNInvalidAltNumber) // make sure we use preds
- } else {
- // There are preds in configs but they might go away
- // when OR'd together like {p}? || NONE == NONE. If neither
- // alt has preds, resolve to min alt
- dfaState.setPrediction(altsToCollectPredsFrom.minValue())
- }
-}
-
-// comes back with reach.uniqueAlt set to a valid alt
-//
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 *ATNConfigSet, input TokenStream, startIndex int, outerContext ParserRuleContext) (int, RecognitionException) {
-
- if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("execATNWithFullContext " + s0.String())
- }
-
- fullCtx := true
- foundExactAmbig := false
- var reach *ATNConfigSet
- previous := s0
- input.Seek(startIndex)
- t := input.LA(1)
- predictedAlt := -1
-
- for { // for more work
- reach = p.computeReachSet(previous, t, fullCtx)
- if reach == nil {
- // if any configs in previous dipped into outer context, that
- // means that input up to t actually finished entry rule
- // at least for LL decision. Full LL doesn't dip into outer
- // so don't need special case.
- // We will get an error no matter what so delay until after
- // decision better error message. Also, no reachable target
- // ATN states in SLL implies LL will also get nowhere.
- // If conflict in states that dip out, choose min since we
- // will get error no matter what.
- input.Seek(startIndex)
- alt := p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext)
- if alt != ATNInvalidAltNumber {
- return alt, nil
- }
- return alt, p.noViableAlt(input, outerContext, previous, startIndex)
- }
- altSubSets := PredictionModegetConflictingAltSubsets(reach)
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("LL altSubSets=" + fmt.Sprint(altSubSets) + ", predict=" +
- strconv.Itoa(PredictionModegetUniqueAlt(altSubSets)) + ", resolvesToJustOneViableAlt=" +
- fmt.Sprint(PredictionModeresolvesToJustOneViableAlt(altSubSets)))
- }
- reach.uniqueAlt = p.getUniqueAlt(reach)
- // unique prediction?
- if reach.uniqueAlt != ATNInvalidAltNumber {
- predictedAlt = reach.uniqueAlt
- break
- }
- if p.predictionMode != PredictionModeLLExactAmbigDetection {
- predictedAlt = PredictionModeresolvesToJustOneViableAlt(altSubSets)
- if predictedAlt != ATNInvalidAltNumber {
- break
- }
- } else {
- // In exact ambiguity mode, we never try to terminate early.
- // Just keeps scarfing until we know what the conflict is
- if PredictionModeallSubsetsConflict(altSubSets) && PredictionModeallSubsetsEqual(altSubSets) {
- foundExactAmbig = true
- predictedAlt = PredictionModegetSingleViableAlt(altSubSets)
- break
- }
- // else there are multiple non-conflicting subsets or
- // we're not sure what the ambiguity is yet.
- // So, keep going.
- }
- previous = reach
- if t != TokenEOF {
- input.Consume()
- t = input.LA(1)
- }
- }
- // If the configuration set uniquely predicts an alternative,
- // without conflict, then we know that it's a full LL decision
- // not SLL.
- if reach.uniqueAlt != ATNInvalidAltNumber {
- p.ReportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.Index())
- return predictedAlt, nil
- }
- // We do not check predicates here because we have checked them
- // on-the-fly when doing full context prediction.
-
- //
- // In non-exact ambiguity detection mode, we might actually be able to
- // detect an exact ambiguity, but I'm not going to spend the cycles
- // needed to check. We only emit ambiguity warnings in exact ambiguity
- // mode.
- //
- // For example, we might know that we have conflicting configurations.
- // But, that does not mean that there is no way forward without a
- // conflict. It's possible to have non-conflicting alt subsets as in:
- //
- // altSubSets=[{1, 2}, {1, 2}, {1}, {1, 2}]
- //
- // from
- //
- // [(17,1,[5 $]), (13,1,[5 10 $]), (21,1,[5 10 $]), (11,1,[$]),
- // (13,2,[5 10 $]), (21,2,[5 10 $]), (11,2,[$])]
- //
- // In this case, (17,1,[5 $]) indicates there is some next sequence that
- // would resolve this without conflict to alternative 1. Any other viable
- // next sequence, however, is associated with a conflict. We stop
- // looking for input because no amount of further lookahead will alter
- // the fact that we should predict alternative 1. We just can't say for
- // sure that there is an ambiguity without looking further.
-
- p.ReportAmbiguity(dfa, D, startIndex, input.Index(), foundExactAmbig, reach.Alts(), reach)
-
- return predictedAlt, nil
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) computeReachSet(closure *ATNConfigSet, t int, fullCtx bool) *ATNConfigSet {
- if p.mergeCache == nil {
- p.mergeCache = NewJPCMap(ReachSetCollection, "Merge cache for computeReachSet()")
- }
- intermediate := NewATNConfigSet(fullCtx)
-
- // Configurations already in a rule stop state indicate reaching the end
- // of the decision rule (local context) or end of the start rule (full
- // context). Once reached, these configurations are never updated by a
- // closure operation, so they are handled separately for the performance
- // advantage of having a smaller intermediate set when calling closure.
- //
- // For full-context reach operations, separate handling is required to
- // ensure that the alternative matching the longest overall sequence is
- // chosen when multiple such configurations can match the input.
-
- var skippedStopStates []*ATNConfig
-
- // First figure out where we can reach on input t
- for _, c := range closure.configs {
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("testing " + p.GetTokenName(t) + " at " + c.String())
- }
-
- if _, ok := c.GetState().(*RuleStopState); ok {
- if fullCtx || t == TokenEOF {
- skippedStopStates = append(skippedStopStates, c)
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("added " + c.String() + " to SkippedStopStates")
- }
- }
- continue
- }
-
- for _, trans := range c.GetState().GetTransitions() {
- target := p.getReachableTarget(trans, t)
- if target != nil {
- cfg := NewATNConfig4(c, target)
- intermediate.Add(cfg, p.mergeCache)
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("added " + cfg.String() + " to intermediate")
- }
- }
- }
- }
-
- // Now figure out where the reach operation can take us...
- var reach *ATNConfigSet
-
- // This block optimizes the reach operation for intermediate sets which
- // trivially indicate a termination state for the overall
- // AdaptivePredict operation.
- //
- // The conditions assume that intermediate
- // contains all configurations relevant to the reach set, but this
- // condition is not true when one or more configurations have been
- // withheld in SkippedStopStates, or when the current symbol is EOF.
- //
- if skippedStopStates == nil && t != TokenEOF {
- if len(intermediate.configs) == 1 {
- // Don't pursue the closure if there is just one state.
- // It can only have one alternative, so just add it to the result.
- // Also don't pursue the closure if there is unique alternative
- // among the configurations.
- reach = intermediate
- } else if p.getUniqueAlt(intermediate) != ATNInvalidAltNumber {
- // Also don't pursue the closure if there is unique alternative
- // among the configurations.
- reach = intermediate
- }
- }
- // If the reach set could not be trivially determined, perform a closure
- // operation on the intermediate set to compute its initial value.
- //
- if reach == nil {
- reach = NewATNConfigSet(fullCtx)
- closureBusy := NewClosureBusy("ParserATNSimulator.computeReachSet() make a closureBusy")
- treatEOFAsEpsilon := t == TokenEOF
- amount := len(intermediate.configs)
- for k := 0; k < amount; k++ {
- p.closure(intermediate.configs[k], reach, closureBusy, false, fullCtx, treatEOFAsEpsilon)
- }
- }
- if t == TokenEOF {
- // After consuming EOF no additional input is possible, so we are
- // only interested in configurations which reached the end of the
- // decision rule (local context) or end of the start rule (full
- // context). Update reach to contain only these configurations. This
- // handles both explicit EOF transitions in the grammar and implicit
- // EOF transitions following the end of the decision or start rule.
- //
- // When reach==intermediate, no closure operation was performed. In
- // this case, removeAllConfigsNotInRuleStopState needs to check for
- // reachable rule stop states as well as configurations already in
- // a rule stop state.
- //
- // This is handled before the configurations in SkippedStopStates,
- // because any configurations potentially added from that list are
- // already guaranteed to meet this condition whether or not it is
- // required.
- //
- reach = p.removeAllConfigsNotInRuleStopState(reach, reach.Equals(intermediate))
- }
- // If SkippedStopStates!=nil, then it contains at least one
- // configuration. For full-context reach operations, these
- // configurations reached the end of the start rule, in which case we
- // only add them back to reach if no configuration during the current
- // closure operation reached such a state. This ensures AdaptivePredict
- // chooses an alternative Matching the longest overall sequence when
- // multiple alternatives are viable.
- //
- if skippedStopStates != nil && ((!fullCtx) || (!PredictionModehasConfigInRuleStopState(reach))) {
- for l := 0; l < len(skippedStopStates); l++ {
- reach.Add(skippedStopStates[l], p.mergeCache)
- }
- }
-
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("computeReachSet " + closure.String() + " -> " + reach.String())
- }
-
- if len(reach.configs) == 0 {
- return nil
- }
-
- return reach
-}
-
-// removeAllConfigsNotInRuleStopState returns a configuration set containing only the configurations from
-// configs which are in a [RuleStopState]. If all
-// configurations in configs are already in a rule stop state, this
-// method simply returns configs.
-//
-// When lookToEndOfRule is true, this method uses
-// [ATN].[NextTokens] for each configuration in configs which is
-// not already in a rule stop state to see if a rule stop state is reachable
-// from the configuration via epsilon-only transitions.
-//
-// When lookToEndOfRule is true, this method checks for rule stop states
-// reachable by epsilon-only transitions from each configuration in
-// configs.
-//
-// The func returns configs if all configurations in configs are in a
-// rule stop state, otherwise it returns a new configuration set containing only
-// the configurations from configs which are in a rule stop state
-func (p *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs *ATNConfigSet, lookToEndOfRule bool) *ATNConfigSet {
- if PredictionModeallConfigsInRuleStopStates(configs) {
- return configs
- }
- result := NewATNConfigSet(configs.fullCtx)
- for _, config := range configs.configs {
- if _, ok := config.GetState().(*RuleStopState); ok {
- result.Add(config, p.mergeCache)
- continue
- }
- if lookToEndOfRule && config.GetState().GetEpsilonOnlyTransitions() {
- NextTokens := p.atn.NextTokens(config.GetState(), nil)
- if NextTokens.contains(TokenEpsilon) {
- endOfRuleState := p.atn.ruleToStopState[config.GetState().GetRuleIndex()]
- result.Add(NewATNConfig4(config, endOfRuleState), p.mergeCache)
- }
- }
- }
- return result
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) computeStartState(a ATNState, ctx RuleContext, fullCtx bool) *ATNConfigSet {
- // always at least the implicit call to start rule
- initialContext := predictionContextFromRuleContext(p.atn, ctx)
- configs := NewATNConfigSet(fullCtx)
- if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("computeStartState from ATN state " + a.String() +
- " initialContext=" + initialContext.String())
- }
-
- for i := 0; i < len(a.GetTransitions()); i++ {
- target := a.GetTransitions()[i].getTarget()
- c := NewATNConfig6(target, i+1, initialContext)
- closureBusy := NewClosureBusy("ParserATNSimulator.computeStartState() make a closureBusy")
- p.closure(c, configs, closureBusy, true, fullCtx, false)
- }
- return configs
-}
-
-// applyPrecedenceFilter transforms the start state computed by
-// [computeStartState] to the special start state used by a
-// precedence [DFA] for a particular precedence value. The transformation
-// process applies the following changes to the start state's configuration
-// set.
-//
-// 1. Evaluate the precedence predicates for each configuration using
-// [SemanticContext].evalPrecedence.
-// 2. Remove all configurations which predict an alternative greater than
-// 1, for which another configuration that predicts alternative 1 is in the
-// same ATN state with the same prediction context.
-//
-// Transformation 2 is valid for the following reasons:
-//
-// - The closure block cannot contain any epsilon transitions which bypass
-// the body of the closure, so all states reachable via alternative 1 are
-// part of the precedence alternatives of the transformed left-recursive
-// rule.
-// - The "primary" portion of a left recursive rule cannot contain an
-// epsilon transition, so the only way an alternative other than 1 can exist
-// in a state that is also reachable via alternative 1 is by nesting calls
-// to the left-recursive rule, with the outer calls not being at the
-// preferred precedence level.
-//
-// The prediction context must be considered by this filter to address
-// situations like the following:
-//
-// grammar TA
-// prog: statement* EOF
-// statement: letterA | statement letterA 'b'
-// letterA: 'a'
-//
-// In the above grammar, the [ATN] state immediately before the token
-// reference 'a' in letterA is reachable from the left edge
-// of both the primary and closure blocks of the left-recursive rule
-// statement. The prediction context associated with each of these
-// configurations distinguishes between them, and prevents the alternative
-// which stepped out to prog, and then back in to statement
-// from being eliminated by the filter.
-//
-// The func returns the transformed configuration set representing the start state
-// for a precedence [DFA] at a particular precedence level (determined by
-// calling [Parser].getPrecedence).
-func (p *ParserATNSimulator) applyPrecedenceFilter(configs *ATNConfigSet) *ATNConfigSet {
-
- statesFromAlt1 := make(map[int]*PredictionContext)
- configSet := NewATNConfigSet(configs.fullCtx)
-
- for _, config := range configs.configs {
- // handle alt 1 first
- if config.GetAlt() != 1 {
- continue
- }
- updatedContext := config.GetSemanticContext().evalPrecedence(p.parser, p.outerContext)
- if updatedContext == nil {
- // the configuration was eliminated
- continue
- }
- statesFromAlt1[config.GetState().GetStateNumber()] = config.GetContext()
- if updatedContext != config.GetSemanticContext() {
- configSet.Add(NewATNConfig2(config, updatedContext), p.mergeCache)
- } else {
- configSet.Add(config, p.mergeCache)
- }
- }
- for _, config := range configs.configs {
-
- if config.GetAlt() == 1 {
- // already handled
- continue
- }
- // In the future, this elimination step could be updated to also
- // filter the prediction context for alternatives predicting alt>1
- // (basically a graph subtraction algorithm).
- if !config.getPrecedenceFilterSuppressed() {
- context := statesFromAlt1[config.GetState().GetStateNumber()]
- if context != nil && context.Equals(config.GetContext()) {
- // eliminated
- continue
- }
- }
- configSet.Add(config, p.mergeCache)
- }
- return configSet
-}
-
-func (p *ParserATNSimulator) getReachableTarget(trans Transition, ttype int) ATNState {
- if trans.Matches(ttype, 0, p.atn.maxTokenType) {
- return trans.getTarget()
- }
-
- return nil
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs *ATNConfigSet, nalts int) []SemanticContext {
-
- altToPred := make([]SemanticContext, nalts+1)
- for _, c := range configs.configs {
- if ambigAlts.contains(c.GetAlt()) {
- altToPred[c.GetAlt()] = SemanticContextorContext(altToPred[c.GetAlt()], c.GetSemanticContext())
- }
- }
- nPredAlts := 0
- for i := 1; i <= nalts; i++ {
- pred := altToPred[i]
- if pred == nil {
- altToPred[i] = SemanticContextNone
- } else if pred != SemanticContextNone {
- nPredAlts++
- }
- }
- // unambiguous alts are nil in altToPred
- if nPredAlts == 0 {
- altToPred = nil
- }
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("getPredsForAmbigAlts result " + fmt.Sprint(altToPred))
- }
- return altToPred
-}
-
-func (p *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altToPred []SemanticContext) []*PredPrediction {
- pairs := make([]*PredPrediction, 0)
- containsPredicate := false
- for i := 1; i < len(altToPred); i++ {
- pred := altToPred[i]
- // un-predicated is indicated by SemanticContextNONE
- if ambigAlts != nil && ambigAlts.contains(i) {
- pairs = append(pairs, NewPredPrediction(pred, i))
- }
- if pred != SemanticContextNone {
- containsPredicate = true
- }
- }
- if !containsPredicate {
- return nil
- }
- return pairs
-}
-
-// getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule is used to improve the localization of error messages by
-// choosing an alternative rather than panicking with a NoViableAltException in particular prediction scenarios where the
-// Error state was reached during [ATN] simulation.
-//
-// The default implementation of this method uses the following
-// algorithm to identify an [ATN] configuration which successfully parsed the
-// decision entry rule. Choosing such an alternative ensures that the
-// [ParserRuleContext] returned by the calling rule will be complete
-// and valid, and the syntax error will be Reported later at a more
-// localized location.
-//
-// - If a syntactically valid path or paths reach the end of the decision rule, and
-// they are semantically valid if predicated, return the min associated alt.
-// - Else, if a semantically invalid but syntactically valid path exist
-// or paths exist, return the minimum associated alt.
-// - Otherwise, return [ATNInvalidAltNumber].
-//
-// In some scenarios, the algorithm described above could predict an
-// alternative which will result in a [FailedPredicateException] in
-// the parser. Specifically, this could occur if the only configuration
-// capable of successfully parsing to the end of the decision rule is
-// blocked by a semantic predicate. By choosing this alternative within
-// [AdaptivePredict] instead of panicking with a [NoViableAltException], the resulting
-// [FailedPredicateException] in the parser will identify the specific
-// predicate which is preventing the parser from successfully parsing the
-// decision rule, which helps developers identify and correct logic errors
-// in semantic predicates.
-//
-// pass in the configs holding ATN configurations which were valid immediately before
-// the ERROR state was reached, outerContext as the initial parser context from the paper
-// or the parser stack at the instant before prediction commences.
-//
-// The func returns the value to return from [AdaptivePredict], or
-// [ATNInvalidAltNumber] if a suitable alternative was not
-// identified and [AdaptivePredict] should report an error instead.
-func (p *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(configs *ATNConfigSet, outerContext ParserRuleContext) int {
- cfgs := p.splitAccordingToSemanticValidity(configs, outerContext)
- semValidConfigs := cfgs[0]
- semInvalidConfigs := cfgs[1]
- alt := p.GetAltThatFinishedDecisionEntryRule(semValidConfigs)
- if alt != ATNInvalidAltNumber { // semantically/syntactically viable path exists
- return alt
- }
- // Is there a syntactically valid path with a failed pred?
- if len(semInvalidConfigs.configs) > 0 {
- alt = p.GetAltThatFinishedDecisionEntryRule(semInvalidConfigs)
- if alt != ATNInvalidAltNumber { // syntactically viable path exists
- return alt
- }
- }
- return ATNInvalidAltNumber
-}
-
-func (p *ParserATNSimulator) GetAltThatFinishedDecisionEntryRule(configs *ATNConfigSet) int {
- alts := NewIntervalSet()
-
- for _, c := range configs.configs {
- _, ok := c.GetState().(*RuleStopState)
-
- if c.GetReachesIntoOuterContext() > 0 || (ok && c.GetContext().hasEmptyPath()) {
- alts.addOne(c.GetAlt())
- }
- }
- if alts.length() == 0 {
- return ATNInvalidAltNumber
- }
-
- return alts.first()
-}
-
-// Walk the list of configurations and split them according to
-// those that have preds evaluating to true/false. If no pred, assume
-// true pred and include in succeeded set. Returns Pair of sets.
-//
-// Create a NewSet so as not to alter the incoming parameter.
-//
-// Assumption: the input stream has been restored to the starting point of
-// prediction, which is where predicates need to evaluate.
-
-type ATNConfigSetPair struct {
- item0, item1 *ATNConfigSet
-}
-
-func (p *ParserATNSimulator) splitAccordingToSemanticValidity(configs *ATNConfigSet, outerContext ParserRuleContext) []*ATNConfigSet {
- succeeded := NewATNConfigSet(configs.fullCtx)
- failed := NewATNConfigSet(configs.fullCtx)
-
- for _, c := range configs.configs {
- if c.GetSemanticContext() != SemanticContextNone {
- predicateEvaluationResult := c.GetSemanticContext().evaluate(p.parser, outerContext)
- if predicateEvaluationResult {
- succeeded.Add(c, nil)
- } else {
- failed.Add(c, nil)
- }
- } else {
- succeeded.Add(c, nil)
- }
- }
- return []*ATNConfigSet{succeeded, failed}
-}
-
-// evalSemanticContext looks through a list of predicate/alt pairs, returning alts for the
-// pairs that win. A [SemanticContextNone] predicate indicates an alt containing an
-// un-predicated config which behaves as "always true." If !complete
-// then we stop at the first predicate that evaluates to true. This
-// includes pairs with nil predicates.
-//
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPrediction, outerContext ParserRuleContext, complete bool) *BitSet {
- predictions := NewBitSet()
- for i := 0; i < len(predPredictions); i++ {
- pair := predPredictions[i]
- if pair.pred == SemanticContextNone {
- predictions.add(pair.alt)
- if !complete {
- break
- }
- continue
- }
-
- predicateEvaluationResult := pair.pred.evaluate(p.parser, outerContext)
- if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorDFADebug {
- fmt.Println("eval pred " + pair.String() + "=" + fmt.Sprint(predicateEvaluationResult))
- }
- if predicateEvaluationResult {
- if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorDFADebug {
- fmt.Println("PREDICT " + fmt.Sprint(pair.alt))
- }
- predictions.add(pair.alt)
- if !complete {
- break
- }
- }
- }
- return predictions
-}
-
-func (p *ParserATNSimulator) closure(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx, treatEOFAsEpsilon bool) {
- initialDepth := 0
- p.closureCheckingStopState(config, configs, closureBusy, collectPredicates,
- fullCtx, initialDepth, treatEOFAsEpsilon)
-}
-
-func (p *ParserATNSimulator) closureCheckingStopState(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("closure(" + config.String() + ")")
- }
-
- var stack []*ATNConfig
- visited := make(map[*ATNConfig]bool)
-
- stack = append(stack, config)
-
- for len(stack) > 0 {
- currConfig := stack[len(stack)-1]
- stack = stack[:len(stack)-1]
-
- if _, ok := visited[currConfig]; ok {
- continue
- }
- visited[currConfig] = true
-
- if _, ok := currConfig.GetState().(*RuleStopState); ok {
- // We hit rule end. If we have context info, use it
- // run thru all possible stack tops in ctx
- if !currConfig.GetContext().isEmpty() {
- for i := 0; i < currConfig.GetContext().length(); i++ {
- if currConfig.GetContext().getReturnState(i) == BasePredictionContextEmptyReturnState {
- if fullCtx {
- nb := NewATNConfig1(currConfig, currConfig.GetState(), BasePredictionContextEMPTY)
- configs.Add(nb, p.mergeCache)
- continue
- } else {
- // we have no context info, just chase follow links (if greedy)
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("FALLING off rule " + p.getRuleName(currConfig.GetState().GetRuleIndex()))
- }
- p.closureWork(currConfig, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
- }
- continue
- }
- returnState := p.atn.states[currConfig.GetContext().getReturnState(i)]
- newContext := currConfig.GetContext().GetParent(i) // "pop" return state
-
- c := NewATNConfig5(returnState, currConfig.GetAlt(), newContext, currConfig.GetSemanticContext())
- // While we have context to pop back from, we may have
- // gotten that context AFTER having falling off a rule.
- // Make sure we track that we are now out of context.
- c.SetReachesIntoOuterContext(currConfig.GetReachesIntoOuterContext())
-
- stack = append(stack, c)
- }
- continue
- } else if fullCtx {
- // reached end of start rule
- configs.Add(currConfig, p.mergeCache)
- continue
- } else {
- // else if we have no context info, just chase follow links (if greedy)
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("FALLING off rule " + p.getRuleName(currConfig.GetState().GetRuleIndex()))
- }
- }
- }
-
- p.closureWork(currConfig, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
- }
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) closureCheckingStopStateRecursive(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("closure(" + config.String() + ")")
- }
-
- if _, ok := config.GetState().(*RuleStopState); ok {
- // We hit rule end. If we have context info, use it
- // run thru all possible stack tops in ctx
- if !config.GetContext().isEmpty() {
- for i := 0; i < config.GetContext().length(); i++ {
- if config.GetContext().getReturnState(i) == BasePredictionContextEmptyReturnState {
- if fullCtx {
- nb := NewATNConfig1(config, config.GetState(), BasePredictionContextEMPTY)
- configs.Add(nb, p.mergeCache)
- continue
- } else {
- // we have no context info, just chase follow links (if greedy)
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex()))
- }
- p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
- }
- continue
- }
- returnState := p.atn.states[config.GetContext().getReturnState(i)]
- newContext := config.GetContext().GetParent(i) // "pop" return state
-
- c := NewATNConfig5(returnState, config.GetAlt(), newContext, config.GetSemanticContext())
- // While we have context to pop back from, we may have
- // gotten that context AFTER having falling off a rule.
- // Make sure we track that we are now out of context.
- c.SetReachesIntoOuterContext(config.GetReachesIntoOuterContext())
- p.closureCheckingStopState(c, configs, closureBusy, collectPredicates, fullCtx, depth-1, treatEOFAsEpsilon)
- }
- return
- } else if fullCtx {
- // reached end of start rule
- configs.Add(config, p.mergeCache)
- return
- } else {
- // else if we have no context info, just chase follow links (if greedy)
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex()))
- }
- }
- }
- p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
-}
-
-// Do the actual work of walking epsilon edges
-//
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) closureWork(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
- state := config.GetState()
- // optimization
- if !state.GetEpsilonOnlyTransitions() {
- configs.Add(config, p.mergeCache)
- // make sure to not return here, because EOF transitions can act as
- // both epsilon transitions and non-epsilon transitions.
- }
- for i := 0; i < len(state.GetTransitions()); i++ {
- if i == 0 && p.canDropLoopEntryEdgeInLeftRecursiveRule(config) {
- continue
- }
-
- t := state.GetTransitions()[i]
- _, ok := t.(*ActionTransition)
- continueCollecting := collectPredicates && !ok
- c := p.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEOFAsEpsilon)
- if c != nil {
- newDepth := depth
-
- if _, ok := config.GetState().(*RuleStopState); ok {
- // Target fell off the end of the rule; mark the resulting c as having dipped into
- // the outer context. We can't get here if the incoming config was a rule stop
- // and we had context. Track how far we dip into the outer context - it might
- // come in handy, and we avoid evaluating context-dependent
- // preds if this is > 0.
-
- if p.dfa != nil && p.dfa.getPrecedenceDfa() {
- if t.(*EpsilonTransition).outermostPrecedenceReturn == p.dfa.atnStartState.GetRuleIndex() {
- c.setPrecedenceFilterSuppressed(true)
- }
- }
-
- c.SetReachesIntoOuterContext(c.GetReachesIntoOuterContext() + 1)
-
- _, present := closureBusy.Put(c)
- if present {
- // avoid infinite recursion for right-recursive rules
- continue
- }
-
- configs.dipsIntoOuterContext = true // TODO: can remove? only care when we add to set per middle of this method
- newDepth--
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("dips into outer ctx: " + c.String())
- }
- } else {
-
- if !t.getIsEpsilon() {
- _, present := closureBusy.Put(c)
- if present {
- // avoid infinite recursion for EOF* and EOF+
- continue
- }
- }
- if _, ok := t.(*RuleTransition); ok {
- // latch when newDepth goes negative - once we step out of the entry context we can't return
- if newDepth >= 0 {
- newDepth++
- }
- }
- }
- p.closureCheckingStopState(c, configs, closureBusy, continueCollecting, fullCtx, newDepth, treatEOFAsEpsilon)
- }
- }
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) canDropLoopEntryEdgeInLeftRecursiveRule(config *ATNConfig) bool {
- if !runtimeConfig.lRLoopEntryBranchOpt {
- return false
- }
-
- _p := config.GetState()
-
- // First check to see if we are in StarLoopEntryState generated during
- // left-recursion elimination. For efficiency, also check if
- // the context has an empty stack case. If so, it would mean
- // global FOLLOW so we can't perform optimization
- if _p.GetStateType() != ATNStateStarLoopEntry {
- return false
- }
- startLoop, ok := _p.(*StarLoopEntryState)
- if !ok {
- return false
- }
- if !startLoop.precedenceRuleDecision ||
- config.GetContext().isEmpty() ||
- config.GetContext().hasEmptyPath() {
- return false
- }
-
- // Require all return states to return back to the same rule
- // that _p is in.
- numCtxs := config.GetContext().length()
- for i := 0; i < numCtxs; i++ {
- returnState := p.atn.states[config.GetContext().getReturnState(i)]
- if returnState.GetRuleIndex() != _p.GetRuleIndex() {
- return false
- }
- }
- x := _p.GetTransitions()[0].getTarget()
- decisionStartState := x.(BlockStartState)
- blockEndStateNum := decisionStartState.getEndState().stateNumber
- blockEndState := p.atn.states[blockEndStateNum].(*BlockEndState)
-
- // Verify that the top of each stack context leads to loop entry/exit
- // state through epsilon edges and w/o leaving rule.
-
- for i := 0; i < numCtxs; i++ { // for each stack context
- returnStateNumber := config.GetContext().getReturnState(i)
- returnState := p.atn.states[returnStateNumber]
-
- // all states must have single outgoing epsilon edge
- if len(returnState.GetTransitions()) != 1 || !returnState.GetTransitions()[0].getIsEpsilon() {
- return false
- }
-
- // Look for prefix op case like 'not expr', (' type ')' expr
- returnStateTarget := returnState.GetTransitions()[0].getTarget()
- if returnState.GetStateType() == ATNStateBlockEnd && returnStateTarget == _p {
- continue
- }
-
- // Look for 'expr op expr' or case where expr's return state is block end
- // of (...)* internal block; the block end points to loop back
- // which points to _p, but we don't need to check that
- if returnState == blockEndState {
- continue
- }
-
- // Look for ternary expr ? expr : expr. The return state points at block end,
- // which points at loop entry state
- if returnStateTarget == blockEndState {
- continue
- }
-
- // Look for complex prefix 'between expr and expr' case where 2nd expr's
- // return state points at block end state of (...)* internal block
- if returnStateTarget.GetStateType() == ATNStateBlockEnd &&
- len(returnStateTarget.GetTransitions()) == 1 &&
- returnStateTarget.GetTransitions()[0].getIsEpsilon() &&
- returnStateTarget.GetTransitions()[0].getTarget() == _p {
- continue
- }
-
- // anything else ain't conforming
- return false
- }
-
- return true
-}
-
-func (p *ParserATNSimulator) getRuleName(index int) string {
- if p.parser != nil && index >= 0 {
- return p.parser.GetRuleNames()[index]
- }
- var sb strings.Builder
- sb.Grow(32)
-
- sb.WriteString("')
- return sb.String()
-}
-
-func (p *ParserATNSimulator) getEpsilonTarget(config *ATNConfig, t Transition, collectPredicates, inContext, fullCtx, treatEOFAsEpsilon bool) *ATNConfig {
-
- switch t.getSerializationType() {
- case TransitionRULE:
- return p.ruleTransition(config, t.(*RuleTransition))
- case TransitionPRECEDENCE:
- return p.precedenceTransition(config, t.(*PrecedencePredicateTransition), collectPredicates, inContext, fullCtx)
- case TransitionPREDICATE:
- return p.predTransition(config, t.(*PredicateTransition), collectPredicates, inContext, fullCtx)
- case TransitionACTION:
- return p.actionTransition(config, t.(*ActionTransition))
- case TransitionEPSILON:
- return NewATNConfig4(config, t.getTarget())
- case TransitionATOM, TransitionRANGE, TransitionSET:
- // EOF transitions act like epsilon transitions after the first EOF
- // transition is traversed
- if treatEOFAsEpsilon {
- if t.Matches(TokenEOF, 0, 1) {
- return NewATNConfig4(config, t.getTarget())
- }
- }
- return nil
- default:
- return nil
- }
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) actionTransition(config *ATNConfig, t *ActionTransition) *ATNConfig {
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("ACTION edge " + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.actionIndex))
- }
- return NewATNConfig4(config, t.getTarget())
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) precedenceTransition(config *ATNConfig,
- pt *PrecedencePredicateTransition, collectPredicates, inContext, fullCtx bool) *ATNConfig {
-
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("PRED (collectPredicates=" + fmt.Sprint(collectPredicates) + ") " +
- strconv.Itoa(pt.precedence) + ">=_p, ctx dependent=true")
- if p.parser != nil {
- fmt.Println("context surrounding pred is " + fmt.Sprint(p.parser.GetRuleInvocationStack(nil)))
- }
- }
- var c *ATNConfig
- if collectPredicates && inContext {
- if fullCtx {
- // In full context mode, we can evaluate predicates on-the-fly
- // during closure, which dramatically reduces the size of
- // the config sets. It also obviates the need to test predicates
- // later during conflict resolution.
- currentPosition := p.input.Index()
- p.input.Seek(p.startIndex)
- predSucceeds := pt.getPredicate().evaluate(p.parser, p.outerContext)
- p.input.Seek(currentPosition)
- if predSucceeds {
- c = NewATNConfig4(config, pt.getTarget()) // no pred context
- }
- } else {
- newSemCtx := SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate())
- c = NewATNConfig3(config, pt.getTarget(), newSemCtx)
- }
- } else {
- c = NewATNConfig4(config, pt.getTarget())
- }
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("runtimeConfig from pred transition=" + c.String())
- }
- return c
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) predTransition(config *ATNConfig, pt *PredicateTransition, collectPredicates, inContext, fullCtx bool) *ATNConfig {
-
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("PRED (collectPredicates=" + fmt.Sprint(collectPredicates) + ") " + strconv.Itoa(pt.ruleIndex) +
- ":" + strconv.Itoa(pt.predIndex) + ", ctx dependent=" + fmt.Sprint(pt.isCtxDependent))
- if p.parser != nil {
- fmt.Println("context surrounding pred is " + fmt.Sprint(p.parser.GetRuleInvocationStack(nil)))
- }
- }
- var c *ATNConfig
- if collectPredicates && (!pt.isCtxDependent || inContext) {
- if fullCtx {
- // In full context mode, we can evaluate predicates on-the-fly
- // during closure, which dramatically reduces the size of
- // the config sets. It also obviates the need to test predicates
- // later during conflict resolution.
- currentPosition := p.input.Index()
- p.input.Seek(p.startIndex)
- predSucceeds := pt.getPredicate().evaluate(p.parser, p.outerContext)
- p.input.Seek(currentPosition)
- if predSucceeds {
- c = NewATNConfig4(config, pt.getTarget()) // no pred context
- }
- } else {
- newSemCtx := SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate())
- c = NewATNConfig3(config, pt.getTarget(), newSemCtx)
- }
- } else {
- c = NewATNConfig4(config, pt.getTarget())
- }
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("config from pred transition=" + c.String())
- }
- return c
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) ruleTransition(config *ATNConfig, t *RuleTransition) *ATNConfig {
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("CALL rule " + p.getRuleName(t.getTarget().GetRuleIndex()) + ", ctx=" + config.GetContext().String())
- }
- returnState := t.followState
- newContext := SingletonBasePredictionContextCreate(config.GetContext(), returnState.GetStateNumber())
- return NewATNConfig1(config, t.getTarget(), newContext)
-}
-
-func (p *ParserATNSimulator) getConflictingAlts(configs *ATNConfigSet) *BitSet {
- altsets := PredictionModegetConflictingAltSubsets(configs)
- return PredictionModeGetAlts(altsets)
-}
-
-// getConflictingAltsOrUniqueAlt Sam pointed out a problem with the previous definition, v3, of
-// ambiguous states. If we have another state associated with conflicting
-// alternatives, we should keep going. For example, the following grammar
-//
-// s : (ID | ID ID?) ;
-//
-// When the [ATN] simulation reaches the state before ;, it has a [DFA]
-// state that looks like:
-//
-// [12|1|[], 6|2|[], 12|2|[]].
-//
-// Naturally
-//
-// 12|1|[] and 12|2|[]
-//
-// conflict, but we cannot stop processing this node
-// because alternative two has another way to continue, via
-//
-// [6|2|[]].
-//
-// The key is that we have a single state that has configs only associated
-// with a single alternative, 2, and crucially the state transitions
-// among the configurations are all non-epsilon transitions. That means
-// we don't consider any conflicts that include alternative 2. So, we
-// ignore the conflict between alts 1 and 2. We ignore a set of
-// conflicting alts when there is an intersection with an alternative
-// associated with a single alt state in the state config-list map.
-//
-// It's also the case that we might have two conflicting configurations but
-// also a 3rd non-conflicting configuration for a different alternative:
-//
-// [1|1|[], 1|2|[], 8|3|[]].
-//
-// This can come about from grammar:
-//
-// a : A | A | A B
-//
-// After Matching input A, we reach the stop state for rule A, state 1.
-// State 8 is the state right before B. Clearly alternatives 1 and 2
-// conflict and no amount of further lookahead will separate the two.
-// However, alternative 3 will be able to continue, so we do not
-// stop working on this state.
-//
-// In the previous example, we're concerned
-// with states associated with the conflicting alternatives. Here alt
-// 3 is not associated with the conflicting configs, but since we can continue
-// looking for input reasonably, I don't declare the state done. We
-// ignore a set of conflicting alts when we have an alternative
-// that we still need to pursue.
-func (p *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs *ATNConfigSet) *BitSet {
- var conflictingAlts *BitSet
- if configs.uniqueAlt != ATNInvalidAltNumber {
- conflictingAlts = NewBitSet()
- conflictingAlts.add(configs.uniqueAlt)
- } else {
- conflictingAlts = configs.conflictingAlts
- }
- return conflictingAlts
-}
-
-func (p *ParserATNSimulator) GetTokenName(t int) string {
- if t == TokenEOF {
- return "EOF"
- }
-
- if p.parser != nil && p.parser.GetLiteralNames() != nil && t < len(p.parser.GetLiteralNames()) {
- return p.parser.GetLiteralNames()[t] + "<" + strconv.Itoa(t) + ">"
- }
-
- if p.parser != nil && p.parser.GetSymbolicNames() != nil && t < len(p.parser.GetSymbolicNames()) {
- return p.parser.GetSymbolicNames()[t] + "<" + strconv.Itoa(t) + ">"
- }
-
- return strconv.Itoa(t)
-}
-
-func (p *ParserATNSimulator) getLookaheadName(input TokenStream) string {
- return p.GetTokenName(input.LA(1))
-}
-
-// Used for debugging in [AdaptivePredict] around [execATN], but I cut
-// it out for clarity now that alg. works well. We can leave this
-// "dead" code for a bit.
-func (p *ParserATNSimulator) dumpDeadEndConfigs(_ *NoViableAltException) {
-
- panic("Not implemented")
-
- // fmt.Println("dead end configs: ")
- // var decs = nvae.deadEndConfigs
- //
- // for i := 0; i < len(decs); i++ {
- //   c := decs[i]
- //   var trans = "no edges"
- //   if len(c.state.GetTransitions()) > 0 {
- // var t = c.state.GetTransitions()[0]
- // if t2, ok := t.(*AtomTransition); ok {
- // trans = "Atom "+ p.GetTokenName(t2.label)
- // } else if t3, ok := t.(SetTransition); ok {
- // _, ok := t.(*NotSetTransition)
- //
- // var s string
- // if (ok){
- // s = "~"
- // }
- //
- // trans = s + "Set " + t3.set
- // }
- // }
- // fmt.Errorf(c.String(p.parser, true) + ":" + trans)
- // }
-}
-
-func (p *ParserATNSimulator) noViableAlt(input TokenStream, outerContext ParserRuleContext, configs *ATNConfigSet, startIndex int) *NoViableAltException {
- return NewNoViableAltException(p.parser, input, input.Get(startIndex), input.LT(1), configs, outerContext)
-}
-
-func (p *ParserATNSimulator) getUniqueAlt(configs *ATNConfigSet) int {
- alt := ATNInvalidAltNumber
- for _, c := range configs.configs {
- if alt == ATNInvalidAltNumber {
- alt = c.GetAlt() // found first alt
- } else if c.GetAlt() != alt {
- return ATNInvalidAltNumber
- }
- }
- return alt
-}
-
-// Add an edge to the DFA, if possible. This method calls
-// {@link //addDFAState} to ensure the {@code to} state is present in the
-// DFA. If {@code from} is {@code nil}, or if {@code t} is outside the
-// range of edges that can be represented in the DFA tables, this method
-// returns without adding the edge to the DFA.
-//
-// If {@code to} is {@code nil}, this method returns {@code nil}.
-// Otherwise, this method returns the {@link DFAState} returned by calling
-// {@link //addDFAState} for the {@code to} state.
-//
-// @param dfa The DFA
-// @param from The source state for the edge
-// @param t The input symbol
-// @param to The target state for the edge
-//
-// @return If {@code to} is {@code nil}, this method returns {@code nil};
-// otherwise this method returns the result of calling {@link //addDFAState}
-// on {@code to}
-//
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFAState) *DFAState {
- if runtimeConfig.parserATNSimulatorDebug {
- fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + p.GetTokenName(t))
- }
- if to == nil {
- return nil
- }
- p.atn.stateMu.Lock()
- to = p.addDFAState(dfa, to) // used existing if possible not incoming
- p.atn.stateMu.Unlock()
- if from == nil || t < -1 || t > p.atn.maxTokenType {
- return to
- }
- p.atn.edgeMu.Lock()
- if from.getEdges() == nil {
- from.setEdges(make([]*DFAState, p.atn.maxTokenType+1+1))
- }
- from.setIthEdge(t+1, to) // connect
- p.atn.edgeMu.Unlock()
-
- if runtimeConfig.parserATNSimulatorDebug {
- var names []string
- if p.parser != nil {
- names = p.parser.GetLiteralNames()
- }
-
- fmt.Println("DFA=\n" + dfa.String(names, nil))
- }
- return to
-}
-
-// addDFAState adds state D to the [DFA] if it is not already present, and returns
-// the actual instance stored in the [DFA]. If a state equivalent to D
-// is already in the [DFA], the existing state is returned. Otherwise, this
-// method returns D after adding it to the [DFA].
-//
-// If D is [ATNSimulatorError], this method returns [ATNSimulatorError] and
-// does not change the DFA.
-//
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) addDFAState(dfa *DFA, d *DFAState) *DFAState {
- if d == ATNSimulatorError {
- return d
- }
-
- existing, present := dfa.Get(d)
- if present {
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Print("addDFAState " + d.String() + " exists")
- }
- return existing
- }
-
- // The state will be added if not already there or we will be given back the existing state struct
- // if it is present.
- //
- d.stateNumber = dfa.Len()
- if !d.configs.readOnly {
- d.configs.OptimizeConfigs(&p.BaseATNSimulator)
- d.configs.readOnly = true
- d.configs.configLookup = nil
- }
- dfa.Put(d)
-
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("addDFAState new " + d.String())
- }
-
- return d
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) ReportAttemptingFullContext(dfa *DFA, conflictingAlts *BitSet, configs *ATNConfigSet, startIndex, stopIndex int) {
- if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorRetryDebug {
- interval := NewInterval(startIndex, stopIndex+1)
- fmt.Println("ReportAttemptingFullContext decision=" + strconv.Itoa(dfa.decision) + ":" + configs.String() +
- ", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval))
- }
- if p.parser != nil {
- p.parser.GetErrorListenerDispatch().ReportAttemptingFullContext(p.parser, dfa, startIndex, stopIndex, conflictingAlts, configs)
- }
-}
-
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) ReportContextSensitivity(dfa *DFA, prediction int, configs *ATNConfigSet, startIndex, stopIndex int) {
- if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorRetryDebug {
- interval := NewInterval(startIndex, stopIndex+1)
- fmt.Println("ReportContextSensitivity decision=" + strconv.Itoa(dfa.decision) + ":" + configs.String() +
- ", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval))
- }
- if p.parser != nil {
- p.parser.GetErrorListenerDispatch().ReportContextSensitivity(p.parser, dfa, startIndex, stopIndex, prediction, configs)
- }
-}
-
-// ReportAmbiguity reports an ambiguity in the parse, which shows that the parser will explore a different route.
-//
-// When parsing is context-sensitive, we know it's an ambiguity rather than a conflict or error, but we can report it to the developer
-// so that they can see that this is happening and can take action if they want to.
-//
-//goland:noinspection GoBoolExpressions
-func (p *ParserATNSimulator) ReportAmbiguity(dfa *DFA, _ *DFAState, startIndex, stopIndex int,
- exact bool, ambigAlts *BitSet, configs *ATNConfigSet) {
- if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorRetryDebug {
- interval := NewInterval(startIndex, stopIndex+1)
- fmt.Println("ReportAmbiguity " + ambigAlts.String() + ":" + configs.String() +
- ", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval))
- }
- if p.parser != nil {
- p.parser.GetErrorListenerDispatch().ReportAmbiguity(p.parser, dfa, startIndex, stopIndex, exact, ambigAlts, configs)
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go b/vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go
deleted file mode 100644
index c249bc138..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go
+++ /dev/null
@@ -1,421 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "reflect"
- "strconv"
-)
-
-type ParserRuleContext interface {
- RuleContext
-
- SetException(RecognitionException)
-
- AddTokenNode(token Token) *TerminalNodeImpl
- AddErrorNode(badToken Token) *ErrorNodeImpl
-
- EnterRule(listener ParseTreeListener)
- ExitRule(listener ParseTreeListener)
-
- SetStart(Token)
- GetStart() Token
-
- SetStop(Token)
- GetStop() Token
-
- AddChild(child RuleContext) RuleContext
- RemoveLastChild()
-}
-
-type BaseParserRuleContext struct {
- parentCtx RuleContext
- invokingState int
- RuleIndex int
-
- start, stop Token
- exception RecognitionException
- children []Tree
-}
-
-func NewBaseParserRuleContext(parent ParserRuleContext, invokingStateNumber int) *BaseParserRuleContext {
- prc := new(BaseParserRuleContext)
- InitBaseParserRuleContext(prc, parent, invokingStateNumber)
- return prc
-}
-
-func InitBaseParserRuleContext(prc *BaseParserRuleContext, parent ParserRuleContext, invokingStateNumber int) {
- // What context invoked this rule?
- prc.parentCtx = parent
-
- // What state invoked the rule associated with this context?
- // The "return address" is the followState of invokingState.
- // If parent is nil, this should be -1.
- if parent == nil {
- prc.invokingState = -1
- } else {
- prc.invokingState = invokingStateNumber
- }
-
- prc.RuleIndex = -1
- // * If we are debugging or building a parse tree for a Visitor,
- // we need to track all of the tokens and rule invocations associated
- // with this rule's context. This is empty for parsing w/o tree construction
- // because we don't need to track the details about
- // how we parse this rule.
- // /
- prc.children = nil
- prc.start = nil
- prc.stop = nil
- // The exception that forced this rule to return. If the rule successfully
- // completed, this is nil.
- prc.exception = nil
-}
-
-func (prc *BaseParserRuleContext) SetException(e RecognitionException) {
- prc.exception = e
-}
-
-func (prc *BaseParserRuleContext) GetChildren() []Tree {
- return prc.children
-}
-
-func (prc *BaseParserRuleContext) CopyFrom(ctx *BaseParserRuleContext) {
- // from RuleContext
- prc.parentCtx = ctx.parentCtx
- prc.invokingState = ctx.invokingState
- prc.children = nil
- prc.start = ctx.start
- prc.stop = ctx.stop
-}
-
-func (prc *BaseParserRuleContext) GetText() string {
- if prc.GetChildCount() == 0 {
- return ""
- }
-
- var s string
- for _, child := range prc.children {
- s += child.(ParseTree).GetText()
- }
-
- return s
-}
-
-// EnterRule is called when any rule is entered.
-func (prc *BaseParserRuleContext) EnterRule(_ ParseTreeListener) {
-}
-
-// ExitRule is called when any rule is exited.
-func (prc *BaseParserRuleContext) ExitRule(_ ParseTreeListener) {
-}
-
-// * Does not set the parent link; other add methods do that
-func (prc *BaseParserRuleContext) addTerminalNodeChild(child TerminalNode) TerminalNode {
- if prc.children == nil {
- prc.children = make([]Tree, 0)
- }
- if child == nil {
- panic("Child may not be null")
- }
- prc.children = append(prc.children, child)
- return child
-}
-
-func (prc *BaseParserRuleContext) AddChild(child RuleContext) RuleContext {
- if prc.children == nil {
- prc.children = make([]Tree, 0)
- }
- if child == nil {
- panic("Child may not be null")
- }
- prc.children = append(prc.children, child)
- return child
-}
-
-// RemoveLastChild is used by [EnterOuterAlt] to toss out a [RuleContext] previously added as
-// we entered a rule. If we have a label, we will need to remove
-// the generic ruleContext object.
-func (prc *BaseParserRuleContext) RemoveLastChild() {
- if prc.children != nil && len(prc.children) > 0 {
- prc.children = prc.children[0 : len(prc.children)-1]
- }
-}
-
-func (prc *BaseParserRuleContext) AddTokenNode(token Token) *TerminalNodeImpl {
-
- node := NewTerminalNodeImpl(token)
- prc.addTerminalNodeChild(node)
- node.parentCtx = prc
- return node
-
-}
-
-func (prc *BaseParserRuleContext) AddErrorNode(badToken Token) *ErrorNodeImpl {
- node := NewErrorNodeImpl(badToken)
- prc.addTerminalNodeChild(node)
- node.parentCtx = prc
- return node
-}
-
-func (prc *BaseParserRuleContext) GetChild(i int) Tree {
- if prc.children != nil && len(prc.children) >= i {
- return prc.children[i]
- }
-
- return nil
-}
-
-func (prc *BaseParserRuleContext) GetChildOfType(i int, childType reflect.Type) RuleContext {
- if childType == nil {
- return prc.GetChild(i).(RuleContext)
- }
-
- for j := 0; j < len(prc.children); j++ {
- child := prc.children[j]
- if reflect.TypeOf(child) == childType {
- if i == 0 {
- return child.(RuleContext)
- }
-
- i--
- }
- }
-
- return nil
-}
-
-func (prc *BaseParserRuleContext) ToStringTree(ruleNames []string, recog Recognizer) string {
- return TreesStringTree(prc, ruleNames, recog)
-}
-
-func (prc *BaseParserRuleContext) GetRuleContext() RuleContext {
- return prc
-}
-
-func (prc *BaseParserRuleContext) Accept(visitor ParseTreeVisitor) interface{} {
- return visitor.VisitChildren(prc)
-}
-
-func (prc *BaseParserRuleContext) SetStart(t Token) {
- prc.start = t
-}
-
-func (prc *BaseParserRuleContext) GetStart() Token {
- return prc.start
-}
-
-func (prc *BaseParserRuleContext) SetStop(t Token) {
- prc.stop = t
-}
-
-func (prc *BaseParserRuleContext) GetStop() Token {
- return prc.stop
-}
-
-func (prc *BaseParserRuleContext) GetToken(ttype int, i int) TerminalNode {
-
- for j := 0; j < len(prc.children); j++ {
- child := prc.children[j]
- if c2, ok := child.(TerminalNode); ok {
- if c2.GetSymbol().GetTokenType() == ttype {
- if i == 0 {
- return c2
- }
-
- i--
- }
- }
- }
- return nil
-}
-
-func (prc *BaseParserRuleContext) GetTokens(ttype int) []TerminalNode {
- if prc.children == nil {
- return make([]TerminalNode, 0)
- }
-
- tokens := make([]TerminalNode, 0)
-
- for j := 0; j < len(prc.children); j++ {
- child := prc.children[j]
- if tchild, ok := child.(TerminalNode); ok {
- if tchild.GetSymbol().GetTokenType() == ttype {
- tokens = append(tokens, tchild)
- }
- }
- }
-
- return tokens
-}
-
-func (prc *BaseParserRuleContext) GetPayload() interface{} {
- return prc
-}
-
-func (prc *BaseParserRuleContext) getChild(ctxType reflect.Type, i int) RuleContext {
- if prc.children == nil || i < 0 || i >= len(prc.children) {
- return nil
- }
-
- j := -1 // what element have we found with ctxType?
- for _, o := range prc.children {
-
- childType := reflect.TypeOf(o)
-
- if childType.Implements(ctxType) {
- j++
- if j == i {
- return o.(RuleContext)
- }
- }
- }
- return nil
-}
-
-// Go lacks generics, so it's not possible for us to return the child with the correct type, but we do
-// check for convertibility
-
-func (prc *BaseParserRuleContext) GetTypedRuleContext(ctxType reflect.Type, i int) RuleContext {
- return prc.getChild(ctxType, i)
-}
-
-func (prc *BaseParserRuleContext) GetTypedRuleContexts(ctxType reflect.Type) []RuleContext {
- if prc.children == nil {
- return make([]RuleContext, 0)
- }
-
- contexts := make([]RuleContext, 0)
-
- for _, child := range prc.children {
- childType := reflect.TypeOf(child)
-
- if childType.ConvertibleTo(ctxType) {
- contexts = append(contexts, child.(RuleContext))
- }
- }
- return contexts
-}
-
-func (prc *BaseParserRuleContext) GetChildCount() int {
- if prc.children == nil {
- return 0
- }
-
- return len(prc.children)
-}
-
-func (prc *BaseParserRuleContext) GetSourceInterval() Interval {
- if prc.start == nil || prc.stop == nil {
- return TreeInvalidInterval
- }
-
- return NewInterval(prc.start.GetTokenIndex(), prc.stop.GetTokenIndex())
-}
-
-//need to manage circular dependencies, so export now
-
-// Print out a whole tree, not just a node, in LISP format
-// (root child1 .. childN). Print just a node if b is a leaf.
-//
-
-func (prc *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) string {
-
- var p ParserRuleContext = prc
- s := "["
- for p != nil && p != stop {
- if ruleNames == nil {
- if !p.IsEmpty() {
- s += strconv.Itoa(p.GetInvokingState())
- }
- } else {
- ri := p.GetRuleIndex()
- var ruleName string
- if ri >= 0 && ri < len(ruleNames) {
- ruleName = ruleNames[ri]
- } else {
- ruleName = strconv.Itoa(ri)
- }
- s += ruleName
- }
- if p.GetParent() != nil && (ruleNames != nil || !p.GetParent().(ParserRuleContext).IsEmpty()) {
- s += " "
- }
- pi := p.GetParent()
- if pi != nil {
- p = pi.(ParserRuleContext)
- } else {
- p = nil
- }
- }
- s += "]"
- return s
-}
-
-func (prc *BaseParserRuleContext) SetParent(v Tree) {
- if v == nil {
- prc.parentCtx = nil
- } else {
- prc.parentCtx = v.(RuleContext)
- }
-}
-
-func (prc *BaseParserRuleContext) GetInvokingState() int {
- return prc.invokingState
-}
-
-func (prc *BaseParserRuleContext) SetInvokingState(t int) {
- prc.invokingState = t
-}
-
-func (prc *BaseParserRuleContext) GetRuleIndex() int {
- return prc.RuleIndex
-}
-
-func (prc *BaseParserRuleContext) GetAltNumber() int {
- return ATNInvalidAltNumber
-}
-
-func (prc *BaseParserRuleContext) SetAltNumber(_ int) {}
-
-// IsEmpty returns true if the context of b is empty.
-//
-// A context is empty if there is no invoking state, meaning nobody calls
-// current context.
-func (prc *BaseParserRuleContext) IsEmpty() bool {
- return prc.invokingState == -1
-}
-
-// GetParent returns the parent rule context of this context, or nil if this
-// context is the root of the parse tree and has no parent.
-func (prc *BaseParserRuleContext) GetParent() Tree {
- return prc.parentCtx
-}
-
-var ParserRuleContextEmpty = NewBaseParserRuleContext(nil, -1)
-
-type InterpreterRuleContext interface {
- ParserRuleContext
-}
-
-type BaseInterpreterRuleContext struct {
- *BaseParserRuleContext
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func NewBaseInterpreterRuleContext(parent BaseInterpreterRuleContext, invokingStateNumber, ruleIndex int) *BaseInterpreterRuleContext {
-
- prc := new(BaseInterpreterRuleContext)
-
- prc.BaseParserRuleContext = NewBaseParserRuleContext(parent, invokingStateNumber)
-
- prc.RuleIndex = ruleIndex
-
- return prc
-}
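For reference, a minimal standalone sketch of the child-lookup pattern used by GetToken, GetChildOfType and getChild in the deleted context above: walk the children, keep only those of the requested kind, and count the requested index down to zero. The child types here are hypothetical stand-ins, not the ANTLR runtime types.

package main

import "fmt"

type child interface{ Kind() string }

type tokenChild struct{ text string }

func (t tokenChild) Kind() string { return "token" }

type ruleChild struct{ name string }

func (r ruleChild) Kind() string { return "rule" }

// nthOfKind returns the i-th (0-based) child of the requested kind, or nil,
// counting i down as matching children are seen.
func nthOfKind(children []child, kind string, i int) child {
	for _, c := range children {
		if c.Kind() != kind {
			continue
		}
		if i == 0 {
			return c
		}
		i--
	}
	return nil
}

func main() {
	kids := []child{tokenChild{"a"}, ruleChild{"expr"}, tokenChild{"b"}}
	fmt.Println(nthOfKind(kids, "token", 1)) // {b}: the second token child
}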
diff --git a/vendor/github.com/antlr4-go/antlr/v4/prediction_context.go b/vendor/github.com/antlr4-go/antlr/v4/prediction_context.go
deleted file mode 100644
index c1b80cc1f..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/prediction_context.go
+++ /dev/null
@@ -1,727 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "golang.org/x/exp/slices"
- "strconv"
-)
-
-var _emptyPredictionContextHash int
-
-func init() {
- _emptyPredictionContextHash = murmurInit(1)
- _emptyPredictionContextHash = murmurFinish(_emptyPredictionContextHash, 0)
-}
-
-func calculateEmptyHash() int {
- return _emptyPredictionContextHash
-}
-
-const (
- // BasePredictionContextEmptyReturnState represents {@code $} in an array in full context mode, $
- // doesn't mean wildcard:
- //
- // $ + x = [$,x]
- //
- // Here,
- //
- // $ = EmptyReturnState
- BasePredictionContextEmptyReturnState = 0x7FFFFFFF
-)
-
-// TODO: JI These are meant to be atomics - this does not seem to match the Java runtime here
-//
-//goland:noinspection GoUnusedGlobalVariable
-var (
- BasePredictionContextglobalNodeCount = 1
- BasePredictionContextid = BasePredictionContextglobalNodeCount
-)
-
-const (
- PredictionContextEmpty = iota
- PredictionContextSingleton
- PredictionContextArray
-)
-
-// PredictionContext is a Go-idiomatic implementation of PredictionContext that does not try to
-// emulate inheritance from Java, and can be used without an interface definition. An interface
-// is not required because no user code will ever need to implement this interface.
-type PredictionContext struct {
- cachedHash int
- pcType int
- parentCtx *PredictionContext
- returnState int
- parents []*PredictionContext
- returnStates []int
-}
-
-func NewEmptyPredictionContext() *PredictionContext {
- nep := &PredictionContext{}
- nep.cachedHash = calculateEmptyHash()
- nep.pcType = PredictionContextEmpty
- nep.returnState = BasePredictionContextEmptyReturnState
- return nep
-}
-
-func NewBaseSingletonPredictionContext(parent *PredictionContext, returnState int) *PredictionContext {
- pc := &PredictionContext{}
- pc.pcType = PredictionContextSingleton
- pc.returnState = returnState
- pc.parentCtx = parent
- if parent != nil {
- pc.cachedHash = calculateHash(parent, returnState)
- } else {
- pc.cachedHash = calculateEmptyHash()
- }
- return pc
-}
-
-func SingletonBasePredictionContextCreate(parent *PredictionContext, returnState int) *PredictionContext {
- if returnState == BasePredictionContextEmptyReturnState && parent == nil {
- // someone can pass in the bits of an array ctx that mean $
- return BasePredictionContextEMPTY
- }
- return NewBaseSingletonPredictionContext(parent, returnState)
-}
-
-func NewArrayPredictionContext(parents []*PredictionContext, returnStates []int) *PredictionContext {
- // Parent can be nil only if full ctx mode and we make an array
- // from {@link //EMPTY} and non-empty. We merge {@link //EMPTY} by using
- // nil parent and
- // returnState == {@link //EmptyReturnState}.
- hash := murmurInit(1)
- for _, parent := range parents {
- hash = murmurUpdate(hash, parent.Hash())
- }
- for _, returnState := range returnStates {
- hash = murmurUpdate(hash, returnState)
- }
- hash = murmurFinish(hash, len(parents)<<1)
-
- nec := &PredictionContext{}
- nec.cachedHash = hash
- nec.pcType = PredictionContextArray
- nec.parents = parents
- nec.returnStates = returnStates
- return nec
-}
-
-func (p *PredictionContext) Hash() int {
- return p.cachedHash
-}
-
-func (p *PredictionContext) Equals(other Collectable[*PredictionContext]) bool {
- switch p.pcType {
- case PredictionContextEmpty:
- otherP := other.(*PredictionContext)
- return other == nil || otherP == nil || otherP.isEmpty()
- case PredictionContextSingleton:
- return p.SingletonEquals(other)
- case PredictionContextArray:
- return p.ArrayEquals(other)
- }
- return false
-}
-
-func (p *PredictionContext) ArrayEquals(o Collectable[*PredictionContext]) bool {
- if o == nil {
- return false
- }
- other := o.(*PredictionContext)
- if other == nil || other.pcType != PredictionContextArray {
- return false
- }
- if p.cachedHash != other.Hash() {
- return false // can't be same if hash is different
- }
-
- // Must compare the actual array elements and not just the array address
- //
- return slices.Equal(p.returnStates, other.returnStates) &&
- slices.EqualFunc(p.parents, other.parents, func(x, y *PredictionContext) bool {
- return x.Equals(y)
- })
-}
-
-func (p *PredictionContext) SingletonEquals(other Collectable[*PredictionContext]) bool {
- if other == nil {
- return false
- }
- otherP := other.(*PredictionContext)
- if otherP == nil {
- return false
- }
-
- if p.cachedHash != otherP.Hash() {
- return false // Can't be same if hash is different
- }
-
- if p.returnState != otherP.getReturnState(0) {
- return false
- }
-
- // Both parents must be nil if one is
- if p.parentCtx == nil {
- return otherP.parentCtx == nil
- }
-
- return p.parentCtx.Equals(otherP.parentCtx)
-}
-
-func (p *PredictionContext) GetParent(i int) *PredictionContext {
- switch p.pcType {
- case PredictionContextEmpty:
- return nil
- case PredictionContextSingleton:
- return p.parentCtx
- case PredictionContextArray:
- return p.parents[i]
- }
- return nil
-}
-
-func (p *PredictionContext) getReturnState(i int) int {
- switch p.pcType {
- case PredictionContextArray:
- return p.returnStates[i]
- default:
- return p.returnState
- }
-}
-
-func (p *PredictionContext) GetReturnStates() []int {
- switch p.pcType {
- case PredictionContextArray:
- return p.returnStates
- default:
- return []int{p.returnState}
- }
-}
-
-func (p *PredictionContext) length() int {
- switch p.pcType {
- case PredictionContextArray:
- return len(p.returnStates)
- default:
- return 1
- }
-}
-
-func (p *PredictionContext) hasEmptyPath() bool {
- switch p.pcType {
- case PredictionContextSingleton:
- return p.returnState == BasePredictionContextEmptyReturnState
- }
- return p.getReturnState(p.length()-1) == BasePredictionContextEmptyReturnState
-}
-
-func (p *PredictionContext) String() string {
- switch p.pcType {
- case PredictionContextEmpty:
- return "$"
- case PredictionContextSingleton:
- var up string
-
- if p.parentCtx == nil {
- up = ""
- } else {
- up = p.parentCtx.String()
- }
-
- if len(up) == 0 {
- if p.returnState == BasePredictionContextEmptyReturnState {
- return "$"
- }
-
- return strconv.Itoa(p.returnState)
- }
-
- return strconv.Itoa(p.returnState) + " " + up
- case PredictionContextArray:
- if p.isEmpty() {
- return "[]"
- }
-
- s := "["
- for i := 0; i < len(p.returnStates); i++ {
- if i > 0 {
- s = s + ", "
- }
- if p.returnStates[i] == BasePredictionContextEmptyReturnState {
- s = s + "$"
- continue
- }
- s = s + strconv.Itoa(p.returnStates[i])
- if !p.parents[i].isEmpty() {
- s = s + " " + p.parents[i].String()
- } else {
- s = s + "nil"
- }
- }
- return s + "]"
-
- default:
- return "unknown"
- }
-}
-
-func (p *PredictionContext) isEmpty() bool {
- switch p.pcType {
- case PredictionContextEmpty:
- return true
- case PredictionContextArray:
- // since EmptyReturnState can only appear in the last position, we
- // don't need to verify that size==1
- return p.returnStates[0] == BasePredictionContextEmptyReturnState
- default:
- return false
- }
-}
-
-func (p *PredictionContext) Type() int {
- return p.pcType
-}
-
-func calculateHash(parent *PredictionContext, returnState int) int {
- h := murmurInit(1)
- h = murmurUpdate(h, parent.Hash())
- h = murmurUpdate(h, returnState)
- return murmurFinish(h, 2)
-}
-
-// Convert a {@link RuleContext} tree to a {@link BasePredictionContext} graph.
-// Return {@link //EMPTY} if {@code outerContext} is empty or nil.
-// /
-func predictionContextFromRuleContext(a *ATN, outerContext RuleContext) *PredictionContext {
- if outerContext == nil {
- outerContext = ParserRuleContextEmpty
- }
- // if we are in RuleContext of start rule, s, then BasePredictionContext
- // is EMPTY. Nobody called us. (if we are empty, return empty)
- if outerContext.GetParent() == nil || outerContext == ParserRuleContextEmpty {
- return BasePredictionContextEMPTY
- }
- // If we have a parent, convert it to a BasePredictionContext graph
- parent := predictionContextFromRuleContext(a, outerContext.GetParent().(RuleContext))
- state := a.states[outerContext.GetInvokingState()]
- transition := state.GetTransitions()[0]
-
- return SingletonBasePredictionContextCreate(parent, transition.(*RuleTransition).followState.GetStateNumber())
-}
-
-func merge(a, b *PredictionContext, rootIsWildcard bool, mergeCache *JPCMap) *PredictionContext {
-
- // Share same graph if both same
- //
- if a == b || a.Equals(b) {
- return a
- }
-
- if a.pcType == PredictionContextSingleton && b.pcType == PredictionContextSingleton {
- return mergeSingletons(a, b, rootIsWildcard, mergeCache)
- }
- // At least one of a or b is array
- // If one is $ and rootIsWildcard, return $ as wildcard
- if rootIsWildcard {
- if a.isEmpty() {
- return a
- }
- if b.isEmpty() {
- return b
- }
- }
-
- // Convert either Singleton or Empty to arrays, so that we can merge them
- //
- ara := convertToArray(a)
- arb := convertToArray(b)
- return mergeArrays(ara, arb, rootIsWildcard, mergeCache)
-}
-
-func convertToArray(pc *PredictionContext) *PredictionContext {
- switch pc.Type() {
- case PredictionContextEmpty:
- return NewArrayPredictionContext([]*PredictionContext{}, []int{})
- case PredictionContextSingleton:
- return NewArrayPredictionContext([]*PredictionContext{pc.GetParent(0)}, []int{pc.getReturnState(0)})
- default:
- // Already an array
- }
- return pc
-}
-
-// mergeSingletons merges two Singleton [PredictionContext] instances.
-//
-// Stack tops equal, parents merge is same return left graph.
-//
-//
-//
-// Same stack top, parents differ merge parents giving array node, then
-// remainders of those graphs. A new root node is created to point to the
-// merged parents.
-//
-//
-//
-// Different stack tops pointing to same parent. Make array node for the
-// root where both element in the root point to the same (original)
-// parent.
-//
-//
-//
-// Different stack tops pointing to different parents. Make array node for
-// the root where each element points to the corresponding original
-// parent.
-//
-//
-// @param a the first {@link SingletonBasePredictionContext}
-// @param b the second {@link SingletonBasePredictionContext}
-// @param rootIsWildcard {@code true} if this is a local-context merge,
-// otherwise false to indicate a full-context merge
-// @param mergeCache
-// /
-func mergeSingletons(a, b *PredictionContext, rootIsWildcard bool, mergeCache *JPCMap) *PredictionContext {
- if mergeCache != nil {
- previous, present := mergeCache.Get(a, b)
- if present {
- return previous
- }
- previous, present = mergeCache.Get(b, a)
- if present {
- return previous
- }
- }
-
- rootMerge := mergeRoot(a, b, rootIsWildcard)
- if rootMerge != nil {
- if mergeCache != nil {
- mergeCache.Put(a, b, rootMerge)
- }
- return rootMerge
- }
- if a.returnState == b.returnState {
- parent := merge(a.parentCtx, b.parentCtx, rootIsWildcard, mergeCache)
- // if parent is same as existing a or b parent or reduced to a parent,
- // return it
- if parent.Equals(a.parentCtx) {
- return a // ax + bx = ax, if a=b
- }
- if parent.Equals(b.parentCtx) {
- return b // ax + bx = bx, if a=b
- }
- // else: ax + ay = a'[x,y]
- // merge parents x and y, giving array node with x,y then remainders
- // of those graphs. dup a, a' points at merged array.
- // New joined parent so create a new singleton pointing to it, a'
- spc := SingletonBasePredictionContextCreate(parent, a.returnState)
- if mergeCache != nil {
- mergeCache.Put(a, b, spc)
- }
- return spc
- }
- // a != b payloads differ
- // see if we can collapse parents due to $+x parents if local ctx
- var singleParent *PredictionContext
- if a.Equals(b) || (a.parentCtx != nil && a.parentCtx.Equals(b.parentCtx)) { // ax +
- // bx =
- // [a,b]x
- singleParent = a.parentCtx
- }
- if singleParent != nil { // parents are same
- // sort payloads and use same parent
- payloads := []int{a.returnState, b.returnState}
- if a.returnState > b.returnState {
- payloads[0] = b.returnState
- payloads[1] = a.returnState
- }
- parents := []*PredictionContext{singleParent, singleParent}
- apc := NewArrayPredictionContext(parents, payloads)
- if mergeCache != nil {
- mergeCache.Put(a, b, apc)
- }
- return apc
- }
-	// Parents differ and we can't merge them, so just pack them
-	// together into an array.
- // ax + by = [ax,by]
- payloads := []int{a.returnState, b.returnState}
- parents := []*PredictionContext{a.parentCtx, b.parentCtx}
- if a.returnState > b.returnState { // sort by payload
- payloads[0] = b.returnState
- payloads[1] = a.returnState
- parents = []*PredictionContext{b.parentCtx, a.parentCtx}
- }
- apc := NewArrayPredictionContext(parents, payloads)
- if mergeCache != nil {
- mergeCache.Put(a, b, apc)
- }
- return apc
-}
-
-// Handle case where at least one of {@code a} or {@code b} is
-// {@link //EMPTY}. In the following diagrams, the symbol {@code $} is used
-// to represent {@link //EMPTY}.
-//
-//
-// Local-Context Merges
-//
-//
-// These local-context merge operations are used when {@code rootIsWildcard}
-// is true.
-//
-//
-// {@link //EMPTY} is superset of any graph return {@link //EMPTY}.
-//
-//
-//
-// {@link //EMPTY} and anything is {@code //EMPTY}, so merged parent is
-// {@code //EMPTY} return left graph.
-//
-//
-//
-// Special case of last merge if local context.
-//
-//
-//
-// Full-Context Merges
-//
-//
-// These full-context merge operations are used when {@code rootIsWildcard}
-// is false.
-//
-//
-//
-//
-// Must keep all contexts {@link //EMPTY} in array is a special value (and
-// nil parent).
-//
-//
-//
-//
-// @param a the first {@link SingletonBasePredictionContext}
-// @param b the second {@link SingletonBasePredictionContext}
-// @param rootIsWildcard {@code true} if this is a local-context merge,
-// otherwise false to indicate a full-context merge
-// /
-func mergeRoot(a, b *PredictionContext, rootIsWildcard bool) *PredictionContext {
- if rootIsWildcard {
- if a.pcType == PredictionContextEmpty {
- return BasePredictionContextEMPTY // // + b =//
- }
- if b.pcType == PredictionContextEmpty {
- return BasePredictionContextEMPTY // a +// =//
- }
- } else {
- if a.isEmpty() && b.isEmpty() {
- return BasePredictionContextEMPTY // $ + $ = $
- } else if a.isEmpty() { // $ + x = [$,x]
- payloads := []int{b.getReturnState(-1), BasePredictionContextEmptyReturnState}
- parents := []*PredictionContext{b.GetParent(-1), nil}
- return NewArrayPredictionContext(parents, payloads)
- } else if b.isEmpty() { // x + $ = [$,x] ($ is always first if present)
- payloads := []int{a.getReturnState(-1), BasePredictionContextEmptyReturnState}
- parents := []*PredictionContext{a.GetParent(-1), nil}
- return NewArrayPredictionContext(parents, payloads)
- }
- }
- return nil
-}
-
-// Merge two {@link ArrayBasePredictionContext} instances.
-//
-//
-// Different tops, different parents.
-//
-//
-//
-// Shared top, same parents.
-//
-//
-//
-// Shared top, different parents.
-//
-//
-//
-// Shared top, all shared parents.
-//
-//
-//
-// Equal tops, merge parents and reduce top to
-// {@link SingletonBasePredictionContext}.
-//
-//
-//goland:noinspection GoBoolExpressions
-func mergeArrays(a, b *PredictionContext, rootIsWildcard bool, mergeCache *JPCMap) *PredictionContext {
- if mergeCache != nil {
- previous, present := mergeCache.Get(a, b)
- if present {
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> previous")
- }
- return previous
- }
- previous, present = mergeCache.Get(b, a)
- if present {
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> previous")
- }
- return previous
- }
- }
- // merge sorted payloads a + b => M
- i := 0 // walks a
- j := 0 // walks b
- k := 0 // walks target M array
-
- mergedReturnStates := make([]int, len(a.returnStates)+len(b.returnStates))
- mergedParents := make([]*PredictionContext, len(a.returnStates)+len(b.returnStates))
- // walk and merge to yield mergedParents, mergedReturnStates
- for i < len(a.returnStates) && j < len(b.returnStates) {
- aParent := a.parents[i]
- bParent := b.parents[j]
- if a.returnStates[i] == b.returnStates[j] {
- // same payload (stack tops are equal), must yield merged singleton
- payload := a.returnStates[i]
- // $+$ = $
- bothDollars := payload == BasePredictionContextEmptyReturnState && aParent == nil && bParent == nil
- axAX := aParent != nil && bParent != nil && aParent.Equals(bParent) // ax+ax
- // ->
- // ax
- if bothDollars || axAX {
- mergedParents[k] = aParent // choose left
- mergedReturnStates[k] = payload
- } else { // ax+ay -> a'[x,y]
- mergedParent := merge(aParent, bParent, rootIsWildcard, mergeCache)
- mergedParents[k] = mergedParent
- mergedReturnStates[k] = payload
- }
- i++ // hop over left one as usual
- j++ // but also Skip one in right side since we merge
- } else if a.returnStates[i] < b.returnStates[j] { // copy a[i] to M
- mergedParents[k] = aParent
- mergedReturnStates[k] = a.returnStates[i]
- i++
- } else { // b > a, copy b[j] to M
- mergedParents[k] = bParent
- mergedReturnStates[k] = b.returnStates[j]
- j++
- }
- k++
- }
- // copy over any payloads remaining in either array
- if i < len(a.returnStates) {
- for p := i; p < len(a.returnStates); p++ {
- mergedParents[k] = a.parents[p]
- mergedReturnStates[k] = a.returnStates[p]
- k++
- }
- } else {
- for p := j; p < len(b.returnStates); p++ {
- mergedParents[k] = b.parents[p]
- mergedReturnStates[k] = b.returnStates[p]
- k++
- }
- }
- // trim merged if we combined a few that had same stack tops
- if k < len(mergedParents) { // write index < last position trim
- if k == 1 { // for just one merged element, return singleton top
- pc := SingletonBasePredictionContextCreate(mergedParents[0], mergedReturnStates[0])
- if mergeCache != nil {
- mergeCache.Put(a, b, pc)
- }
- return pc
- }
- mergedParents = mergedParents[0:k]
- mergedReturnStates = mergedReturnStates[0:k]
- }
-
- M := NewArrayPredictionContext(mergedParents, mergedReturnStates)
-
- // if we created same array as a or b, return that instead
- // TODO: JI track whether this is possible above during merge sort for speed and possibly avoid an allocation
- if M.Equals(a) {
- if mergeCache != nil {
- mergeCache.Put(a, b, a)
- }
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> a")
- }
- return a
- }
- if M.Equals(b) {
- if mergeCache != nil {
- mergeCache.Put(a, b, b)
- }
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> b")
- }
- return b
- }
- combineCommonParents(&mergedParents)
-
- if mergeCache != nil {
- mergeCache.Put(a, b, M)
- }
- if runtimeConfig.parserATNSimulatorTraceATNSim {
- fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> " + M.String())
- }
- return M
-}
-
-// Make pass over all M parents and merge any Equals() ones.
-// Note that we pass a pointer to the slice as we want to modify it in place.
-//
-//goland:noinspection GoUnusedFunction
-func combineCommonParents(parents *[]*PredictionContext) {
- uniqueParents := NewJStore[*PredictionContext, Comparator[*PredictionContext]](pContextEqInst, PredictionContextCollection, "combineCommonParents for PredictionContext")
-
- for p := 0; p < len(*parents); p++ {
- parent := (*parents)[p]
- _, _ = uniqueParents.Put(parent)
- }
- for q := 0; q < len(*parents); q++ {
- pc, _ := uniqueParents.Get((*parents)[q])
- (*parents)[q] = pc
- }
-}
-
-func getCachedBasePredictionContext(context *PredictionContext, contextCache *PredictionContextCache, visited *VisitRecord) *PredictionContext {
- if context.isEmpty() {
- return context
- }
- existing, present := visited.Get(context)
- if present {
- return existing
- }
-
- existing, present = contextCache.Get(context)
- if present {
- visited.Put(context, existing)
- return existing
- }
- changed := false
- parents := make([]*PredictionContext, context.length())
- for i := 0; i < len(parents); i++ {
- parent := getCachedBasePredictionContext(context.GetParent(i), contextCache, visited)
- if changed || !parent.Equals(context.GetParent(i)) {
- if !changed {
- parents = make([]*PredictionContext, context.length())
- for j := 0; j < context.length(); j++ {
- parents[j] = context.GetParent(j)
- }
- changed = true
- }
- parents[i] = parent
- }
- }
- if !changed {
- contextCache.add(context)
- visited.Put(context, context)
- return context
- }
- var updated *PredictionContext
- if len(parents) == 0 {
- updated = BasePredictionContextEMPTY
- } else if len(parents) == 1 {
- updated = SingletonBasePredictionContextCreate(parents[0], context.getReturnState(0))
- } else {
- updated = NewArrayPredictionContext(parents, context.GetReturnStates())
- }
- contextCache.add(updated)
- visited.Put(updated, updated)
- visited.Put(context, updated)
-
- return updated
-}
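For reference, a standalone sketch of the payload-ordering step in mergeSingletons above: when two singleton contexts share a parent but have different return states, the merge yields an array context whose return states are kept in ascending order. The types below are simplified stand-ins, not the ANTLR runtime types.

package main

import "fmt"

type arrayCtx struct {
	parents      []string // parent identity, reduced to a label here
	returnStates []int
}

// mergeSharedParent models ax + ay -> a'[x,y]: one shared parent, two return
// states, smaller state first, mirroring how the runtime sorts merged payloads.
func mergeSharedParent(parent string, x, y int) arrayCtx {
	if x > y {
		x, y = y, x
	}
	return arrayCtx{
		parents:      []string{parent, parent},
		returnStates: []int{x, y},
	}
}

func main() {
	fmt.Println(mergeSharedParent("a", 7, 3)) // {[a a] [3 7]}
}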
diff --git a/vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go b/vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go
deleted file mode 100644
index 25dfb11e8..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go
+++ /dev/null
@@ -1,48 +0,0 @@
-package antlr
-
-var BasePredictionContextEMPTY = &PredictionContext{
- cachedHash: calculateEmptyHash(),
- pcType: PredictionContextEmpty,
- returnState: BasePredictionContextEmptyReturnState,
-}
-
-// PredictionContextCache is used to cache [PredictionContext] objects. It is used for the shared
-// context cache associated with contexts in DFA states. This cache
-// can be used for both lexers and parsers.
-type PredictionContextCache struct {
- cache *JMap[*PredictionContext, *PredictionContext, Comparator[*PredictionContext]]
-}
-
-func NewPredictionContextCache() *PredictionContextCache {
- return &PredictionContextCache{
- cache: NewJMap[*PredictionContext, *PredictionContext, Comparator[*PredictionContext]](pContextEqInst, PredictionContextCacheCollection, "NewPredictionContextCache()"),
- }
-}
-
-// Add a context to the cache and return it. If the context already exists,
-// return that one instead and do not add a new context to the cache.
-// Protect shared cache from unsafe thread access.
-func (p *PredictionContextCache) add(ctx *PredictionContext) *PredictionContext {
- if ctx.isEmpty() {
- return BasePredictionContextEMPTY
- }
-
- // Put will return the existing entry if it is present (note this is done via Equals, not whether it is
- // the same pointer), otherwise it will add the new entry and return that.
- //
- existing, present := p.cache.Get(ctx)
- if present {
- return existing
- }
- p.cache.Put(ctx, ctx)
- return ctx
-}
-
-func (p *PredictionContextCache) Get(ctx *PredictionContext) (*PredictionContext, bool) {
- pc, exists := p.cache.Get(ctx)
- return pc, exists
-}
-
-func (p *PredictionContextCache) length() int {
- return p.cache.Len()
-}
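For reference, a minimal sketch of the canonicalising idea behind PredictionContextCache.add above: look the value up by equality first and hand back the stored entry, otherwise store and return the new one. Keys here are plain strings rather than PredictionContext values, purely for illustration.

package main

import "fmt"

// cache canonicalises values: add returns the pointer stored for an equal
// value if one exists, otherwise it stores the new value.
type cache struct {
	entries map[string]*string
}

func newCache() *cache { return &cache{entries: map[string]*string{}} }

func (c *cache) add(v string) *string {
	if existing, ok := c.entries[v]; ok {
		return existing
	}
	p := &v
	c.entries[v] = p
	return p
}

func main() {
	c := newCache()
	first := c.add("ctx")
	second := c.add("ctx")
	fmt.Println(first == second) // true: equal values share one canonical entry
}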
diff --git a/vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go b/vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go
deleted file mode 100644
index 3f85a6a52..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go
+++ /dev/null
@@ -1,536 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-// This enumeration defines the prediction modes available in ANTLR 4 along with
-// utility methods for analyzing configuration sets for conflicts and/or
-// ambiguities.
-
-const (
- // PredictionModeSLL represents the SLL(*) prediction mode.
- // This prediction mode ignores the current
- // parser context when making predictions. This is the fastest prediction
- // mode, and provides correct results for many grammars. This prediction
- // mode is more powerful than the prediction mode provided by ANTLR 3, but
- // may result in syntax errors for grammar and input combinations which are
- // not SLL.
- //
- // When using this prediction mode, the parser will either return a correct
- // parse tree (i.e. the same parse tree that would be returned with the
- // [PredictionModeLL] prediction mode), or it will Report a syntax error. If a
- // syntax error is encountered when using the SLL prediction mode,
- // it may be due to either an actual syntax error in the input or indicate
- // that the particular combination of grammar and input requires the more
- // powerful LL prediction abilities to complete successfully.
- //
- // This prediction mode does not provide any guarantees for prediction
- // behavior for syntactically-incorrect inputs.
- //
- PredictionModeSLL = 0
-
- // PredictionModeLL represents the LL(*) prediction mode.
- // This prediction mode allows the current parser
- // context to be used for resolving SLL conflicts that occur during
- // prediction. This is the fastest prediction mode that guarantees correct
- // parse results for all combinations of grammars with syntactically correct
- // inputs.
- //
- // When using this prediction mode, the parser will make correct decisions
- // for all syntactically-correct grammar and input combinations. However, in
- // cases where the grammar is truly ambiguous this prediction mode might not
- // report a precise answer for exactly which alternatives are
- // ambiguous.
- //
- // This prediction mode does not provide any guarantees for prediction
- // behavior for syntactically-incorrect inputs.
- //
- PredictionModeLL = 1
-
- // PredictionModeLLExactAmbigDetection represents the LL(*) prediction mode
- // with exact ambiguity detection.
- //
- // In addition to the correctness guarantees provided by the [PredictionModeLL] prediction mode,
- // this prediction mode instructs the prediction algorithm to determine the
- // complete and exact set of ambiguous alternatives for every ambiguous
- // decision encountered while parsing.
- //
- // This prediction mode may be used for diagnosing ambiguities during
- // grammar development. Due to the performance overhead of calculating sets
- // of ambiguous alternatives, this prediction mode should be avoided when
- // the exact results are not necessary.
- //
- // This prediction mode does not provide any guarantees for prediction
- // behavior for syntactically-incorrect inputs.
- //
- PredictionModeLLExactAmbigDetection = 2
-)
-
-// PredictionModehasSLLConflictTerminatingPrediction computes the SLL prediction termination condition.
-//
-// This method computes the SLL prediction termination condition for both of
-// the following cases:
-//
-// - The usual SLL+LL fallback upon SLL conflict
-// - Pure SLL without LL fallback
-//
-// # Combined SLL+LL Parsing
-//
-// When LL-fallback is enabled upon SLL conflict, correct predictions are
-// ensured regardless of how the termination condition is computed by this
-// method. Due to the substantially higher cost of LL prediction, the
-// prediction should only fall back to LL when the additional lookahead
-// cannot lead to a unique SLL prediction.
-//
-// Assuming combined SLL+LL parsing, an SLL configuration set with only
-// conflicting subsets should fall back to full LL, even if the
-// configuration sets don't resolve to the same alternative, e.g.
-//
-// {1,2} and {3,4}
-//
-// If there is at least one non-conflicting
-// configuration, SLL could continue with the hopes that more lookahead will
-// resolve via one of those non-conflicting configurations.
-//
-// Here's the prediction termination rule, then: SLL (for SLL+LL parsing)
-// stops when it sees only conflicting configuration subsets. In contrast,
-// full LL keeps going when there is uncertainty.
-//
-// # Heuristic
-//
-// As a heuristic, we stop prediction when we see any conflicting subset
-// unless we see a state that only has one alternative associated with it.
-// The single-alt-state thing lets prediction continue upon rules like
-// (otherwise, it would admit defeat too soon):
-//
-// [12|1|[], 6|2|[], 12|2|[]]. s : (ID | ID ID?) ;
-//
-// When the [ATN] simulation reaches the state before ';', it has a
-// [DFA] state that looks like:
-//
-// [12|1|[], 6|2|[], 12|2|[]]
-//
-// Naturally
-//
-// 12|1|[] and 12|2|[]
-//
-// conflict, but we cannot stop processing this node because alternative two has another way to continue,
-// via
-//
-// [6|2|[]]
-//
-// It also lets us continue for this rule:
-//
-// [1|1|[], 1|2|[], 8|3|[]] a : A | A | A B ;
-//
-// After Matching input A, we reach the stop state for rule A, state 1.
-// State 8 is the state immediately before B. Clearly alternatives 1 and 2
-// conflict and no amount of further lookahead will separate the two.
-// However, alternative 3 will be able to continue, and so we do not stop
-// working on this state. In the previous example, we're concerned with
-// states associated with the conflicting alternatives. Here alt 3 is not
-// associated with the conflicting configs, but since we can continue
-// looking for input reasonably, don't declare the state done.
-//
-// # Pure SLL Parsing
-//
-// To handle pure SLL parsing, all we have to do is make sure that we
-// combine stack contexts for configurations that differ only by semantic
-// predicate. From there, we can do the usual SLL termination heuristic.
-//
-// # Predicates in SLL+LL Parsing
-//
-// SLL decisions don't evaluate predicates until after they reach [DFA] stop
-// states because they need to create the [DFA] cache that works in all
-// semantic situations. In contrast, full LL evaluates predicates collected
-// during start state computation, so it can ignore predicates thereafter.
-// This means that SLL termination detection can totally ignore semantic
-// predicates.
-//
-// Implementation-wise, [ATNConfigSet] combines stack contexts but not
-// semantic predicate contexts, so we might see two configurations like the
-// following:
-//
-// (s, 1, x, {}), (s, 1, x', {p})
-//
-// Before testing these configurations against others, we have to merge
-// x and x' (without modifying the existing configurations).
-// For example, we test (x+x')==x” when looking for conflicts in
-// the following configurations:
-//
-// (s, 1, x, {}), (s, 1, x', {p}), (s, 2, x”, {})
-//
-// If the configuration set has predicates (as indicated by
-// [ATNConfigSet.hasSemanticContext]), this algorithm makes a copy of
-// the configurations to strip out all the predicates so that a standard
-// [ATNConfigSet] will merge everything ignoring predicates.
-func PredictionModehasSLLConflictTerminatingPrediction(mode int, configs *ATNConfigSet) bool {
-
- // Configs in rule stop states indicate reaching the end of the decision
- // rule (local context) or end of start rule (full context). If all
- // configs meet this condition, then none of the configurations is able
- // to Match additional input, so we terminate prediction.
- //
- if PredictionModeallConfigsInRuleStopStates(configs) {
- return true
- }
-
- // pure SLL mode parsing
- if mode == PredictionModeSLL {
-		// Don't bother combining configs from different semantic
-		// contexts if we can fail over to full LL; it costs more time
-		// since we'll often fail over anyway.
- if configs.hasSemanticContext {
- // dup configs, tossing out semantic predicates
- dup := NewATNConfigSet(false)
- for _, c := range configs.configs {
-
- // NewATNConfig({semanticContext:}, c)
- c = NewATNConfig2(c, SemanticContextNone)
- dup.Add(c, nil)
- }
- configs = dup
- }
- // now we have combined contexts for configs with dissimilar predicates
- }
- // pure SLL or combined SLL+LL mode parsing
- altsets := PredictionModegetConflictingAltSubsets(configs)
- return PredictionModehasConflictingAltSet(altsets) && !PredictionModehasStateAssociatedWithOneAlt(configs)
-}
-
-// PredictionModehasConfigInRuleStopState checks if any configuration in the given configs is in a
-// [RuleStopState]. Configurations meeting this condition have reached
-// the end of the decision rule (local context) or end of start rule (full
-// context).
-//
-// The func returns true if any configuration in the supplied configs is in a [RuleStopState]
-func PredictionModehasConfigInRuleStopState(configs *ATNConfigSet) bool {
- for _, c := range configs.configs {
- if _, ok := c.GetState().(*RuleStopState); ok {
- return true
- }
- }
- return false
-}
-
-// PredictionModeallConfigsInRuleStopStates checks if all configurations in configs are in a
-// [RuleStopState]. Configurations meeting this condition have reached
-// the end of the decision rule (local context) or end of start rule (full
-// context).
-//
-// the func returns true if all configurations in configs are in a
-// [RuleStopState]
-func PredictionModeallConfigsInRuleStopStates(configs *ATNConfigSet) bool {
-
- for _, c := range configs.configs {
- if _, ok := c.GetState().(*RuleStopState); !ok {
- return false
- }
- }
- return true
-}
-
-// PredictionModeresolvesToJustOneViableAlt checks full LL prediction termination.
-//
-// Can we stop looking ahead during [ATN] simulation or is there some
-// uncertainty as to which alternative we will ultimately pick, after
-// consuming more input? Even if there are partial conflicts, we might know
-// that everything is going to resolve to the same minimum alternative. That
-// means we can stop since no more lookahead will change that fact. On the
-// other hand, there might be multiple conflicts that resolve to different
-// minimums. That means we need more look ahead to decide which of those
-// alternatives we should predict.
-//
-// The basic idea is to split the set of configurations 'C', into
-// conflicting subsets (s, _, ctx, _) and singleton subsets with
-// non-conflicting configurations. Two configurations conflict if they have
-// identical [ATNConfig].state and [ATNConfig].context values
-// but a different [ATNConfig].alt value, e.g.
-//
-// (s, i, ctx, _)
-//
-// and
-//
-// (s, j, ctx, _) ; for i != j
-//
-// Reduce these configuration subsets to the set of possible alternatives.
-// You can compute the alternative subsets in one pass as follows:
-//
-// A_s,ctx = {i | (s, i, ctx, _)}
-//
-// for each configuration in C holding s and ctx fixed.
-//
-// Or in pseudo-code:
-//
-// for each configuration c in C:
-// map[c] U= c.ATNConfig.alt // map hash/equals uses s and x, not alt and not pred
-//
-// The values in map are the set of
-//
-// A_s,ctx
-//
-// sets.
-//
-// If
-//
-// |A_s,ctx| = 1
-//
-// then there is no conflict associated with s and ctx.
-//
-// Reduce the subsets to singletons by choosing a minimum of each subset. If
-// the union of these alternative subsets is a singleton, then no amount of
-// further lookahead will help us. We will always pick that alternative. If,
-// however, there is more than one alternative, then we are uncertain which
-// alternative to predict and must continue looking for resolution. We may
-// or may not discover an ambiguity in the future, even if there are no
-// conflicting subsets this round.
-//
-// The biggest sin is to terminate early because it means we've made a
-// decision but were uncertain as to the eventual outcome. We haven't used
-// enough lookahead. On the other hand, announcing a conflict too late is no
-// big deal; you will still have the conflict. It's just inefficient. It
-// might even look until the end of file.
-//
-// No special consideration for semantic predicates is required because
-// predicates are evaluated on-the-fly for full LL prediction, ensuring that
-// no configuration contains a semantic context during the termination
-// check.
-//
-// # Conflicting Configs
-//
-// Two configurations:
-//
-// (s, i, x) and (s, j, x')
-//
-// conflict when i != j but x = x'. Because we merge all
-// (s, i, _) configurations together, that means that there are at
-// most n configurations associated with state s for
-// n possible alternatives in the decision. The merged stacks
-// complicate the comparison of configuration contexts x and x'.
-//
-// Sam checks to see if one is a subset of the other by calling
-// merge and checking to see if the merged result is either x or x'.
-// If the x associated with lowest alternative i
-// is the superset, then i is the only possible prediction since the
-// others resolve to min(i) as well. However, if x is
-// associated with j > i then at least one stack configuration for
-// j is not in conflict with alternative i. The algorithm
-// should keep going, looking for more lookahead due to the uncertainty.
-//
-// For simplicity, I'm doing an equality check between x and
-// x', which lets the algorithm continue to consume lookahead longer
-// than necessary. The reason I like the equality is of course the
-// simplicity but also because that is the test you need to detect the
-// alternatives that are actually in conflict.
-//
-// # Continue/Stop Rule
-//
-// Continue if the union of resolved alternative sets from non-conflicting and
-// conflicting alternative subsets has more than one alternative. We are
-// uncertain about which alternative to predict.
-//
-// The complete set of alternatives,
-//
-// [i for (_, i, _)]
-//
-// tells us which alternatives are still in the running for the amount of input we've
-// consumed at this point. The conflicting sets let us strip away
-// configurations that won't lead to more states because we resolve
-// conflicts to the configuration with a minimum alternate for the
-// conflicting set.
-//
-// Cases
-//
-// - no conflicts and more than 1 alternative in set => continue
-// - (s, 1, x), (s, 2, x), (s, 3, z), (s', 1, y), (s', 2, y) yields non-conflicting set
-// {3} ∪ conflicting sets min({1,2}) ∪ min({1,2}) = {1,3} => continue
-// - (s, 1, x), (s, 2, x), (s', 1, y), (s', 2, y), (s”, 1, z) yields non-conflicting set
-// {1} ∪ conflicting sets min({1,2}) ∪ min({1,2}) = {1} => stop and predict 1
-// - (s, 1, x), (s, 2, x), (s', 1, y), (s', 2, y) yields conflicting, reduced sets
-// {1} ∪ {1} = {1} => stop and predict 1, can announce ambiguity {1,2}
-// - (s, 1, x), (s, 2, x), (s', 2, y), (s', 3, y) yields conflicting, reduced sets
-// {1} ∪ {2} = {1,2} => continue
-// - (s, 1, x), (s, 2, x), (s', 2, y), (s', 3, y) yields conflicting, reduced sets
-// {1} ∪ {2} = {1,2} => continue
-// - (s, 1, x), (s, 2, x), (s', 3, y), (s', 4, y) yields conflicting, reduced sets
-// {1} ∪ {3} = {1,3} => continue
-//
-// # Exact Ambiguity Detection
-//
-// If all states report the same conflicting set of alternatives, then we
-// know we have the exact ambiguity set:
-//
-// |A_i| > 1
-//
-// and
-//
-// A_i = A_j ; for all i, j
-//
-// In other words, we continue examining lookahead until all A_i
-// have more than one alternative and all A_i are the same. If
-//
-// A={{1,2}, {1,3}}
-//
-// then regular LL prediction would terminate because the resolved set is {1}.
-// To determine what the real ambiguity is, we have to know whether the ambiguity is between one and
-// two or one and three so we keep going. We can only stop prediction when
-// we need exact ambiguity detection when the sets look like:
-//
-// A={{1,2}}
-//
-// or
-//
-// {{1,2},{1,2}}, etc...
-func PredictionModeresolvesToJustOneViableAlt(altsets []*BitSet) int {
- return PredictionModegetSingleViableAlt(altsets)
-}
-
-// PredictionModeallSubsetsConflict determines if every alternative subset in altsets contains more
-// than one alternative.
-//
-// The func returns true if every [BitSet] in altsets has
-// [BitSet].cardinality cardinality > 1
-func PredictionModeallSubsetsConflict(altsets []*BitSet) bool {
- return !PredictionModehasNonConflictingAltSet(altsets)
-}
-
-// PredictionModehasNonConflictingAltSet determines if any single alternative subset in altsets contains
-// exactly one alternative.
-//
-// The func returns true if altsets contains at least one [BitSet] with
-// [BitSet].cardinality cardinality 1
-func PredictionModehasNonConflictingAltSet(altsets []*BitSet) bool {
- for i := 0; i < len(altsets); i++ {
- alts := altsets[i]
- if alts.length() == 1 {
- return true
- }
- }
- return false
-}
-
-// PredictionModehasConflictingAltSet determines if any single alternative subset in altsets contains
-// more than one alternative.
-//
-// The func returns true if altsets contains a [BitSet] with
-// [BitSet].cardinality cardinality > 1, otherwise false
-func PredictionModehasConflictingAltSet(altsets []*BitSet) bool {
- for i := 0; i < len(altsets); i++ {
- alts := altsets[i]
- if alts.length() > 1 {
- return true
- }
- }
- return false
-}
-
-// PredictionModeallSubsetsEqual determines if every alternative subset in altsets is equivalent.
-//
-// The func returns true if every member of altsets is equal to the others.
-func PredictionModeallSubsetsEqual(altsets []*BitSet) bool {
- var first *BitSet
-
- for i := 0; i < len(altsets); i++ {
- alts := altsets[i]
- if first == nil {
- first = alts
- } else if alts != first {
- return false
- }
- }
-
- return true
-}
-
-// PredictionModegetUniqueAlt returns the unique alternative predicted by all alternative subsets in
-// altsets. If no such alternative exists, this method returns
-// [ATNInvalidAltNumber].
-//
-// @param altsets a collection of alternative subsets
-func PredictionModegetUniqueAlt(altsets []*BitSet) int {
- all := PredictionModeGetAlts(altsets)
- if all.length() == 1 {
- return all.minValue()
- }
-
- return ATNInvalidAltNumber
-}
-
-// PredictionModeGetAlts returns the complete set of represented alternatives for a collection of
-// alternative subsets. This method returns the union of each [BitSet]
-// in altsets, being the set of represented alternatives in altsets.
-func PredictionModeGetAlts(altsets []*BitSet) *BitSet {
- all := NewBitSet()
- for _, alts := range altsets {
- all.or(alts)
- }
- return all
-}
-
-// PredictionModegetConflictingAltSubsets gets the conflicting alt subsets from a configuration set.
-//
-// for each configuration c in configs:
-// map[c] U= c.ATNConfig.alt // map hash/equals uses s and x, not alt and not pred
-func PredictionModegetConflictingAltSubsets(configs *ATNConfigSet) []*BitSet {
- configToAlts := NewJMap[*ATNConfig, *BitSet, *ATNAltConfigComparator[*ATNConfig]](atnAltCfgEqInst, AltSetCollection, "PredictionModegetConflictingAltSubsets()")
-
- for _, c := range configs.configs {
-
- alts, ok := configToAlts.Get(c)
- if !ok {
- alts = NewBitSet()
- configToAlts.Put(c, alts)
- }
- alts.add(c.GetAlt())
- }
-
- return configToAlts.Values()
-}
-
-// PredictionModeGetStateToAltMap gets a map from state to alt subset from a configuration set.
-//
-// for each configuration c in configs:
-// map[c.ATNConfig.state] U= c.ATNConfig.alt}
-func PredictionModeGetStateToAltMap(configs *ATNConfigSet) *AltDict {
- m := NewAltDict()
-
- for _, c := range configs.configs {
- alts := m.Get(c.GetState().String())
- if alts == nil {
- alts = NewBitSet()
- m.put(c.GetState().String(), alts)
- }
- alts.(*BitSet).add(c.GetAlt())
- }
- return m
-}
-
-func PredictionModehasStateAssociatedWithOneAlt(configs *ATNConfigSet) bool {
- values := PredictionModeGetStateToAltMap(configs).values()
- for i := 0; i < len(values); i++ {
- if values[i].(*BitSet).length() == 1 {
- return true
- }
- }
- return false
-}
-
-// PredictionModegetSingleViableAlt gets the single alternative predicted by all alternative subsets in altsets
-// if there is one.
-//
-// TODO: JI - Review this code - it does not seem to do the same thing as the Java code - maybe because [BitSet] is not like the Java utils BitSet
-func PredictionModegetSingleViableAlt(altsets []*BitSet) int {
- result := ATNInvalidAltNumber
-
- for i := 0; i < len(altsets); i++ {
- alts := altsets[i]
- minAlt := alts.minValue()
- if result == ATNInvalidAltNumber {
- result = minAlt
- } else if result != minAlt { // more than 1 viable alt
- return ATNInvalidAltNumber
- }
- }
- return result
-}
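For reference, a standalone sketch of the conflict analysis described above: group alternatives by (state, context) and stop only when every conflicting subset resolves to the same minimum alternative. Configurations are plain structs here and 0 stands in for ATNInvalidAltNumber; none of this is the ANTLR runtime API.

package main

import "fmt"

type config struct {
	state int
	ctx   string
	alt   int
}

type key struct {
	state int
	ctx   string
}

// altSubsets groups alternatives by (state, context), ignoring the alt itself,
// the same grouping PredictionModegetConflictingAltSubsets performs.
func altSubsets(configs []config) map[key]map[int]bool {
	m := map[key]map[int]bool{}
	for _, c := range configs {
		k := key{c.state, c.ctx}
		if m[k] == nil {
			m[k] = map[int]bool{}
		}
		m[k][c.alt] = true
	}
	return m
}

// singleViableAlt returns the one alternative every subset resolves to, or 0
// if the subsets disagree (alternative numbers are assumed to start at 1).
func singleViableAlt(subsets map[key]map[int]bool) int {
	result := 0
	for _, alts := range subsets {
		lowest := 0
		for a := range alts {
			if lowest == 0 || a < lowest {
				lowest = a
			}
		}
		if result == 0 {
			result = lowest
		} else if result != lowest {
			return 0
		}
	}
	return result
}

func main() {
	// Two states, each conflicted between alts 1 and 2: both subsets resolve
	// to the minimum alt 1, so prediction could stop and predict 1.
	configs := []config{{1, "x", 1}, {1, "x", 2}, {2, "y", 1}, {2, "y", 2}}
	fmt.Println(singleViableAlt(altSubsets(configs))) // 1
}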
diff --git a/vendor/github.com/antlr4-go/antlr/v4/recognizer.go b/vendor/github.com/antlr4-go/antlr/v4/recognizer.go
deleted file mode 100644
index 2e0b504fb..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/recognizer.go
+++ /dev/null
@@ -1,241 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "strings"
-
- "strconv"
-)
-
-type Recognizer interface {
- GetLiteralNames() []string
- GetSymbolicNames() []string
- GetRuleNames() []string
-
- Sempred(RuleContext, int, int) bool
- Precpred(RuleContext, int) bool
-
- GetState() int
- SetState(int)
- Action(RuleContext, int, int)
- AddErrorListener(ErrorListener)
- RemoveErrorListeners()
- GetATN() *ATN
- GetErrorListenerDispatch() ErrorListener
- HasError() bool
- GetError() RecognitionException
- SetError(RecognitionException)
-}
-
-type BaseRecognizer struct {
- listeners []ErrorListener
- state int
-
- RuleNames []string
- LiteralNames []string
- SymbolicNames []string
- GrammarFileName string
- SynErr RecognitionException
-}
-
-func NewBaseRecognizer() *BaseRecognizer {
- rec := new(BaseRecognizer)
- rec.listeners = []ErrorListener{ConsoleErrorListenerINSTANCE}
- rec.state = -1
- return rec
-}
-
-//goland:noinspection GoUnusedGlobalVariable
-var tokenTypeMapCache = make(map[string]int)
-
-//goland:noinspection GoUnusedGlobalVariable
-var ruleIndexMapCache = make(map[string]int)
-
-func (b *BaseRecognizer) checkVersion(toolVersion string) {
- runtimeVersion := "4.12.0"
- if runtimeVersion != toolVersion {
- fmt.Println("ANTLR runtime and generated code versions disagree: " + runtimeVersion + "!=" + toolVersion)
- }
-}
-
-func (b *BaseRecognizer) SetError(err RecognitionException) {
- b.SynErr = err
-}
-
-func (b *BaseRecognizer) HasError() bool {
- return b.SynErr != nil
-}
-
-func (b *BaseRecognizer) GetError() RecognitionException {
- return b.SynErr
-}
-
-func (b *BaseRecognizer) Action(_ RuleContext, _, _ int) {
- panic("action not implemented on Recognizer!")
-}
-
-func (b *BaseRecognizer) AddErrorListener(listener ErrorListener) {
- b.listeners = append(b.listeners, listener)
-}
-
-func (b *BaseRecognizer) RemoveErrorListeners() {
- b.listeners = make([]ErrorListener, 0)
-}
-
-func (b *BaseRecognizer) GetRuleNames() []string {
- return b.RuleNames
-}
-
-func (b *BaseRecognizer) GetTokenNames() []string {
- return b.LiteralNames
-}
-
-func (b *BaseRecognizer) GetSymbolicNames() []string {
- return b.SymbolicNames
-}
-
-func (b *BaseRecognizer) GetLiteralNames() []string {
- return b.LiteralNames
-}
-
-func (b *BaseRecognizer) GetState() int {
- return b.state
-}
-
-func (b *BaseRecognizer) SetState(v int) {
- b.state = v
-}
-
-//func (b *Recognizer) GetTokenTypeMap() {
-// var tokenNames = b.GetTokenNames()
-// if (tokenNames==nil) {
-// panic("The current recognizer does not provide a list of token names.")
-// }
-// var result = tokenTypeMapCache[tokenNames]
-// if(result==nil) {
-// result = tokenNames.reduce(function(o, k, i) { o[k] = i })
-// result.EOF = TokenEOF
-// tokenTypeMapCache[tokenNames] = result
-// }
-// return result
-//}
-
-// GetRuleIndexMap Get a map from rule names to rule indexes.
-//
-// Used for XPath and tree pattern compilation.
-//
-// TODO: JI This is not yet implemented in the Go runtime. Maybe not needed.
-func (b *BaseRecognizer) GetRuleIndexMap() map[string]int {
-
- panic("Method not defined!")
- // var ruleNames = b.GetRuleNames()
- // if (ruleNames==nil) {
- // panic("The current recognizer does not provide a list of rule names.")
- // }
- //
- // var result = ruleIndexMapCache[ruleNames]
- // if(result==nil) {
- // result = ruleNames.reduce(function(o, k, i) { o[k] = i })
- // ruleIndexMapCache[ruleNames] = result
- // }
- // return result
-}
-
-// GetTokenType get the token type based upon its name
-func (b *BaseRecognizer) GetTokenType(_ string) int {
- panic("Method not defined!")
- // var ttype = b.GetTokenTypeMap()[tokenName]
- // if (ttype !=nil) {
- // return ttype
- // } else {
- // return TokenInvalidType
- // }
-}
-
-//func (b *Recognizer) GetTokenTypeMap() map[string]int {
-// Vocabulary vocabulary = getVocabulary()
-//
-// Synchronized (tokenTypeMapCache) {
-// Map result = tokenTypeMapCache.Get(vocabulary)
-// if (result == null) {
-// result = new HashMap()
-// for (int i = 0; i < GetATN().maxTokenType; i++) {
-// String literalName = vocabulary.getLiteralName(i)
-// if (literalName != null) {
-// result.put(literalName, i)
-// }
-//
-// String symbolicName = vocabulary.GetSymbolicName(i)
-// if (symbolicName != null) {
-// result.put(symbolicName, i)
-// }
-// }
-//
-// result.put("EOF", Token.EOF)
-// result = Collections.unmodifiableMap(result)
-// tokenTypeMapCache.put(vocabulary, result)
-// }
-//
-// return result
-// }
-//}
-
-// GetErrorHeader returns the error header, normally line/character position information.
-//
-// Can be overridden in sub structs embedding BaseRecognizer.
-func (b *BaseRecognizer) GetErrorHeader(e RecognitionException) string {
- line := e.GetOffendingToken().GetLine()
- column := e.GetOffendingToken().GetColumn()
- return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
-}
-
-// GetTokenErrorDisplay shows how a token should be displayed in an error message.
-//
-// The default is to display just the text, but during development you might
-// want to have a lot of information spit out. Override in that case
-// to use t.String() (which, for CommonToken, dumps everything about
-// the token). This is better than forcing you to override a method in
-// your token objects because you don't have to go modify your lexer
-// so that it creates a new Java type.
-//
-// Deprecated: This method is not called by the ANTLR 4 Runtime. Specific
-// implementations of [ANTLRErrorStrategy] may provide a similar
-// feature when necessary. For example, see [DefaultErrorStrategy].GetTokenErrorDisplay()
-func (b *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
- if t == nil {
- return ""
- }
- s := t.GetText()
- if s == "" {
- if t.GetTokenType() == TokenEOF {
- s = ""
- } else {
- s = "<" + strconv.Itoa(t.GetTokenType()) + ">"
- }
- }
- s = strings.Replace(s, "\t", "\\t", -1)
- s = strings.Replace(s, "\n", "\\n", -1)
- s = strings.Replace(s, "\r", "\\r", -1)
-
- return "'" + s + "'"
-}
-
-func (b *BaseRecognizer) GetErrorListenerDispatch() ErrorListener {
- return NewProxyErrorListener(b.listeners)
-}
-
-// Sempred embedding structs need to override this if there are sempreds or actions
-// that the ATN interpreter needs to execute
-func (b *BaseRecognizer) Sempred(_ RuleContext, _ int, _ int) bool {
- return true
-}
-
-// Precpred embedding structs need to override this if there are preceding predicates
-// that the ATN interpreter needs to execute
-func (b *BaseRecognizer) Precpred(_ RuleContext, _ int) bool {
- return true
-}
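For reference, a small sketch of the embedding-based override pattern the Sempred and Precpred comments above rely on: a generated parser embeds the defaults and redefines only the hooks it needs. The types and the simplified Sempred signature here are hypothetical, not the generated-parser API.

package main

import "fmt"

// base provides the default hook, as BaseRecognizer does for Sempred.
type base struct{}

func (base) Sempred(ruleIndex, predIndex int) bool { return true }

// myParser embeds the defaults and overrides only the hook it needs.
type myParser struct {
	base
}

func (myParser) Sempred(ruleIndex, predIndex int) bool {
	// Pretend predicate 0 of rule 0 fails; everything else passes.
	return !(ruleIndex == 0 && predIndex == 0)
}

func main() {
	var p myParser
	fmt.Println(p.Sempred(0, 0), p.Sempred(1, 0)) // false true
}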
diff --git a/vendor/github.com/antlr4-go/antlr/v4/rule_context.go b/vendor/github.com/antlr4-go/antlr/v4/rule_context.go
deleted file mode 100644
index f2ad04793..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/rule_context.go
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-// RuleContext is a record of a single rule invocation. It knows
-// which context invoked it, if any. If there is no parent context, then
-// naturally the invoking state is not valid. The parent link
-// provides a chain upwards from the current rule invocation to the root
-// of the invocation tree, forming a stack.
-//
-// We actually carry no information about the rule associated with this context (except
-// when parsing). We keep only the state number of the invoking state from
-// the [ATN] submachine that invoked this. Contrast this with the s
-// pointer inside [ParserRuleContext] that tracks the current state
-// being "executed" for the current rule.
-//
-// The parent contexts are useful for computing lookahead sets and
-// getting error information.
-//
-// These objects are used during parsing and prediction.
-// For the special case of parsers, we use the struct
-// [ParserRuleContext], which embeds a RuleContext.
-//
-// @see ParserRuleContext
-type RuleContext interface {
- RuleNode
-
- GetInvokingState() int
- SetInvokingState(int)
-
- GetRuleIndex() int
- IsEmpty() bool
-
- GetAltNumber() int
- SetAltNumber(altNumber int)
-
- String([]string, RuleContext) string
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/semantic_context.go b/vendor/github.com/antlr4-go/antlr/v4/semantic_context.go
deleted file mode 100644
index 68cb9061e..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/semantic_context.go
+++ /dev/null
@@ -1,464 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "strconv"
-)
-
-// SemanticContext is a tree structure used to record the semantic context in which
-//
-// an ATN configuration is valid. It's either a single predicate,
-// a conjunction p1 && p2, or a sum of products p1 || p2.
-//
-// I have scoped the AND, OR, and Predicate subclasses of
-// [SemanticContext] within the scope of this outer ``class''
-type SemanticContext interface {
- Equals(other Collectable[SemanticContext]) bool
- Hash() int
-
- evaluate(parser Recognizer, outerContext RuleContext) bool
- evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext
-
- String() string
-}
-
-func SemanticContextandContext(a, b SemanticContext) SemanticContext {
- if a == nil || a == SemanticContextNone {
- return b
- }
- if b == nil || b == SemanticContextNone {
- return a
- }
- result := NewAND(a, b)
- if len(result.opnds) == 1 {
- return result.opnds[0]
- }
-
- return result
-}
-
-func SemanticContextorContext(a, b SemanticContext) SemanticContext {
- if a == nil {
- return b
- }
- if b == nil {
- return a
- }
- if a == SemanticContextNone || b == SemanticContextNone {
- return SemanticContextNone
- }
- result := NewOR(a, b)
- if len(result.opnds) == 1 {
- return result.opnds[0]
- }
-
- return result
-}
-
-type Predicate struct {
- ruleIndex int
- predIndex int
- isCtxDependent bool
-}
-
-func NewPredicate(ruleIndex, predIndex int, isCtxDependent bool) *Predicate {
- p := new(Predicate)
-
- p.ruleIndex = ruleIndex
- p.predIndex = predIndex
- p.isCtxDependent = isCtxDependent // e.g., $i ref in pred
- return p
-}
-
-//The default {@link SemanticContext}, which is semantically equivalent to
-//a predicate of the form {@code {true}?}.
-
-var SemanticContextNone = NewPredicate(-1, -1, false)
-
-func (p *Predicate) evalPrecedence(_ Recognizer, _ RuleContext) SemanticContext {
- return p
-}
-
-func (p *Predicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
-
- var localctx RuleContext
-
- if p.isCtxDependent {
- localctx = outerContext
- }
-
- return parser.Sempred(localctx, p.ruleIndex, p.predIndex)
-}
-
-func (p *Predicate) Equals(other Collectable[SemanticContext]) bool {
- if p == other {
- return true
- } else if _, ok := other.(*Predicate); !ok {
- return false
- } else {
- return p.ruleIndex == other.(*Predicate).ruleIndex &&
- p.predIndex == other.(*Predicate).predIndex &&
- p.isCtxDependent == other.(*Predicate).isCtxDependent
- }
-}
-
-func (p *Predicate) Hash() int {
- h := murmurInit(0)
- h = murmurUpdate(h, p.ruleIndex)
- h = murmurUpdate(h, p.predIndex)
- if p.isCtxDependent {
- h = murmurUpdate(h, 1)
- } else {
- h = murmurUpdate(h, 0)
- }
- return murmurFinish(h, 3)
-}
-
-func (p *Predicate) String() string {
- return "{" + strconv.Itoa(p.ruleIndex) + ":" + strconv.Itoa(p.predIndex) + "}?"
-}
-
-type PrecedencePredicate struct {
- precedence int
-}
-
-func NewPrecedencePredicate(precedence int) *PrecedencePredicate {
-
- p := new(PrecedencePredicate)
- p.precedence = precedence
-
- return p
-}
-
-func (p *PrecedencePredicate) evaluate(parser Recognizer, outerContext RuleContext) bool {
- return parser.Precpred(outerContext, p.precedence)
-}
-
-func (p *PrecedencePredicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
- if parser.Precpred(outerContext, p.precedence) {
- return SemanticContextNone
- }
-
- return nil
-}
-
-func (p *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {
- return p.precedence - other.precedence
-}
-
-func (p *PrecedencePredicate) Equals(other Collectable[SemanticContext]) bool {
-
- var op *PrecedencePredicate
- var ok bool
- if op, ok = other.(*PrecedencePredicate); !ok {
- return false
- }
-
- if p == op {
- return true
- }
-
- return p.precedence == other.(*PrecedencePredicate).precedence
-}
-
-func (p *PrecedencePredicate) Hash() int {
- h := uint32(1)
- h = 31*h + uint32(p.precedence)
- return int(h)
-}
-
-func (p *PrecedencePredicate) String() string {
- return "{" + strconv.Itoa(p.precedence) + ">=prec}?"
-}
-
-func PrecedencePredicatefilterPrecedencePredicates(set *JStore[SemanticContext, Comparator[SemanticContext]]) []*PrecedencePredicate {
- result := make([]*PrecedencePredicate, 0)
-
- set.Each(func(v SemanticContext) bool {
- if c2, ok := v.(*PrecedencePredicate); ok {
- result = append(result, c2)
- }
- return true
- })
-
- return result
-}
-
-// A semantic context which is true whenever none of the contained contexts
-// is false.
-
-type AND struct {
- opnds []SemanticContext
-}
-
-func NewAND(a, b SemanticContext) *AND {
-
- operands := NewJStore[SemanticContext, Comparator[SemanticContext]](semctxEqInst, SemanticContextCollection, "NewAND() operands")
- if aa, ok := a.(*AND); ok {
- for _, o := range aa.opnds {
- operands.Put(o)
- }
- } else {
- operands.Put(a)
- }
-
- if ba, ok := b.(*AND); ok {
- for _, o := range ba.opnds {
- operands.Put(o)
- }
- } else {
- operands.Put(b)
- }
- precedencePredicates := PrecedencePredicatefilterPrecedencePredicates(operands)
- if len(precedencePredicates) > 0 {
- // interested in the transition with the lowest precedence
- var reduced *PrecedencePredicate
-
- for _, p := range precedencePredicates {
- if reduced == nil || p.precedence < reduced.precedence {
- reduced = p
- }
- }
-
- operands.Put(reduced)
- }
-
- vs := operands.Values()
- opnds := make([]SemanticContext, len(vs))
- copy(opnds, vs)
-
- and := new(AND)
- and.opnds = opnds
-
- return and
-}
-
-func (a *AND) Equals(other Collectable[SemanticContext]) bool {
- if a == other {
- return true
- }
- if _, ok := other.(*AND); !ok {
- return false
- } else {
- for i, v := range other.(*AND).opnds {
- if !a.opnds[i].Equals(v) {
- return false
- }
- }
- return true
- }
-}
-
-// {@inheritDoc}
-//
-//
-// The evaluation of predicates by a context is short-circuiting, but
-// unordered.
-func (a *AND) evaluate(parser Recognizer, outerContext RuleContext) bool {
- for i := 0; i < len(a.opnds); i++ {
- if !a.opnds[i].evaluate(parser, outerContext) {
- return false
- }
- }
- return true
-}
-
-func (a *AND) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
- differs := false
- operands := make([]SemanticContext, 0)
-
- for i := 0; i < len(a.opnds); i++ {
- context := a.opnds[i]
- evaluated := context.evalPrecedence(parser, outerContext)
- differs = differs || (evaluated != context)
- if evaluated == nil {
- // The AND context is false if any element is false
- return nil
- } else if evaluated != SemanticContextNone {
- // Reduce the result by Skipping true elements
- operands = append(operands, evaluated)
- }
- }
- if !differs {
- return a
- }
-
- if len(operands) == 0 {
- // all elements were true, so the AND context is true
- return SemanticContextNone
- }
-
- var result SemanticContext
-
- for _, o := range operands {
- if result == nil {
- result = o
- } else {
- result = SemanticContextandContext(result, o)
- }
- }
-
- return result
-}
-
-func (a *AND) Hash() int {
- h := murmurInit(37) // Init with a value different from OR
- for _, op := range a.opnds {
- h = murmurUpdate(h, op.Hash())
- }
- return murmurFinish(h, len(a.opnds))
-}
-
-func (o *OR) Hash() int {
- h := murmurInit(41) // Init with a value different from AND
- for _, op := range o.opnds {
- h = murmurUpdate(h, op.Hash())
- }
- return murmurFinish(h, len(o.opnds))
-}
-
-func (a *AND) String() string {
- s := ""
-
- for _, o := range a.opnds {
- s += "&& " + fmt.Sprint(o)
- }
-
- if len(s) > 3 {
-  return s[3:] // strip the leading "&& "
- }
-
- return s
-}
-
-//
-// A semantic context which is true whenever at least one of the contained
-// contexts is true.
-//
-
-type OR struct {
- opnds []SemanticContext
-}
-
-func NewOR(a, b SemanticContext) *OR {
-
- operands := NewJStore[SemanticContext, Comparator[SemanticContext]](semctxEqInst, SemanticContextCollection, "NewOR() operands")
- if aa, ok := a.(*OR); ok {
- for _, o := range aa.opnds {
- operands.Put(o)
- }
- } else {
- operands.Put(a)
- }
-
- if ba, ok := b.(*OR); ok {
- for _, o := range ba.opnds {
- operands.Put(o)
- }
- } else {
- operands.Put(b)
- }
- precedencePredicates := PrecedencePredicatefilterPrecedencePredicates(operands)
- if len(precedencePredicates) > 0 {
- // interested in the transition with the lowest precedence
- var reduced *PrecedencePredicate
-
- for _, p := range precedencePredicates {
- if reduced == nil || p.precedence > reduced.precedence {
- reduced = p
- }
- }
-
- operands.Put(reduced)
- }
-
- vs := operands.Values()
-
- opnds := make([]SemanticContext, len(vs))
- copy(opnds, vs)
-
- o := new(OR)
- o.opnds = opnds
-
- return o
-}
-
-func (o *OR) Equals(other Collectable[SemanticContext]) bool {
- if o == other {
- return true
- } else if _, ok := other.(*OR); !ok {
- return false
- } else {
- for i, v := range other.(*OR).opnds {
- if !o.opnds[i].Equals(v) {
- return false
- }
- }
- return true
- }
-}
-
-//
-// The evaluation of predicates by a context is short-circuiting, but
-// unordered.
-func (o *OR) evaluate(parser Recognizer, outerContext RuleContext) bool {
- for i := 0; i < len(o.opnds); i++ {
- if o.opnds[i].evaluate(parser, outerContext) {
- return true
- }
- }
- return false
-}
-
-func (o *OR) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext {
- differs := false
- operands := make([]SemanticContext, 0)
- for i := 0; i < len(o.opnds); i++ {
- context := o.opnds[i]
- evaluated := context.evalPrecedence(parser, outerContext)
- differs = differs || (evaluated != context)
- if evaluated == SemanticContextNone {
- // The OR context is true if any element is true
- return SemanticContextNone
- } else if evaluated != nil {
- // Reduce the result by Skipping false elements
- operands = append(operands, evaluated)
- }
- }
- if !differs {
- return o
- }
- if len(operands) == 0 {
- // all elements were false, so the OR context is false
- return nil
- }
- var result SemanticContext
-
- for _, o := range operands {
- if result == nil {
- result = o
- } else {
- result = SemanticContextorContext(result, o)
- }
- }
-
- return result
-}
-
-func (o *OR) String() string {
- s := ""
-
- for _, o := range o.opnds {
- s += "|| " + fmt.Sprint(o)
- }
-
- if len(s) > 3 {
- return s[0:3]
- }
-
- return s
-}
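A minimal sketch of how the AND/OR combinators above reduce against SemanticContextNone, the always-true predicate. It assumes the github.com/antlr4-go/antlr/v4 module is still importable after this patch removes the vendored copy; the predicate indices are placeholders.

package example

import (
	"fmt"

	"github.com/antlr4-go/antlr/v4"
)

func main() {
	p := antlr.NewPredicate(0, 0, false) // {0:0}? (indices are placeholders)
	q := antlr.NewPredicate(1, 0, false)

	// "true && p" reduces to p; "true || q" reduces to the always-true context.
	andRes := antlr.SemanticContextandContext(antlr.SemanticContextNone, p)
	orRes := antlr.SemanticContextorContext(antlr.SemanticContextNone, q)
	fmt.Println(andRes == antlr.SemanticContext(p), orRes == antlr.SemanticContext(antlr.SemanticContextNone))

	// p && q builds an AND node holding both predicates as operands.
	fmt.Printf("%T\n", antlr.SemanticContextandContext(p, q)) // *antlr.AND
}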
diff --git a/vendor/github.com/antlr4-go/antlr/v4/statistics.go b/vendor/github.com/antlr4-go/antlr/v4/statistics.go
deleted file mode 100644
index 70c0673a0..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/statistics.go
+++ /dev/null
@@ -1,281 +0,0 @@
-//go:build antlr.stats
-
-package antlr
-
-import (
- "fmt"
- "log"
- "os"
- "path/filepath"
- "sort"
- "strconv"
- "sync"
-)
-
-// This file allows the user to collect statistics about the runtime of the ANTLR runtime. It is not enabled by default
-// and so incurs no time penalty. To enable it, you must build the runtime with the antlr.stats build tag.
-//
-
-// Tells various components to collect statistics - because it is only true when this file is included, it will
-// allow the compiler to completely eliminate all the code that is only used when collecting statistics.
-const collectStats = true
-
-// goRunStats is a collection of all the various data the ANTLR runtime has collected about a particular run.
-// It is exported so that it can be used by others to look for things that are not already looked for in the
-// runtime statistics.
-type goRunStats struct {
-
- // jStats is a slice of all the [JStatRec] records that have been created, which is one for EVERY collection created
- // during a run. It is exported so that it can be used by others to look for things that are not already looked for
- // within this package.
- //
- jStats []*JStatRec
- jStatsLock sync.RWMutex
- topN int
- topNByMax []*JStatRec
- topNByUsed []*JStatRec
- unusedCollections map[CollectionSource]int
- counts map[CollectionSource]int
-}
-
-const (
- collectionsFile = "collections"
-)
-
-var (
- Statistics = &goRunStats{
- topN: 10,
- }
-)
-
-type statsOption func(*goRunStats) error
-
-// Configure allows the statistics system to be configured as the user wants and override the defaults
-func (s *goRunStats) Configure(options ...statsOption) error {
- for _, option := range options {
- err := option(s)
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-// WithTopN sets the number of things to list in the report when we are concerned with the top N things.
-//
-// For example, if you want to see the top 20 collections by size, you can do:
-//
-// antlr.Statistics.Configure(antlr.WithTopN(20))
-func WithTopN(topN int) statsOption {
- return func(s *goRunStats) error {
- s.topN = topN
- return nil
- }
-}
-
-// Analyze looks through all the statistical records and computes all the outputs that might be useful to the user.
-//
-// The function gathers and analyzes a number of statistics about any particular run of
-// an ANTLR generated recognizer. In the vast majority of cases, the statistics are only
-// useful to maintainers of ANTLR itself, but they can be useful to users as well. They may be
-// especially useful in tracking down bugs or performance problems when an ANTLR user could
-// supply the output from this package, but cannot supply the grammar file(s) they are using, even
-// privately to the maintainers.
-//
-// The statistics are gathered by the runtime itself, and are not gathered by the parser or lexer, but the user
-// must call this function themselves to analyze the statistics. This is because none of the infrastructure is
-// extant unless the calling program is built with the antlr.stats tag like so:
-//
-// go build -tags antlr.stats .
-//
-// When a program is built with the antlr.stats tag, the Statistics object is created and available outside
-// the package. The user can then call the [Statistics.Analyze] function to analyze the statistics and then call the
-// [Statistics.Report] function to report the statistics.
-//
-// Please forward any questions about this package to the ANTLR discussion groups on GitHub or send to them to
-// me [Jim Idle] directly at jimi@idle.ws
-//
-// [Jim Idle]: https://github.com/jim-idle
-func (s *goRunStats) Analyze() {
-
- // Look for anything that looks strange and record it in our local maps etc for the report to present it
- //
- s.CollectionAnomalies()
- s.TopNCollections()
-}
-
-// TopNCollections looks through all the statistical records and gathers the top ten collections by size.
-func (s *goRunStats) TopNCollections() {
-
- // Let's sort the stat records by MaxSize
- //
- sort.Slice(s.jStats, func(i, j int) bool {
- return s.jStats[i].MaxSize > s.jStats[j].MaxSize
- })
-
- for i := 0; i < len(s.jStats) && i < s.topN; i++ {
- s.topNByMax = append(s.topNByMax, s.jStats[i])
- }
-
- // Sort by the number of times used
- //
- sort.Slice(s.jStats, func(i, j int) bool {
- return s.jStats[i].Gets+s.jStats[i].Puts > s.jStats[j].Gets+s.jStats[j].Puts
- })
- for i := 0; i < len(s.jStats) && i < s.topN; i++ {
- s.topNByUsed = append(s.topNByUsed, s.jStats[i])
- }
-}
-
-// Report dumps an asciidoc formatted report of all the statistics collected during a run to the given dir output
-// path, which should represent a directory. Generated files will be prefixed with the given prefix and will be
-// given a type name such as `collections` and a .adoc suffix.
-func (s *goRunStats) Report(dir string, prefix string) error {
-
- isDir, err := isDirectory(dir)
- switch {
- case err != nil:
- return err
- case !isDir:
- return fmt.Errorf("output directory `%s` is not a directory", dir)
- }
- s.reportCollections(dir, prefix)
-
- // Clean out any old data in case the user forgets
- //
- s.Reset()
- return nil
-}
-
-func (s *goRunStats) Reset() {
- s.jStats = nil
- s.topNByUsed = nil
- s.topNByMax = nil
-}
-
-func (s *goRunStats) reportCollections(dir, prefix string) {
- cname := filepath.Join(dir, ".asciidoctor")
- // If the file doesn't exist, create it, or append to the file
- f, err := os.OpenFile(cname, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
- if err != nil {
- log.Fatal(err)
- }
- _, _ = f.WriteString(`// .asciidoctorconfig
-++++
-
-++++`)
- _ = f.Close()
-
- fname := filepath.Join(dir, prefix+"_"+"_"+collectionsFile+"_"+".adoc")
- // If the file doesn't exist, create it, or append to the file
- f, err = os.OpenFile(fname, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
- if err != nil {
- log.Fatal(err)
- }
- defer func(f *os.File) {
- err := f.Close()
- if err != nil {
- log.Fatal(err)
- }
- }(f)
- _, _ = f.WriteString("= Collections for " + prefix + "\n\n")
-
- _, _ = f.WriteString("== Summary\n")
-
- if s.unusedCollections != nil {
- _, _ = f.WriteString("=== Unused Collections\n")
- _, _ = f.WriteString("Unused collections incur a penalty for allocation that makes them a candidate for either\n")
- _, _ = f.WriteString(" removal or optimization. If you are using a collection that is not used, you should\n")
- _, _ = f.WriteString(" consider removing it. If you are using a collection that is used, but not very often,\n")
- _, _ = f.WriteString(" you should consider using lazy initialization to defer the allocation until it is\n")
- _, _ = f.WriteString(" actually needed.\n\n")
-
- _, _ = f.WriteString("\n.Unused collections\n")
- _, _ = f.WriteString(`[cols="<3,>1"]` + "\n\n")
- _, _ = f.WriteString("|===\n")
- _, _ = f.WriteString("| Type | Count\n")
-
- for k, v := range s.unusedCollections {
- _, _ = f.WriteString("| " + CollectionDescriptors[k].SybolicName + " | " + strconv.Itoa(v) + "\n")
- }
- f.WriteString("|===\n\n")
- }
-
- _, _ = f.WriteString("\n.Summary of Collections\n")
- _, _ = f.WriteString(`[cols="<3,>1"]` + "\n\n")
- _, _ = f.WriteString("|===\n")
- _, _ = f.WriteString("| Type | Count\n")
- for k, v := range s.counts {
- _, _ = f.WriteString("| " + CollectionDescriptors[k].SybolicName + " | " + strconv.Itoa(v) + "\n")
- }
- _, _ = f.WriteString("| Total | " + strconv.Itoa(len(s.jStats)) + "\n")
- _, _ = f.WriteString("|===\n\n")
-
- _, _ = f.WriteString("\n.Summary of Top " + strconv.Itoa(s.topN) + " Collections by MaxSize\n")
- _, _ = f.WriteString(`[cols="<1,<3,>1,>1,>1,>1"]` + "\n\n")
- _, _ = f.WriteString("|===\n")
- _, _ = f.WriteString("| Source | Description | MaxSize | EndSize | Puts | Gets\n")
- for _, c := range s.topNByMax {
- _, _ = f.WriteString("| " + CollectionDescriptors[c.Source].SybolicName + "\n")
- _, _ = f.WriteString("| " + c.Description + "\n")
- _, _ = f.WriteString("| " + strconv.Itoa(c.MaxSize) + "\n")
- _, _ = f.WriteString("| " + strconv.Itoa(c.CurSize) + "\n")
- _, _ = f.WriteString("| " + strconv.Itoa(c.Puts) + "\n")
- _, _ = f.WriteString("| " + strconv.Itoa(c.Gets) + "\n")
- _, _ = f.WriteString("\n")
- }
- _, _ = f.WriteString("|===\n\n")
-
- _, _ = f.WriteString("\n.Summary of Top " + strconv.Itoa(s.topN) + " Collections by Access\n")
- _, _ = f.WriteString(`[cols="<1,<3,>1,>1,>1,>1,>1"]` + "\n\n")
- _, _ = f.WriteString("|===\n")
- _, _ = f.WriteString("| Source | Description | MaxSize | EndSize | Puts | Gets | P+G\n")
- for _, c := range s.topNByUsed {
- _, _ = f.WriteString("| " + CollectionDescriptors[c.Source].SybolicName + "\n")
- _, _ = f.WriteString("| " + c.Description + "\n")
- _, _ = f.WriteString("| " + strconv.Itoa(c.MaxSize) + "\n")
- _, _ = f.WriteString("| " + strconv.Itoa(c.CurSize) + "\n")
- _, _ = f.WriteString("| " + strconv.Itoa(c.Puts) + "\n")
- _, _ = f.WriteString("| " + strconv.Itoa(c.Gets) + "\n")
- _, _ = f.WriteString("| " + strconv.Itoa(c.Gets+c.Puts) + "\n")
- _, _ = f.WriteString("\n")
- }
- _, _ = f.WriteString("|===\n\n")
-}
-
-// AddJStatRec adds a [JStatRec] record to the [goRunStats] collection when build runtimeConfig antlr.stats is enabled.
-func (s *goRunStats) AddJStatRec(rec *JStatRec) {
- s.jStatsLock.Lock()
- defer s.jStatsLock.Unlock()
- s.jStats = append(s.jStats, rec)
-}
-
-// CollectionAnomalies looks through all the statistical records and gathers any anomalies that have been found.
-func (s *goRunStats) CollectionAnomalies() {
- s.jStatsLock.RLock()
- defer s.jStatsLock.RUnlock()
- s.counts = make(map[CollectionSource]int, len(s.jStats))
- for _, c := range s.jStats {
-
- // Accumulate raw counts
- //
- s.counts[c.Source]++
-
- // Look for allocated but unused collections and count them
- if c.MaxSize == 0 && c.Puts == 0 {
- if s.unusedCollections == nil {
- s.unusedCollections = make(map[CollectionSource]int)
- }
- s.unusedCollections[c.Source]++
- }
- if c.MaxSize > 6000 {
- fmt.Println("Collection ", c.Description, "accumulated a max size of ", c.MaxSize, " - this is probably too large and indicates a poorly formed grammar")
- }
- }
-
-}
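A sketch of the workflow the comments above describe: build the program with the antlr.stats tag, run the generated recognizer, then analyze and report. The output directory name is an assumption, and Report expects it to already exist.

package example

import (
	"log"

	"github.com/antlr4-go/antlr/v4"
)

// dumpAntlrStats is only meaningful when the binary is built with:
//
//	go build -tags antlr.stats .
func dumpAntlrStats() {
	if err := antlr.Statistics.Configure(antlr.WithTopN(20)); err != nil {
		log.Fatal(err)
	}

	// ... run the lexer/parser here so the runtime records JStatRec entries ...

	antlr.Statistics.Analyze()
	if err := antlr.Statistics.Report("./antlr-stats", "myparser"); err != nil {
		log.Fatal(err)
	}
}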
diff --git a/vendor/github.com/antlr4-go/antlr/v4/stats_data.go b/vendor/github.com/antlr4-go/antlr/v4/stats_data.go
deleted file mode 100644
index 4d9eb94e5..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/stats_data.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package antlr
-
-// A JStatRec is a record of a particular use of a [JStore], [JMap] or [JPCMap] collection. Typically, it will be
-// used to look for unused collections that were allocated anyway, problems with hash bucket clashes, and anomalies
-// such as huge numbers of Gets with no entries found GetNoEnt. You can refer to the CollectionAnomalies() function
-// for ideas on what can be gleaned from these statistics about collections.
-type JStatRec struct {
- Source CollectionSource
- MaxSize int
- CurSize int
- Gets int
- GetHits int
- GetMisses int
- GetHashConflicts int
- GetNoEnt int
- Puts int
- PutHits int
- PutMisses int
- PutHashConflicts int
- MaxSlotSize int
- Description string
- CreateStack []byte
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/token.go b/vendor/github.com/antlr4-go/antlr/v4/token.go
deleted file mode 100644
index 9670efb82..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/token.go
+++ /dev/null
@@ -1,213 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "strconv"
- "strings"
-)
-
-type TokenSourceCharStreamPair struct {
- tokenSource TokenSource
- charStream CharStream
-}
-
-// A token has properties: text, type, line, character position in the line
-// (so we can ignore tabs), token channel, index, and source from which
-// we obtained this token.
-
-type Token interface {
- GetSource() *TokenSourceCharStreamPair
- GetTokenType() int
- GetChannel() int
- GetStart() int
- GetStop() int
- GetLine() int
- GetColumn() int
-
- GetText() string
- SetText(s string)
-
- GetTokenIndex() int
- SetTokenIndex(v int)
-
- GetTokenSource() TokenSource
- GetInputStream() CharStream
-
- String() string
-}
-
-type BaseToken struct {
- source *TokenSourceCharStreamPair
- tokenType int // token type of the token
- channel int // The parser ignores everything not on DEFAULT_CHANNEL
- start int // optional return -1 if not implemented.
- stop int // optional return -1 if not implemented.
- tokenIndex int // from 0..n-1 of the token object in the input stream
- line int // line=1..n of the 1st character
- column int // beginning of the line at which it occurs, 0..n-1
- text string // text of the token.
- readOnly bool
-}
-
-const (
- TokenInvalidType = 0
-
- // TokenEpsilon - during lookahead operations, this "token" signifies we hit the rule end [ATN] state
- // and did not follow it despite needing to.
- TokenEpsilon = -2
-
- TokenMinUserTokenType = 1
-
- TokenEOF = -1
-
- // TokenDefaultChannel is the default channel upon which tokens are sent to the parser.
- //
- // All tokens go to the parser (unless [Skip] is called in the lexer rule)
- // on a particular "channel". The parser tunes to a particular channel
- // so that whitespace etc... can go to the parser on a "hidden" channel.
- TokenDefaultChannel = 0
-
- // TokenHiddenChannel defines the normal hidden channel - the parser will not see tokens that are not on [TokenDefaultChannel].
- //
- // Anything on a different channel than TokenDefaultChannel is not parsed by parser.
- TokenHiddenChannel = 1
-)
-
-func (b *BaseToken) GetChannel() int {
- return b.channel
-}
-
-func (b *BaseToken) GetStart() int {
- return b.start
-}
-
-func (b *BaseToken) GetStop() int {
- return b.stop
-}
-
-func (b *BaseToken) GetLine() int {
- return b.line
-}
-
-func (b *BaseToken) GetColumn() int {
- return b.column
-}
-
-func (b *BaseToken) GetTokenType() int {
- return b.tokenType
-}
-
-func (b *BaseToken) GetSource() *TokenSourceCharStreamPair {
- return b.source
-}
-
-func (b *BaseToken) GetTokenIndex() int {
- return b.tokenIndex
-}
-
-func (b *BaseToken) SetTokenIndex(v int) {
- b.tokenIndex = v
-}
-
-func (b *BaseToken) GetTokenSource() TokenSource {
- return b.source.tokenSource
-}
-
-func (b *BaseToken) GetInputStream() CharStream {
- return b.source.charStream
-}
-
-type CommonToken struct {
- BaseToken
-}
-
-func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start, stop int) *CommonToken {
-
- t := &CommonToken{
- BaseToken: BaseToken{
- source: source,
- tokenType: tokenType,
- channel: channel,
- start: start,
- stop: stop,
- tokenIndex: -1,
- },
- }
-
- if t.source.tokenSource != nil {
- t.line = source.tokenSource.GetLine()
- t.column = source.tokenSource.GetCharPositionInLine()
- } else {
- t.column = -1
- }
- return t
-}
-
-// An empty {@link Pair} which is used as the default value of
-// {@link //source} for tokens that do not have a source.
-
-//CommonToken.EMPTY_SOURCE = [ nil, nil ]
-
-// Constructs a New{@link CommonToken} as a copy of another {@link Token}.
-//
-//
-// If {@code oldToken} is also a {@link CommonToken} instance, the newly
-// constructed token will share a reference to the {@link //text} field and
-// the {@link Pair} stored in {@link //source}. Otherwise, {@link //text} will
-// be assigned the result of calling {@link //GetText}, and {@link //source}
-// will be constructed from the result of {@link Token//GetTokenSource} and
-// {@link Token//GetInputStream}.
-//
-// @param oldToken The token to copy.
-func (c *CommonToken) clone() *CommonToken {
- t := NewCommonToken(c.source, c.tokenType, c.channel, c.start, c.stop)
- t.tokenIndex = c.GetTokenIndex()
- t.line = c.GetLine()
- t.column = c.GetColumn()
- t.text = c.GetText()
- return t
-}
-
-func (c *CommonToken) GetText() string {
- if c.text != "" {
- return c.text
- }
- input := c.GetInputStream()
- if input == nil {
- return ""
- }
- n := input.Size()
- if c.start < n && c.stop < n {
- return input.GetTextFromInterval(NewInterval(c.start, c.stop))
- }
- return ""
-}
-
-func (c *CommonToken) SetText(text string) {
- c.text = text
-}
-
-func (c *CommonToken) String() string {
- txt := c.GetText()
- if txt != "" {
- txt = strings.Replace(txt, "\n", "\\n", -1)
- txt = strings.Replace(txt, "\r", "\\r", -1)
- txt = strings.Replace(txt, "\t", "\\t", -1)
- } else {
- txt = ""
- }
-
- var ch string
- if c.channel > 0 {
- ch = ",channel=" + strconv.Itoa(c.channel)
- } else {
- ch = ""
- }
-
- return "[@" + strconv.Itoa(c.tokenIndex) + "," + strconv.Itoa(c.start) + ":" + strconv.Itoa(c.stop) + "='" +
- txt + "',<" + strconv.Itoa(c.tokenType) + ">" +
- ch + "," + strconv.Itoa(c.line) + ":" + strconv.Itoa(c.column) + "]"
-}
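The [@index,start:stop='text',<type>,line:col] shape produced by String above is convenient for logs. A small sketch that reproduces it through the Token interface alone (channel omitted for brevity):

package example

import (
	"fmt"

	"github.com/antlr4-go/antlr/v4"
)

// describeToken formats any antlr.Token in the same style as CommonToken.String,
// e.g. [@5,11:14='text',<8>,2:0].
func describeToken(t antlr.Token) string {
	return fmt.Sprintf("[@%d,%d:%d='%s',<%d>,%d:%d]",
		t.GetTokenIndex(), t.GetStart(), t.GetStop(),
		t.GetText(), t.GetTokenType(), t.GetLine(), t.GetColumn())
}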
diff --git a/vendor/github.com/antlr4-go/antlr/v4/token_source.go b/vendor/github.com/antlr4-go/antlr/v4/token_source.go
deleted file mode 100644
index a3f36eaa6..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/token_source.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-type TokenSource interface {
- NextToken() Token
- Skip()
- More()
- GetLine() int
- GetCharPositionInLine() int
- GetInputStream() CharStream
- GetSourceName() string
- setTokenFactory(factory TokenFactory)
- GetTokenFactory() TokenFactory
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/token_stream.go b/vendor/github.com/antlr4-go/antlr/v4/token_stream.go
deleted file mode 100644
index bf4ff6633..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/token_stream.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-type TokenStream interface {
- IntStream
-
- LT(k int) Token
- Reset()
-
- Get(index int) Token
- GetTokenSource() TokenSource
- SetTokenSource(TokenSource)
-
- GetAllText() string
- GetTextFromInterval(Interval) string
- GetTextFromRuleContext(RuleContext) string
- GetTextFromTokens(Token, Token) string
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go b/vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go
deleted file mode 100644
index ccf59b465..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go
+++ /dev/null
@@ -1,662 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "bytes"
- "fmt"
-)
-
-//
-// Useful for rewriting out a buffered input token stream after doing some
-// augmentation or other manipulations on it.
-
-//
-// You can insert stuff, replace, and delete chunks. Note that the operations
-// are done lazily--only if you convert the buffer to a {@link String} with
-// {@link TokenStream#getText()}. This is very efficient because you are not
-// moving data around all the time. As the buffer of tokens is converted to
-// strings, the {@link #getText()} method(s) scan the input token stream and
-// check to see if there is an operation at the current index. If so, the
-// operation is done and then normal {@link String} rendering continues on the
-// buffer. This is like having multiple Turing machine instruction streams
-// (programs) operating on a single input tape. :)
-//
-
-// This rewriter makes no modifications to the token stream. It does not ask the
-// stream to fill itself up nor does it advance the input cursor. The token
-// stream {@link TokenStream#index()} will return the same value before and
-// after any {@link #getText()} call.
-
-//
-// The rewriter only works on tokens that you have in the buffer and ignores the
-// current input cursor. If you are buffering tokens on-demand, calling
-// {@link #getText()} halfway through the input will only do rewrites for those
-// tokens in the first half of the file.
-
-//
-// Since the operations are done lazily at {@link #getText}-time, operations do
-// not screw up the token index values. That is, an insert operation at token
-// index {@code i} does not change the index values for tokens
-// {@code i}+1..n-1.
-
-//
-// Because operations never actually alter the buffer, you may always get the
-// original token stream back without undoing anything. Since the instructions
-// are queued up, you can easily simulate transactions and roll back any changes
-// if there is an error just by removing instructions. For example,
-
-//
-// CharStream input = new ANTLRFileStream("input");
-// TLexer lex = new TLexer(input);
-// CommonTokenStream tokens = new CommonTokenStream(lex);
-// T parser = new T(tokens);
-// TokenStreamRewriter rewriter = new TokenStreamRewriter(tokens);
-// parser.startRule();
-//
-
-//
-// Then in the rules, you can execute (assuming rewriter is visible):
-
-//
-// Token t,u;
-// ...
-// rewriter.insertAfter(t, "text to put after t");}
-// rewriter.insertAfter(u, "text after u");}
-// System.out.println(rewriter.getText());
-//
-
-//
-// You can also have multiple "instruction streams" and get multiple rewrites
-// from a single pass over the input. Just name the instruction streams and use
-// that name again when printing the buffer. This could be useful for generating
-// a C file and also its header file--all from the same buffer:
-
-//
-// rewriter.insertAfter("pass1", t, "text to put after t");}
-// rewriter.insertAfter("pass2", u, "text after u");}
-// System.out.println(rewriter.getText("pass1"));
-// System.out.println(rewriter.getText("pass2"));
-//
-
-//
-// If you don't use named rewrite streams, a "default" stream is used as the
-// first example shows.
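The usage examples above are Java; a rough Go equivalent using the API defined later in this file is sketched below. The token stream is passed in so the sketch stays independent of any particular generated lexer, and the indices are placeholders that assume the stream holds at least a handful of tokens.

package example

import "github.com/antlr4-go/antlr/v4"

// rewriteExample queues instructions on the default program and on a named
// "pass2" program, then renders both views of the same token buffer.
func rewriteExample(tokens antlr.TokenStream) (pass1, pass2 string) {
	rewriter := antlr.NewTokenStreamRewriter(tokens)

	// Default instruction stream.
	rewriter.InsertAfterDefault(0, " /* after first token */ ")
	rewriter.ReplaceDefaultPos(2, "replacement")
	rewriter.DeleteDefault(4, 5)

	// A second, named instruction stream over the same buffer.
	rewriter.InsertBefore("pass2", 0, "// header\n")

	return rewriter.GetTextDefault(),
		rewriter.GetText("pass2", antlr.NewInterval(0, tokens.Size()-1))
}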
-
-const (
- DefaultProgramName = "default"
- ProgramInitSize = 100
- MinTokenIndex = 0
-)
-
-// Define the rewrite operation hierarchy
-
-type RewriteOperation interface {
-
- // Execute the rewrite operation by possibly adding to the buffer.
- // Return the index of the next token to operate on.
- Execute(buffer *bytes.Buffer) int
- String() string
- GetInstructionIndex() int
- GetIndex() int
- GetText() string
- GetOpName() string
- GetTokens() TokenStream
- SetInstructionIndex(val int)
- SetIndex(int)
- SetText(string)
- SetOpName(string)
- SetTokens(TokenStream)
-}
-
-type BaseRewriteOperation struct {
- //Current index of rewrites list
- instructionIndex int
- //Token buffer index
- index int
- //Substitution text
- text string
- //Actual operation name
- opName string
- //Pointer to token steam
- tokens TokenStream
-}
-
-func (op *BaseRewriteOperation) GetInstructionIndex() int {
- return op.instructionIndex
-}
-
-func (op *BaseRewriteOperation) GetIndex() int {
- return op.index
-}
-
-func (op *BaseRewriteOperation) GetText() string {
- return op.text
-}
-
-func (op *BaseRewriteOperation) GetOpName() string {
- return op.opName
-}
-
-func (op *BaseRewriteOperation) GetTokens() TokenStream {
- return op.tokens
-}
-
-func (op *BaseRewriteOperation) SetInstructionIndex(val int) {
- op.instructionIndex = val
-}
-
-func (op *BaseRewriteOperation) SetIndex(val int) {
- op.index = val
-}
-
-func (op *BaseRewriteOperation) SetText(val string) {
- op.text = val
-}
-
-func (op *BaseRewriteOperation) SetOpName(val string) {
- op.opName = val
-}
-
-func (op *BaseRewriteOperation) SetTokens(val TokenStream) {
- op.tokens = val
-}
-
-func (op *BaseRewriteOperation) Execute(_ *bytes.Buffer) int {
- return op.index
-}
-
-func (op *BaseRewriteOperation) String() string {
- return fmt.Sprintf("<%s@%d:\"%s\">",
- op.opName,
- op.tokens.Get(op.GetIndex()),
- op.text,
- )
-
-}
-
-type InsertBeforeOp struct {
- BaseRewriteOperation
-}
-
-func NewInsertBeforeOp(index int, text string, stream TokenStream) *InsertBeforeOp {
- return &InsertBeforeOp{BaseRewriteOperation: BaseRewriteOperation{
- index: index,
- text: text,
- opName: "InsertBeforeOp",
- tokens: stream,
- }}
-}
-
-func (op *InsertBeforeOp) Execute(buffer *bytes.Buffer) int {
- buffer.WriteString(op.text)
- if op.tokens.Get(op.index).GetTokenType() != TokenEOF {
- buffer.WriteString(op.tokens.Get(op.index).GetText())
- }
- return op.index + 1
-}
-
-func (op *InsertBeforeOp) String() string {
- return op.BaseRewriteOperation.String()
-}
-
-// InsertAfterOp distinguishes between insert after/before to do the "insert after" instructions
-// first and then the "insert before" instructions at same index. Implementation
-// of "insert after" is "insert before index+1".
-type InsertAfterOp struct {
- BaseRewriteOperation
-}
-
-func NewInsertAfterOp(index int, text string, stream TokenStream) *InsertAfterOp {
- return &InsertAfterOp{
- BaseRewriteOperation: BaseRewriteOperation{
- index: index + 1,
- text: text,
- tokens: stream,
- },
- }
-}
-
-func (op *InsertAfterOp) Execute(buffer *bytes.Buffer) int {
- buffer.WriteString(op.text)
- if op.tokens.Get(op.index).GetTokenType() != TokenEOF {
- buffer.WriteString(op.tokens.Get(op.index).GetText())
- }
- return op.index + 1
-}
-
-func (op *InsertAfterOp) String() string {
- return op.BaseRewriteOperation.String()
-}
-
-// ReplaceOp tries to replace range from x..y with (y-x)+1 ReplaceOp
-// instructions.
-type ReplaceOp struct {
- BaseRewriteOperation
- LastIndex int
-}
-
-func NewReplaceOp(from, to int, text string, stream TokenStream) *ReplaceOp {
- return &ReplaceOp{
- BaseRewriteOperation: BaseRewriteOperation{
- index: from,
- text: text,
- opName: "ReplaceOp",
- tokens: stream,
- },
- LastIndex: to,
- }
-}
-
-func (op *ReplaceOp) Execute(buffer *bytes.Buffer) int {
- if op.text != "" {
- buffer.WriteString(op.text)
- }
- return op.LastIndex + 1
-}
-
-func (op *ReplaceOp) String() string {
- if op.text == "" {
-  return fmt.Sprintf("<DeleteOp@%v..%v>",
-   op.tokens.Get(op.index), op.tokens.Get(op.LastIndex))
- }
- return fmt.Sprintf("<ReplaceOp@%v..%v:\"%s\">",
-  op.tokens.Get(op.index), op.tokens.Get(op.LastIndex), op.text)
-}
-
-type TokenStreamRewriter struct {
- //Our source stream
- tokens TokenStream
- // You may have multiple, named streams of rewrite operations.
- // I'm calling these things "programs."
- // Maps String (name) → rewrite (List)
- programs map[string][]RewriteOperation
- lastRewriteTokenIndexes map[string]int
-}
-
-func NewTokenStreamRewriter(tokens TokenStream) *TokenStreamRewriter {
- return &TokenStreamRewriter{
- tokens: tokens,
- programs: map[string][]RewriteOperation{
- DefaultProgramName: make([]RewriteOperation, 0, ProgramInitSize),
- },
- lastRewriteTokenIndexes: map[string]int{},
- }
-}
-
-func (tsr *TokenStreamRewriter) GetTokenStream() TokenStream {
- return tsr.tokens
-}
-
-// Rollback the instruction stream for a program so that
-// the indicated instruction (via instructionIndex) is no
-// longer in the stream. UNTESTED!
-func (tsr *TokenStreamRewriter) Rollback(programName string, instructionIndex int) {
- is, ok := tsr.programs[programName]
- if ok {
- tsr.programs[programName] = is[MinTokenIndex:instructionIndex]
- }
-}
-
-func (tsr *TokenStreamRewriter) RollbackDefault(instructionIndex int) {
- tsr.Rollback(DefaultProgramName, instructionIndex)
-}
-
-// DeleteProgram Reset the program so that no instructions exist
-func (tsr *TokenStreamRewriter) DeleteProgram(programName string) {
- tsr.Rollback(programName, MinTokenIndex) //TODO: double test on that cause lower bound is not included
-}
-
-func (tsr *TokenStreamRewriter) DeleteProgramDefault() {
- tsr.DeleteProgram(DefaultProgramName)
-}
-
-func (tsr *TokenStreamRewriter) InsertAfter(programName string, index int, text string) {
- // to insert after, just insert before next index (even if past end)
- var op RewriteOperation = NewInsertAfterOp(index, text, tsr.tokens)
- rewrites := tsr.GetProgram(programName)
- op.SetInstructionIndex(len(rewrites))
- tsr.AddToProgram(programName, op)
-}
-
-func (tsr *TokenStreamRewriter) InsertAfterDefault(index int, text string) {
- tsr.InsertAfter(DefaultProgramName, index, text)
-}
-
-func (tsr *TokenStreamRewriter) InsertAfterToken(programName string, token Token, text string) {
- tsr.InsertAfter(programName, token.GetTokenIndex(), text)
-}
-
-func (tsr *TokenStreamRewriter) InsertBefore(programName string, index int, text string) {
- var op RewriteOperation = NewInsertBeforeOp(index, text, tsr.tokens)
- rewrites := tsr.GetProgram(programName)
- op.SetInstructionIndex(len(rewrites))
- tsr.AddToProgram(programName, op)
-}
-
-func (tsr *TokenStreamRewriter) InsertBeforeDefault(index int, text string) {
- tsr.InsertBefore(DefaultProgramName, index, text)
-}
-
-func (tsr *TokenStreamRewriter) InsertBeforeToken(programName string, token Token, text string) {
- tsr.InsertBefore(programName, token.GetTokenIndex(), text)
-}
-
-func (tsr *TokenStreamRewriter) Replace(programName string, from, to int, text string) {
- if from > to || from < 0 || to < 0 || to >= tsr.tokens.Size() {
- panic(fmt.Sprintf("replace: range invalid: %d..%d(size=%d)",
- from, to, tsr.tokens.Size()))
- }
- var op RewriteOperation = NewReplaceOp(from, to, text, tsr.tokens)
- rewrites := tsr.GetProgram(programName)
- op.SetInstructionIndex(len(rewrites))
- tsr.AddToProgram(programName, op)
-}
-
-func (tsr *TokenStreamRewriter) ReplaceDefault(from, to int, text string) {
- tsr.Replace(DefaultProgramName, from, to, text)
-}
-
-func (tsr *TokenStreamRewriter) ReplaceDefaultPos(index int, text string) {
- tsr.ReplaceDefault(index, index, text)
-}
-
-func (tsr *TokenStreamRewriter) ReplaceToken(programName string, from, to Token, text string) {
- tsr.Replace(programName, from.GetTokenIndex(), to.GetTokenIndex(), text)
-}
-
-func (tsr *TokenStreamRewriter) ReplaceTokenDefault(from, to Token, text string) {
- tsr.ReplaceToken(DefaultProgramName, from, to, text)
-}
-
-func (tsr *TokenStreamRewriter) ReplaceTokenDefaultPos(index Token, text string) {
- tsr.ReplaceTokenDefault(index, index, text)
-}
-
-func (tsr *TokenStreamRewriter) Delete(programName string, from, to int) {
- tsr.Replace(programName, from, to, "")
-}
-
-func (tsr *TokenStreamRewriter) DeleteDefault(from, to int) {
- tsr.Delete(DefaultProgramName, from, to)
-}
-
-func (tsr *TokenStreamRewriter) DeleteDefaultPos(index int) {
- tsr.DeleteDefault(index, index)
-}
-
-func (tsr *TokenStreamRewriter) DeleteToken(programName string, from, to Token) {
- tsr.ReplaceToken(programName, from, to, "")
-}
-
-func (tsr *TokenStreamRewriter) DeleteTokenDefault(from, to Token) {
- tsr.DeleteToken(DefaultProgramName, from, to)
-}
-
-func (tsr *TokenStreamRewriter) GetLastRewriteTokenIndex(programName string) int {
- i, ok := tsr.lastRewriteTokenIndexes[programName]
- if !ok {
- return -1
- }
- return i
-}
-
-func (tsr *TokenStreamRewriter) GetLastRewriteTokenIndexDefault() int {
- return tsr.GetLastRewriteTokenIndex(DefaultProgramName)
-}
-
-  return s[3:] // strip the leading "|| "
- tsr.lastRewriteTokenIndexes[programName] = i
-}
-
-func (tsr *TokenStreamRewriter) InitializeProgram(name string) []RewriteOperation {
- is := make([]RewriteOperation, 0, ProgramInitSize)
- tsr.programs[name] = is
- return is
-}
-
-func (tsr *TokenStreamRewriter) AddToProgram(name string, op RewriteOperation) {
- is := tsr.GetProgram(name)
- is = append(is, op)
- tsr.programs[name] = is
-}
-
-func (tsr *TokenStreamRewriter) GetProgram(name string) []RewriteOperation {
- is, ok := tsr.programs[name]
- if !ok {
- is = tsr.InitializeProgram(name)
- }
- return is
-}
-
-// GetTextDefault returns the text from the original tokens altered per the
-// instructions given to this rewriter.
-func (tsr *TokenStreamRewriter) GetTextDefault() string {
- return tsr.GetText(
- DefaultProgramName,
- NewInterval(0, tsr.tokens.Size()-1))
-}
-
-// GetText returns the text from the original tokens altered per the
-// instructions given to this rewriter.
-func (tsr *TokenStreamRewriter) GetText(programName string, interval Interval) string {
- rewrites := tsr.programs[programName]
- start := interval.Start
- stop := interval.Stop
- // ensure start/end are in range
- stop = min(stop, tsr.tokens.Size()-1)
- start = max(start, 0)
- if len(rewrites) == 0 {
- return tsr.tokens.GetTextFromInterval(interval) // no instructions to execute
- }
- buf := bytes.Buffer{}
- // First, optimize instruction stream
- indexToOp := reduceToSingleOperationPerIndex(rewrites)
- // Walk buffer, executing instructions and emitting tokens
- for i := start; i <= stop && i < tsr.tokens.Size(); {
- op := indexToOp[i]
- delete(indexToOp, i) // remove so any left have index size-1
- t := tsr.tokens.Get(i)
- if op == nil {
- // no operation at that index, just dump token
- if t.GetTokenType() != TokenEOF {
- buf.WriteString(t.GetText())
- }
- i++ // move to next token
- } else {
- i = op.Execute(&buf) // execute operation and skip
- }
- }
- // include stuff after end if it's last index in buffer
- // So, if they did an insertAfter(lastValidIndex, "foo"), include
- // foo if end==lastValidIndex.
- if stop == tsr.tokens.Size()-1 {
- // Scan any remaining operations after last token
- // should be included (they will be inserts).
- for _, op := range indexToOp {
- if op.GetIndex() >= tsr.tokens.Size()-1 {
- buf.WriteString(op.GetText())
- }
- }
- }
- return buf.String()
-}
-
-// reduceToSingleOperationPerIndex combines operations and reports invalid operations (like
-// overlapping replaces that are not completely nested). Inserts to
-// same index need to be combined etc...
-//
-// Here are the cases:
-//
-// I.i.u I.j.v leave alone, non-overlapping
-// I.i.u I.i.v combine: Iivu
-//
-// R.i-j.u R.x-y.v | i-j in x-y delete first R
-// R.i-j.u R.i-j.v delete first R
-// R.i-j.u R.x-y.v | x-y in i-j ERROR
-// R.i-j.u R.x-y.v | boundaries overlap ERROR
-//
-// Delete special case of replace (text==null):
-// D.i-j.u D.x-y.v | boundaries overlap combine to max(min)..max(right)
-//
-// I.i.u R.x-y.v | i in (x+1)-y delete I (since insert before
-// we're not deleting i)
-// I.i.u R.x-y.v | i not in (x+1)-y leave alone, non-overlapping
-// R.x-y.v I.i.u | i in x-y ERROR
-// R.x-y.v I.x.u R.x-y.uv (combine, delete I)
-// R.x-y.v I.i.u | i not in x-y leave alone, non-overlapping
-//
-// I.i.u = insert u before op @ index i
-// R.x-y.u = replace x-y indexed tokens with u
-//
-// First we need to examine replaces. For any replace op:
-//
-// 1. wipe out any insertions before op within that range.
-// 2. Drop any replace op before that is contained completely within
-// that range.
-// 3. Throw exception upon boundary overlap with any previous replace.
-//
-// Then we can deal with inserts:
-//
-// 1. for any inserts to same index, combine even if not adjacent.
-// 2. for any prior replace with same left boundary, combine this
-// insert with replace and delete this 'replace'.
-// 3. throw exception if index in same range as previous replace
-//
-// Don't actually delete; make op null in list. Easier to walk list.
-// Later we can throw as we add to index → op map.
-//
-// Note that I.2 R.2-2 will wipe out I.2 even though, technically, the
-// inserted stuff would be before the 'replace' range. But, if you
-// add tokens in front of a method body '{' and then delete the method
-// body, I think the stuff before the '{' you added should disappear too.
-//
-// The func returns a map from token index to operation.
-func reduceToSingleOperationPerIndex(rewrites []RewriteOperation) map[int]RewriteOperation {
- // WALK REPLACES
- for i := 0; i < len(rewrites); i++ {
- op := rewrites[i]
- if op == nil {
- continue
- }
- rop, ok := op.(*ReplaceOp)
- if !ok {
- continue
- }
- // Wipe prior inserts within range
- for j := 0; j < i && j < len(rewrites); j++ {
- if iop, ok := rewrites[j].(*InsertBeforeOp); ok {
- if iop.index == rop.index {
- // E.g., insert before 2, delete 2..2; update replace
- // text to include insert before, kill insert
- rewrites[iop.instructionIndex] = nil
- if rop.text != "" {
- rop.text = iop.text + rop.text
- } else {
- rop.text = iop.text
- }
- } else if iop.index > rop.index && iop.index <= rop.LastIndex {
- // delete insert as it's a no-op.
- rewrites[iop.instructionIndex] = nil
- }
- }
- }
- // Drop any prior replaces contained within
- for j := 0; j < i && j < len(rewrites); j++ {
- if prevop, ok := rewrites[j].(*ReplaceOp); ok {
- if prevop.index >= rop.index && prevop.LastIndex <= rop.LastIndex {
- // delete replace as it's a no-op.
- rewrites[prevop.instructionIndex] = nil
- continue
- }
- // throw exception unless disjoint or identical
- disjoint := prevop.LastIndex < rop.index || prevop.index > rop.LastIndex
- // Delete special case of replace (text==null):
- // D.i-j.u D.x-y.v | boundaries overlap combine to max(min)..max(right)
- if prevop.text == "" && rop.text == "" && !disjoint {
- rewrites[prevop.instructionIndex] = nil
- rop.index = min(prevop.index, rop.index)
- rop.LastIndex = max(prevop.LastIndex, rop.LastIndex)
- } else if !disjoint {
- panic("replace op boundaries of " + rop.String() + " overlap with previous " + prevop.String())
- }
- }
- }
- }
- // WALK INSERTS
- for i := 0; i < len(rewrites); i++ {
- op := rewrites[i]
- if op == nil {
- continue
- }
- //hack to replicate inheritance in composition
- _, iok := rewrites[i].(*InsertBeforeOp)
- _, aok := rewrites[i].(*InsertAfterOp)
- if !iok && !aok {
- continue
- }
- iop := rewrites[i]
- // combine current insert with prior if any at same index
- // deviating a bit from TokenStreamRewriter.java - hard to incorporate inheritance logic
- for j := 0; j < i && j < len(rewrites); j++ {
- if nextIop, ok := rewrites[j].(*InsertAfterOp); ok {
- if nextIop.index == iop.GetIndex() {
- iop.SetText(nextIop.text + iop.GetText())
- rewrites[j] = nil
- }
- }
- if prevIop, ok := rewrites[j].(*InsertBeforeOp); ok {
- if prevIop.index == iop.GetIndex() {
- iop.SetText(iop.GetText() + prevIop.text)
- rewrites[prevIop.instructionIndex] = nil
- }
- }
- }
- // look for replaces where iop.index is in range; error
- for j := 0; j < i && j < len(rewrites); j++ {
- if rop, ok := rewrites[j].(*ReplaceOp); ok {
- if iop.GetIndex() == rop.index {
- rop.text = iop.GetText() + rop.text
- rewrites[i] = nil
- continue
- }
- if iop.GetIndex() >= rop.index && iop.GetIndex() <= rop.LastIndex {
- panic("insert op " + iop.String() + " within boundaries of previous " + rop.String())
- }
- }
- }
- }
- m := map[int]RewriteOperation{}
- for i := 0; i < len(rewrites); i++ {
- op := rewrites[i]
- if op == nil {
- continue
- }
- if _, ok := m[op.GetIndex()]; ok {
- panic("should only be one op per index")
- }
- m[op.GetIndex()] = op
- }
- return m
-}
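A concrete instance of the first replace rule above (an insert-before at the same index folds into the replace); a sketch assuming the stream holds at least a few tokens:

package example

import "github.com/antlr4-go/antlr/v4"

// foldExample queues an InsertBeforeOp and a ReplaceOp at the same index; the
// reduction step folds them into one ReplaceOp whose text becomes "<ins><rep>",
// so the rendered output carries that combined text once at token index 2.
func foldExample(tokens antlr.TokenStream) string {
	rw := antlr.NewTokenStreamRewriter(tokens)
	rw.InsertBeforeDefault(2, "<ins>")
	rw.ReplaceDefault(2, 2, "<rep>")
	return rw.GetTextDefault()
}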
-
-/*
- Quick fixing Go lack of overloads
-*/
-
-func max(a, b int) int {
- if a > b {
- return a
- } else {
- return b
- }
-}
-func min(a, b int) int {
- if a < b {
- return a
- } else {
- return b
- }
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/trace_listener.go b/vendor/github.com/antlr4-go/antlr/v4/trace_listener.go
deleted file mode 100644
index 7b663bf84..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/trace_listener.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import "fmt"
-
-type TraceListener struct {
- parser *BaseParser
-}
-
-func NewTraceListener(parser *BaseParser) *TraceListener {
- tl := new(TraceListener)
- tl.parser = parser
- return tl
-}
-
-func (t *TraceListener) VisitErrorNode(_ ErrorNode) {
-}
-
-func (t *TraceListener) EnterEveryRule(ctx ParserRuleContext) {
- fmt.Println("enter " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser.input.LT(1).GetText())
-}
-
-func (t *TraceListener) VisitTerminal(node TerminalNode) {
- fmt.Println("consume " + fmt.Sprint(node.GetSymbol()) + " rule " + t.parser.GetRuleNames()[t.parser.ctx.GetRuleIndex()])
-}
-
-func (t *TraceListener) ExitEveryRule(ctx ParserRuleContext) {
- fmt.Println("exit " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser.input.LT(1).GetText())
-}
diff --git a/vendor/github.com/antlr4-go/antlr/v4/transition.go b/vendor/github.com/antlr4-go/antlr/v4/transition.go
deleted file mode 100644
index 313b0fc12..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/transition.go
+++ /dev/null
@@ -1,439 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "fmt"
- "strconv"
- "strings"
-)
-
-// atom, set, epsilon, action, predicate, rule transitions.
-//
-//
-// This is a one way link. It emanates from a state (usually via a list of
-// transitions) and has a target state.
-//
-//
-// Since we never have to change the ATN transitions once we construct it,
-// we can fix these transitions as specific classes. The DFA transitions
-// on the other hand need to update the labels as it adds transitions to
-// the states. We'll use the term Edge for the DFA to distinguish them from
-// ATN transitions.
-
-type Transition interface {
- getTarget() ATNState
- setTarget(ATNState)
- getIsEpsilon() bool
- getLabel() *IntervalSet
- getSerializationType() int
- Matches(int, int, int) bool
-}
-
-type BaseTransition struct {
- target ATNState
- isEpsilon bool
- label int
- intervalSet *IntervalSet
- serializationType int
-}
-
-func NewBaseTransition(target ATNState) *BaseTransition {
-
- if target == nil {
- panic("target cannot be nil.")
- }
-
- t := new(BaseTransition)
-
- t.target = target
- // Are we epsilon, action, sempred?
- t.isEpsilon = false
- t.intervalSet = nil
-
- return t
-}
-
-func (t *BaseTransition) getTarget() ATNState {
- return t.target
-}
-
-func (t *BaseTransition) setTarget(s ATNState) {
- t.target = s
-}
-
-func (t *BaseTransition) getIsEpsilon() bool {
- return t.isEpsilon
-}
-
-func (t *BaseTransition) getLabel() *IntervalSet {
- return t.intervalSet
-}
-
-func (t *BaseTransition) getSerializationType() int {
- return t.serializationType
-}
-
-func (t *BaseTransition) Matches(_, _, _ int) bool {
- panic("Not implemented")
-}
-
-const (
- TransitionEPSILON = 1
- TransitionRANGE = 2
- TransitionRULE = 3
- TransitionPREDICATE = 4 // e.g., {isType(input.LT(1))}?
- TransitionATOM = 5
- TransitionACTION = 6
- TransitionSET = 7 // ~(A|B) or ~atom, wildcard, which convert to next 2
- TransitionNOTSET = 8
- TransitionWILDCARD = 9
- TransitionPRECEDENCE = 10
-)
-
-//goland:noinspection GoUnusedGlobalVariable
-var TransitionserializationNames = []string{
- "INVALID",
- "EPSILON",
- "RANGE",
- "RULE",
- "PREDICATE",
- "ATOM",
- "ACTION",
- "SET",
- "NOT_SET",
- "WILDCARD",
- "PRECEDENCE",
-}
-
-//var TransitionserializationTypes struct {
-// EpsilonTransition int
-// RangeTransition int
-// RuleTransition int
-// PredicateTransition int
-// AtomTransition int
-// ActionTransition int
-// SetTransition int
-// NotSetTransition int
-// WildcardTransition int
-// PrecedencePredicateTransition int
-//}{
-// TransitionEPSILON,
-// TransitionRANGE,
-// TransitionRULE,
-// TransitionPREDICATE,
-// TransitionATOM,
-// TransitionACTION,
-// TransitionSET,
-// TransitionNOTSET,
-// TransitionWILDCARD,
-// TransitionPRECEDENCE
-//}
-
-// AtomTransition
-// TODO: make all transitions sets? no, should remove set edges
-type AtomTransition struct {
- BaseTransition
-}
-
-func NewAtomTransition(target ATNState, intervalSet int) *AtomTransition {
- t := &AtomTransition{
- BaseTransition: BaseTransition{
- target: target,
- serializationType: TransitionATOM,
- label: intervalSet,
- isEpsilon: false,
- },
- }
- t.intervalSet = t.makeLabel()
-
- return t
-}
-
-func (t *AtomTransition) makeLabel() *IntervalSet {
- s := NewIntervalSet()
- s.addOne(t.label)
- return s
-}
-
-func (t *AtomTransition) Matches(symbol, _, _ int) bool {
- return t.label == symbol
-}
-
-func (t *AtomTransition) String() string {
- return strconv.Itoa(t.label)
-}
-
-type RuleTransition struct {
- BaseTransition
- followState ATNState
- ruleIndex, precedence int
-}
-
-func NewRuleTransition(ruleStart ATNState, ruleIndex, precedence int, followState ATNState) *RuleTransition {
- return &RuleTransition{
- BaseTransition: BaseTransition{
- target: ruleStart,
- isEpsilon: true,
- serializationType: TransitionRULE,
- },
- ruleIndex: ruleIndex,
- precedence: precedence,
- followState: followState,
- }
-}
-
-func (t *RuleTransition) Matches(_, _, _ int) bool {
- return false
-}
-
-type EpsilonTransition struct {
- BaseTransition
- outermostPrecedenceReturn int
-}
-
-func NewEpsilonTransition(target ATNState, outermostPrecedenceReturn int) *EpsilonTransition {
- return &EpsilonTransition{
- BaseTransition: BaseTransition{
- target: target,
- serializationType: TransitionEPSILON,
- isEpsilon: true,
- },
- outermostPrecedenceReturn: outermostPrecedenceReturn,
- }
-}
-
-func (t *EpsilonTransition) Matches(_, _, _ int) bool {
- return false
-}
-
-func (t *EpsilonTransition) String() string {
- return "epsilon"
-}
-
-type RangeTransition struct {
- BaseTransition
- start, stop int
-}
-
-func NewRangeTransition(target ATNState, start, stop int) *RangeTransition {
- t := &RangeTransition{
- BaseTransition: BaseTransition{
- target: target,
- serializationType: TransitionRANGE,
- isEpsilon: false,
- },
- start: start,
- stop: stop,
- }
- t.intervalSet = t.makeLabel()
- return t
-}
-
-func (t *RangeTransition) makeLabel() *IntervalSet {
- s := NewIntervalSet()
- s.addRange(t.start, t.stop)
- return s
-}
-
-func (t *RangeTransition) Matches(symbol, _, _ int) bool {
- return symbol >= t.start && symbol <= t.stop
-}
-
-func (t *RangeTransition) String() string {
- var sb strings.Builder
- sb.WriteByte('\'')
- sb.WriteRune(rune(t.start))
- sb.WriteString("'..'")
- sb.WriteRune(rune(t.stop))
- sb.WriteByte('\'')
- return sb.String()
-}
-
-type AbstractPredicateTransition interface {
- Transition
- IAbstractPredicateTransitionFoo()
-}
-
-type BaseAbstractPredicateTransition struct {
- BaseTransition
-}
-
-func NewBasePredicateTransition(target ATNState) *BaseAbstractPredicateTransition {
- return &BaseAbstractPredicateTransition{
- BaseTransition: BaseTransition{
- target: target,
- },
- }
-}
-
-func (a *BaseAbstractPredicateTransition) IAbstractPredicateTransitionFoo() {}
-
-type PredicateTransition struct {
- BaseAbstractPredicateTransition
- isCtxDependent bool
- ruleIndex, predIndex int
-}
-
-func NewPredicateTransition(target ATNState, ruleIndex, predIndex int, isCtxDependent bool) *PredicateTransition {
- return &PredicateTransition{
- BaseAbstractPredicateTransition: BaseAbstractPredicateTransition{
- BaseTransition: BaseTransition{
- target: target,
- serializationType: TransitionPREDICATE,
- isEpsilon: true,
- },
- },
- isCtxDependent: isCtxDependent,
- ruleIndex: ruleIndex,
- predIndex: predIndex,
- }
-}
-
-func (t *PredicateTransition) Matches(_, _, _ int) bool {
- return false
-}
-
-func (t *PredicateTransition) getPredicate() *Predicate {
- return NewPredicate(t.ruleIndex, t.predIndex, t.isCtxDependent)
-}
-
-func (t *PredicateTransition) String() string {
- return "pred_" + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.predIndex)
-}
-
-type ActionTransition struct {
- BaseTransition
- isCtxDependent bool
- ruleIndex, actionIndex, predIndex int
-}
-
-func NewActionTransition(target ATNState, ruleIndex, actionIndex int, isCtxDependent bool) *ActionTransition {
- return &ActionTransition{
- BaseTransition: BaseTransition{
- target: target,
- serializationType: TransitionACTION,
- isEpsilon: true,
- },
- isCtxDependent: isCtxDependent,
- ruleIndex: ruleIndex,
- actionIndex: actionIndex,
- }
-}
-
-func (t *ActionTransition) Matches(_, _, _ int) bool {
- return false
-}
-
-func (t *ActionTransition) String() string {
- return "action_" + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.actionIndex)
-}
-
-type SetTransition struct {
- BaseTransition
-}
-
-func NewSetTransition(target ATNState, set *IntervalSet) *SetTransition {
- t := &SetTransition{
- BaseTransition: BaseTransition{
- target: target,
- serializationType: TransitionSET,
- },
- }
-
- if set != nil {
- t.intervalSet = set
- } else {
- t.intervalSet = NewIntervalSet()
- t.intervalSet.addOne(TokenInvalidType)
- }
- return t
-}
-
-func (t *SetTransition) Matches(symbol, _, _ int) bool {
- return t.intervalSet.contains(symbol)
-}
-
-func (t *SetTransition) String() string {
- return t.intervalSet.String()
-}
-
-type NotSetTransition struct {
- SetTransition
-}
-
-func NewNotSetTransition(target ATNState, set *IntervalSet) *NotSetTransition {
- t := &NotSetTransition{
- SetTransition: SetTransition{
- BaseTransition: BaseTransition{
- target: target,
- serializationType: TransitionNOTSET,
- },
- },
- }
- if set != nil {
- t.intervalSet = set
- } else {
- t.intervalSet = NewIntervalSet()
- t.intervalSet.addOne(TokenInvalidType)
- }
-
- return t
-}
-
-func (t *NotSetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
- return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !t.intervalSet.contains(symbol)
-}
-
-func (t *NotSetTransition) String() string {
- return "~" + t.intervalSet.String()
-}
-
-type WildcardTransition struct {
- BaseTransition
-}
-
-func NewWildcardTransition(target ATNState) *WildcardTransition {
- return &WildcardTransition{
- BaseTransition: BaseTransition{
- target: target,
- serializationType: TransitionWILDCARD,
- },
- }
-}
-
-func (t *WildcardTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
- return symbol >= minVocabSymbol && symbol <= maxVocabSymbol
-}
-
-func (t *WildcardTransition) String() string {
- return "."
-}
-
-type PrecedencePredicateTransition struct {
- BaseAbstractPredicateTransition
- precedence int
-}
-
-func NewPrecedencePredicateTransition(target ATNState, precedence int) *PrecedencePredicateTransition {
- return &PrecedencePredicateTransition{
- BaseAbstractPredicateTransition: BaseAbstractPredicateTransition{
- BaseTransition: BaseTransition{
- target: target,
- serializationType: TransitionPRECEDENCE,
- isEpsilon: true,
- },
- },
- precedence: precedence,
- }
-}
-
-func (t *PrecedencePredicateTransition) Matches(_, _, _ int) bool {
- return false
-}
-
-func (t *PrecedencePredicateTransition) getPredicate() *PrecedencePredicate {
- return NewPrecedencePredicate(t.precedence)
-}
-
-func (t *PrecedencePredicateTransition) String() string {
- return fmt.Sprint(t.precedence) + " >= _p"
-}
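
The transition types above fall into two groups: epsilon-style transitions (rule, epsilon, action, predicate, precedence) always return false from Matches, while label-carrying transitions (atom, range, set, not-set, wildcard) match against the input symbol. A minimal package-internal sketch of that contract in use, assuming ATNState exposes GetTransitions() []Transition as elsewhere in this runtime; the helper name is illustrative only:

    // matchingTransitions collects the transitions leaving s that can consume
    // symbol. Epsilon-style transitions never match, so only label-carrying
    // transitions survive the filter.
    func matchingTransitions(s ATNState, symbol, minVocab, maxVocab int) []Transition {
        var out []Transition
        for _, t := range s.GetTransitions() {
            if t.Matches(symbol, minVocab, maxVocab) {
                out = append(out, t)
            }
        }
        return out
    }
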
diff --git a/vendor/github.com/antlr4-go/antlr/v4/tree.go b/vendor/github.com/antlr4-go/antlr/v4/tree.go
deleted file mode 100644
index c288420fb..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/tree.go
+++ /dev/null
@@ -1,304 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-// The basic notion of a tree is a node that has a parent, a payload, and a list of children.
-// It is the most abstract interface for all the trees used by ANTLR.
-///
-
-var TreeInvalidInterval = NewInterval(-1, -2)
-
-type Tree interface {
- GetParent() Tree
- SetParent(Tree)
- GetPayload() interface{}
- GetChild(i int) Tree
- GetChildCount() int
- GetChildren() []Tree
-}
-
-type SyntaxTree interface {
- Tree
- GetSourceInterval() Interval
-}
-
-type ParseTree interface {
- SyntaxTree
- Accept(Visitor ParseTreeVisitor) interface{}
- GetText() string
- ToStringTree([]string, Recognizer) string
-}
-
-type RuleNode interface {
- ParseTree
- GetRuleContext() RuleContext
-}
-
-type TerminalNode interface {
- ParseTree
- GetSymbol() Token
-}
-
-type ErrorNode interface {
- TerminalNode
-
- errorNode()
-}
-
-type ParseTreeVisitor interface {
- Visit(tree ParseTree) interface{}
- VisitChildren(node RuleNode) interface{}
- VisitTerminal(node TerminalNode) interface{}
- VisitErrorNode(node ErrorNode) interface{}
-}
-
-type BaseParseTreeVisitor struct{}
-
-var _ ParseTreeVisitor = &BaseParseTreeVisitor{}
-
-func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return tree.Accept(v) }
-func (v *BaseParseTreeVisitor) VisitChildren(_ RuleNode) interface{} { return nil }
-func (v *BaseParseTreeVisitor) VisitTerminal(_ TerminalNode) interface{} { return nil }
-func (v *BaseParseTreeVisitor) VisitErrorNode(_ ErrorNode) interface{} { return nil }
-
-// TODO: Implement this?
-//func (this ParseTreeVisitor) Visit(ctx) {
-// if (Utils.isArray(ctx)) {
-// self := this
-// return ctx.map(function(child) { return VisitAtom(self, child)})
-// } else {
-// return VisitAtom(this, ctx)
-// }
-//}
-//
-//func VisitAtom(Visitor, ctx) {
-// if (ctx.parser == nil) { //is terminal
-// return
-// }
-//
-// name := ctx.parser.ruleNames[ctx.ruleIndex]
-// funcName := "Visit" + Utils.titleCase(name)
-//
-// return Visitor[funcName](ctx)
-//}
-
-type ParseTreeListener interface {
- VisitTerminal(node TerminalNode)
- VisitErrorNode(node ErrorNode)
- EnterEveryRule(ctx ParserRuleContext)
- ExitEveryRule(ctx ParserRuleContext)
-}
-
-type BaseParseTreeListener struct{}
-
-var _ ParseTreeListener = &BaseParseTreeListener{}
-
-func (l *BaseParseTreeListener) VisitTerminal(_ TerminalNode) {}
-func (l *BaseParseTreeListener) VisitErrorNode(_ ErrorNode) {}
-func (l *BaseParseTreeListener) EnterEveryRule(_ ParserRuleContext) {}
-func (l *BaseParseTreeListener) ExitEveryRule(_ ParserRuleContext) {}
-
-type TerminalNodeImpl struct {
- parentCtx RuleContext
- symbol Token
-}
-
-var _ TerminalNode = &TerminalNodeImpl{}
-
-func NewTerminalNodeImpl(symbol Token) *TerminalNodeImpl {
- tn := new(TerminalNodeImpl)
-
- tn.parentCtx = nil
- tn.symbol = symbol
-
- return tn
-}
-
-func (t *TerminalNodeImpl) GetChild(_ int) Tree {
- return nil
-}
-
-func (t *TerminalNodeImpl) GetChildren() []Tree {
- return nil
-}
-
-func (t *TerminalNodeImpl) SetChildren(_ []Tree) {
- panic("Cannot set children on terminal node")
-}
-
-func (t *TerminalNodeImpl) GetSymbol() Token {
- return t.symbol
-}
-
-func (t *TerminalNodeImpl) GetParent() Tree {
- return t.parentCtx
-}
-
-func (t *TerminalNodeImpl) SetParent(tree Tree) {
- t.parentCtx = tree.(RuleContext)
-}
-
-func (t *TerminalNodeImpl) GetPayload() interface{} {
- return t.symbol
-}
-
-func (t *TerminalNodeImpl) GetSourceInterval() Interval {
- if t.symbol == nil {
- return TreeInvalidInterval
- }
- tokenIndex := t.symbol.GetTokenIndex()
- return NewInterval(tokenIndex, tokenIndex)
-}
-
-func (t *TerminalNodeImpl) GetChildCount() int {
- return 0
-}
-
-func (t *TerminalNodeImpl) Accept(v ParseTreeVisitor) interface{} {
- return v.VisitTerminal(t)
-}
-
-func (t *TerminalNodeImpl) GetText() string {
- return t.symbol.GetText()
-}
-
-func (t *TerminalNodeImpl) String() string {
- if t.symbol.GetTokenType() == TokenEOF {
- return ""
- }
-
- return t.symbol.GetText()
-}
-
-func (t *TerminalNodeImpl) ToStringTree(_ []string, _ Recognizer) string {
- return t.String()
-}
-
-// Represents a token that was consumed during re-synchronization
-// rather than during a valid Match operation. For example,
-// we will create this kind of a node during single token insertion
-// and deletion as well as during "consume until error recovery set"
-// upon no viable alternative exceptions.
-
-type ErrorNodeImpl struct {
- *TerminalNodeImpl
-}
-
-var _ ErrorNode = &ErrorNodeImpl{}
-
-func NewErrorNodeImpl(token Token) *ErrorNodeImpl {
- en := new(ErrorNodeImpl)
- en.TerminalNodeImpl = NewTerminalNodeImpl(token)
- return en
-}
-
-func (e *ErrorNodeImpl) errorNode() {}
-
-func (e *ErrorNodeImpl) Accept(v ParseTreeVisitor) interface{} {
- return v.VisitErrorNode(e)
-}
-
-type ParseTreeWalker struct {
-}
-
-func NewParseTreeWalker() *ParseTreeWalker {
- return new(ParseTreeWalker)
-}
-
-// Walk performs a walk on the given parse tree starting at the root and going down recursively
-// with depth-first search. On each node, [EnterRule] is called before
-// recursively walking down into child nodes, then [ExitRule] is called after the recursive call to wind up.
-func (p *ParseTreeWalker) Walk(listener ParseTreeListener, t Tree) {
- switch tt := t.(type) {
- case ErrorNode:
- listener.VisitErrorNode(tt)
- case TerminalNode:
- listener.VisitTerminal(tt)
- default:
- p.EnterRule(listener, t.(RuleNode))
- for i := 0; i < t.GetChildCount(); i++ {
- child := t.GetChild(i)
- p.Walk(listener, child)
- }
- p.ExitRule(listener, t.(RuleNode))
- }
-}
-
-// EnterRule enters a grammar rule by first triggering the generic event [ParseTreeListener].[EnterEveryRule]
-// then by triggering the event specific to the given parse tree node
-func (p *ParseTreeWalker) EnterRule(listener ParseTreeListener, r RuleNode) {
- ctx := r.GetRuleContext().(ParserRuleContext)
- listener.EnterEveryRule(ctx)
- ctx.EnterRule(listener)
-}
-
-// ExitRule exits a grammar rule by first triggering the event specific to the given parse tree node
-// then by triggering the generic event [ParseTreeListener].ExitEveryRule
-func (p *ParseTreeWalker) ExitRule(listener ParseTreeListener, r RuleNode) {
- ctx := r.GetRuleContext().(ParserRuleContext)
- ctx.ExitRule(listener)
- listener.ExitEveryRule(ctx)
-}
-
-//goland:noinspection GoUnusedGlobalVariable
-var ParseTreeWalkerDefault = NewParseTreeWalker()
-
-type IterativeParseTreeWalker struct {
- *ParseTreeWalker
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func NewIterativeParseTreeWalker() *IterativeParseTreeWalker {
- return new(IterativeParseTreeWalker)
-}
-
-func (i *IterativeParseTreeWalker) Walk(listener ParseTreeListener, t Tree) {
- var stack []Tree
- var indexStack []int
- currentNode := t
- currentIndex := 0
-
- for currentNode != nil {
- // pre-order visit
- switch tt := currentNode.(type) {
- case ErrorNode:
- listener.VisitErrorNode(tt)
- case TerminalNode:
- listener.VisitTerminal(tt)
- default:
- i.EnterRule(listener, currentNode.(RuleNode))
- }
- // Move down to first child, if exists
- if currentNode.GetChildCount() > 0 {
- stack = append(stack, currentNode)
- indexStack = append(indexStack, currentIndex)
- currentIndex = 0
- currentNode = currentNode.GetChild(0)
- continue
- }
-
- for {
- // post-order visit
- if ruleNode, ok := currentNode.(RuleNode); ok {
- i.ExitRule(listener, ruleNode)
- }
- // No parent, so no siblings
- if len(stack) == 0 {
- currentNode = nil
- currentIndex = 0
- break
- }
- // Move to next sibling if possible
- currentIndex++
- if stack[len(stack)-1].GetChildCount() > currentIndex {
- currentNode = stack[len(stack)-1].GetChild(currentIndex)
- break
- }
- // No next sibling, so move up
- currentNode, stack = stack[len(stack)-1], stack[:len(stack)-1]
- currentIndex, indexStack = indexStack[len(indexStack)-1], indexStack[:len(indexStack)-1]
- }
- }
-}
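
The listener and walker types above are the extension points generated parsers rely on: a listener embeds BaseParseTreeListener so it only overrides the callbacks it needs, and ParseTreeWalkerDefault drives the depth-first traversal. A minimal sketch, not part of the vendored file, with illustrative names:

    // ruleCounter overrides only EnterEveryRule; the embedded
    // BaseParseTreeListener supplies no-op implementations for the rest.
    type ruleCounter struct {
        BaseParseTreeListener
        count int
    }

    func (r *ruleCounter) EnterEveryRule(_ ParserRuleContext) { r.count++ }

    // countRules walks the tree depth-first and reports how many rule
    // contexts were entered.
    func countRules(tree Tree) int {
        c := &ruleCounter{}
        ParseTreeWalkerDefault.Walk(c, tree)
        return c.count
    }
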
diff --git a/vendor/github.com/antlr4-go/antlr/v4/trees.go b/vendor/github.com/antlr4-go/antlr/v4/trees.go
deleted file mode 100644
index f44c05d81..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/trees.go
+++ /dev/null
@@ -1,142 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import "fmt"
-
-/** A set of utility routines useful for all kinds of ANTLR trees. */
-
-// TreesStringTree renders a whole tree in LISP form as a string. [TreesGetNodeText] is used on the
-// node payloads to get the text for the nodes. Detects parse trees and extracts data appropriately.
-func TreesStringTree(tree Tree, ruleNames []string, recog Recognizer) string {
-
- if recog != nil {
- ruleNames = recog.GetRuleNames()
- }
-
- s := TreesGetNodeText(tree, ruleNames, nil)
-
- s = EscapeWhitespace(s, false)
- c := tree.GetChildCount()
- if c == 0 {
- return s
- }
- res := "(" + s + " "
- if c > 0 {
- s = TreesStringTree(tree.GetChild(0), ruleNames, nil)
- res += s
- }
- for i := 1; i < c; i++ {
- s = TreesStringTree(tree.GetChild(i), ruleNames, nil)
- res += " " + s
- }
- res += ")"
- return res
-}
-
-func TreesGetNodeText(t Tree, ruleNames []string, recog Parser) string {
- if recog != nil {
- ruleNames = recog.GetRuleNames()
- }
-
- if ruleNames != nil {
- switch t2 := t.(type) {
- case RuleNode:
- t3 := t2.GetRuleContext()
- altNumber := t3.GetAltNumber()
-
- if altNumber != ATNInvalidAltNumber {
- return fmt.Sprintf("%s:%d", ruleNames[t3.GetRuleIndex()], altNumber)
- }
- return ruleNames[t3.GetRuleIndex()]
- case ErrorNode:
- return fmt.Sprint(t2)
- case TerminalNode:
- if t2.GetSymbol() != nil {
- return t2.GetSymbol().GetText()
- }
- }
- }
-
- // no recognition for rule names
- payload := t.GetPayload()
- if p2, ok := payload.(Token); ok {
- return p2.GetText()
- }
-
- return fmt.Sprint(t.GetPayload())
-}
-
-// TreesGetChildren returns an ordered list of all children of this node
-//
-//goland:noinspection GoUnusedExportedFunction
-func TreesGetChildren(t Tree) []Tree {
- list := make([]Tree, 0)
- for i := 0; i < t.GetChildCount(); i++ {
- list = append(list, t.GetChild(i))
- }
- return list
-}
-
-// TreesgetAncestors returns a list of all ancestors of this node. The first node of the list is the root
-// and the last node is the parent of this node.
-//
-//goland:noinspection GoUnusedExportedFunction
-func TreesgetAncestors(t Tree) []Tree {
- ancestors := make([]Tree, 0)
- t = t.GetParent()
- for t != nil {
- f := []Tree{t}
- ancestors = append(f, ancestors...)
- t = t.GetParent()
- }
- return ancestors
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func TreesFindAllTokenNodes(t ParseTree, ttype int) []ParseTree {
- return TreesfindAllNodes(t, ttype, true)
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func TreesfindAllRuleNodes(t ParseTree, ruleIndex int) []ParseTree {
- return TreesfindAllNodes(t, ruleIndex, false)
-}
-
-func TreesfindAllNodes(t ParseTree, index int, findTokens bool) []ParseTree {
- nodes := make([]ParseTree, 0)
- treesFindAllNodes(t, index, findTokens, &nodes)
- return nodes
-}
-
-func treesFindAllNodes(t ParseTree, index int, findTokens bool, nodes *[]ParseTree) {
- // check this node (the root) first
-
- t2, ok := t.(TerminalNode)
- t3, ok2 := t.(ParserRuleContext)
-
- if findTokens && ok {
- if t2.GetSymbol().GetTokenType() == index {
- *nodes = append(*nodes, t2)
- }
- } else if !findTokens && ok2 {
- if t3.GetRuleIndex() == index {
- *nodes = append(*nodes, t3)
- }
- }
- // check children
- for i := 0; i < t.GetChildCount(); i++ {
- treesFindAllNodes(t.GetChild(i).(ParseTree), index, findTokens, nodes)
- }
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func TreesDescendants(t ParseTree) []ParseTree {
- nodes := []ParseTree{t}
- for i := 0; i < t.GetChildCount(); i++ {
- nodes = append(nodes, TreesDescendants(t.GetChild(i).(ParseTree))...)
- }
- return nodes
-}
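
These helpers compose naturally: TreesStringTree produces the LISP-style rendering and TreesFindAllTokenNodes gathers matching terminal nodes for inspection. A short package-internal sketch with an illustrative name:

    // summarizeTree returns the LISP-style rendering of a parse tree together
    // with the number of terminal nodes carrying the given token type.
    func summarizeTree(t ParseTree, ruleNames []string, ttype int) (string, int) {
        rendered := TreesStringTree(t, ruleNames, nil)
        tokens := TreesFindAllTokenNodes(t, ttype)
        return rendered, len(tokens)
    }
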
diff --git a/vendor/github.com/antlr4-go/antlr/v4/utils.go b/vendor/github.com/antlr4-go/antlr/v4/utils.go
deleted file mode 100644
index 733d7df9d..000000000
--- a/vendor/github.com/antlr4-go/antlr/v4/utils.go
+++ /dev/null
@@ -1,328 +0,0 @@
-// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved.
-// Use of this file is governed by the BSD 3-clause license that
-// can be found in the LICENSE.txt file in the project root.
-
-package antlr
-
-import (
- "bytes"
- "errors"
- "fmt"
- "math/bits"
- "os"
- "strconv"
- "strings"
- "syscall"
-)
-
-func intMin(a, b int) int {
- if a < b {
- return a
- }
- return b
-}
-
-func intMax(a, b int) int {
- if a > b {
- return a
- }
- return b
-}
-
-// A simple integer stack
-
-type IntStack []int
-
-var ErrEmptyStack = errors.New("stack is empty")
-
-func (s *IntStack) Pop() (int, error) {
- l := len(*s) - 1
- if l < 0 {
- return 0, ErrEmptyStack
- }
- v := (*s)[l]
- *s = (*s)[0:l]
- return v, nil
-}
-
-func (s *IntStack) Push(e int) {
- *s = append(*s, e)
-}
-
-const bitsPerWord = 64
-
-func indexForBit(bit int) int {
- return bit / bitsPerWord
-}
-
-//goland:noinspection GoUnusedExportedFunction,GoUnusedFunction
-func wordForBit(data []uint64, bit int) uint64 {
- idx := indexForBit(bit)
- if idx >= len(data) {
- return 0
- }
- return data[idx]
-}
-
-func maskForBit(bit int) uint64 {
- return uint64(1) << (bit % bitsPerWord)
-}
-
-func wordsNeeded(bit int) int {
- return indexForBit(bit) + 1
-}
-
-type BitSet struct {
- data []uint64
-}
-
-// NewBitSet creates a new bit set
-// TODO: See if we can replace with the standard library's BitSet
-func NewBitSet() *BitSet {
- return &BitSet{}
-}
-
-func (b *BitSet) add(value int) {
- idx := indexForBit(value)
- if idx >= len(b.data) {
- size := wordsNeeded(value)
- data := make([]uint64, size)
- copy(data, b.data)
- b.data = data
- }
- b.data[idx] |= maskForBit(value)
-}
-
-func (b *BitSet) clear(index int) {
- idx := indexForBit(index)
- if idx >= len(b.data) {
- return
- }
- b.data[idx] &= ^maskForBit(index)
-}
-
-func (b *BitSet) or(set *BitSet) {
- // Get min size necessary to represent the bits in both sets.
- bLen := b.minLen()
- setLen := set.minLen()
- maxLen := intMax(bLen, setLen)
- if maxLen > len(b.data) {
- // Increase the size of len(b.data) to represent the bits in both sets.
- data := make([]uint64, maxLen)
- copy(data, b.data)
- b.data = data
- }
- // len(b.data) is at least setLen.
- for i := 0; i < setLen; i++ {
- b.data[i] |= set.data[i]
- }
-}
-
-func (b *BitSet) remove(value int) {
- b.clear(value)
-}
-
-func (b *BitSet) contains(value int) bool {
- idx := indexForBit(value)
- if idx >= len(b.data) {
- return false
- }
- return (b.data[idx] & maskForBit(value)) != 0
-}
-
-func (b *BitSet) minValue() int {
- for i, v := range b.data {
- if v == 0 {
- continue
- }
- return i*bitsPerWord + bits.TrailingZeros64(v)
- }
- return 2147483647
-}
-
-func (b *BitSet) equals(other interface{}) bool {
- otherBitSet, ok := other.(*BitSet)
- if !ok {
- return false
- }
-
- if b == otherBitSet {
- return true
- }
-
- // We only compare set bits, so we cannot rely on the two slices having the same size. It's
- // possible for two BitSets to have different slice lengths but the same set bits. So we only
- // compare the relevant words and ignore the trailing zeros.
- bLen := b.minLen()
- otherLen := otherBitSet.minLen()
-
- if bLen != otherLen {
- return false
- }
-
- for i := 0; i < bLen; i++ {
- if b.data[i] != otherBitSet.data[i] {
- return false
- }
- }
-
- return true
-}
-
-func (b *BitSet) minLen() int {
- for i := len(b.data); i > 0; i-- {
- if b.data[i-1] != 0 {
- return i
- }
- }
- return 0
-}
-
-func (b *BitSet) length() int {
- cnt := 0
- for _, val := range b.data {
- cnt += bits.OnesCount64(val)
- }
- return cnt
-}
-
-func (b *BitSet) String() string {
- vals := make([]string, 0, b.length())
-
- for i, v := range b.data {
- for v != 0 {
- n := bits.TrailingZeros64(v)
- vals = append(vals, strconv.Itoa(i*bitsPerWord+n))
- v &= ^(uint64(1) << n)
- }
- }
-
- return "{" + strings.Join(vals, ", ") + "}"
-}
-
-type AltDict struct {
- data map[string]interface{}
-}
-
-func NewAltDict() *AltDict {
- d := new(AltDict)
- d.data = make(map[string]interface{})
- return d
-}
-
-func (a *AltDict) Get(key string) interface{} {
- key = "k-" + key
- return a.data[key]
-}
-
-func (a *AltDict) put(key string, value interface{}) {
- key = "k-" + key
- a.data[key] = value
-}
-
-func (a *AltDict) values() []interface{} {
- vs := make([]interface{}, len(a.data))
- i := 0
- for _, v := range a.data {
- vs[i] = v
- i++
- }
- return vs
-}
-
-func EscapeWhitespace(s string, escapeSpaces bool) string {
-
- s = strings.Replace(s, "\t", "\\t", -1)
- s = strings.Replace(s, "\n", "\\n", -1)
- s = strings.Replace(s, "\r", "\\r", -1)
- if escapeSpaces {
- s = strings.Replace(s, " ", "\u00B7", -1)
- }
- return s
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func TerminalNodeToStringArray(sa []TerminalNode) []string {
- st := make([]string, len(sa))
-
- for i, s := range sa {
- st[i] = fmt.Sprintf("%v", s)
- }
-
- return st
-}
-
-//goland:noinspection GoUnusedExportedFunction
-func PrintArrayJavaStyle(sa []string) string {
- var buffer bytes.Buffer
-
- buffer.WriteString("[")
-
- for i, s := range sa {
- buffer.WriteString(s)
- if i != len(sa)-1 {
- buffer.WriteString(", ")
- }
- }
-
- buffer.WriteString("]")
-
- return buffer.String()
-}
-
-// murmur hash
-func murmurInit(seed int) int {
- return seed
-}
-
-func murmurUpdate(h int, value int) int {
- const c1 uint32 = 0xCC9E2D51
- const c2 uint32 = 0x1B873593
- const r1 uint32 = 15
- const r2 uint32 = 13
- const m uint32 = 5
- const n uint32 = 0xE6546B64
-
- k := uint32(value)
- k *= c1
- k = (k << r1) | (k >> (32 - r1))
- k *= c2
-
- hash := uint32(h) ^ k
- hash = (hash << r2) | (hash >> (32 - r2))
- hash = hash*m + n
- return int(hash)
-}
-
-func murmurFinish(h int, numberOfWords int) int {
- var hash = uint32(h)
- hash ^= uint32(numberOfWords) << 2
- hash ^= hash >> 16
- hash *= 0x85ebca6b
- hash ^= hash >> 13
- hash *= 0xc2b2ae35
- hash ^= hash >> 16
-
- return int(hash)
-}
-
-func isDirectory(dir string) (bool, error) {
- fileInfo, err := os.Stat(dir)
- if err != nil {
- switch {
- case errors.Is(err, syscall.ENOENT):
- // The given directory does not exist, so we will try to create it
- //
- err = os.MkdirAll(dir, 0755)
- if err != nil {
- return false, err
- }
-
- return true, nil
- case err != nil:
- return false, err
- default:
- }
- }
- return fileInfo.IsDir(), err
-}
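
The BitSet above is the word-packed set used elsewhere in this runtime to track alternatives; only populated words are compared, so sets with different slice lengths can still be equal. A short package-internal sketch of the add/or/contains cycle, with an illustrative helper name:

    // unionContains reports whether bit is set in the union of a and b,
    // without mutating either input set.
    func unionContains(a, b *BitSet, bit int) bool {
        u := NewBitSet()
        u.or(a)
        u.or(b)
        return u.contains(bit)
    }
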
diff --git a/vendor/github.com/google/cel-go/LICENSE b/vendor/github.com/google/cel-go/LICENSE
deleted file mode 100644
index 2493ed2eb..000000000
--- a/vendor/github.com/google/cel-go/LICENSE
+++ /dev/null
@@ -1,233 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-===========================================================================
-The common/types/pb/equal.go modification of proto.Equal logic
-===========================================================================
-Copyright (c) 2018 The Go Authors. All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
- * Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
-copyright notice, this list of conditions and the following disclaimer
-in the documentation and/or other materials provided with the
-distribution.
- * Neither the name of Google Inc. nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/google/cel-go/cel/BUILD.bazel b/vendor/github.com/google/cel-go/cel/BUILD.bazel
deleted file mode 100644
index 33da21623..000000000
--- a/vendor/github.com/google/cel-go/cel/BUILD.bazel
+++ /dev/null
@@ -1,90 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "cel.go",
- "decls.go",
- "env.go",
- "folding.go",
- "io.go",
- "inlining.go",
- "library.go",
- "macro.go",
- "optimizer.go",
- "options.go",
- "program.go",
- "validator.go",
- ],
- importpath = "github.com/google/cel-go/cel",
- visibility = ["//visibility:public"],
- deps = [
- "//checker:go_default_library",
- "//checker/decls:go_default_library",
- "//common:go_default_library",
- "//common/ast:go_default_library",
- "//common/containers:go_default_library",
- "//common/decls:go_default_library",
- "//common/functions:go_default_library",
- "//common/operators:go_default_library",
- "//common/overloads:go_default_library",
- "//common/stdlib:go_default_library",
- "//common/types:go_default_library",
- "//common/types/pb:go_default_library",
- "//common/types/ref:go_default_library",
- "//common/types/traits:go_default_library",
- "//interpreter:go_default_library",
- "//parser:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//reflect/protodesc:go_default_library",
- "@org_golang_google_protobuf//reflect/protoreflect:go_default_library",
- "@org_golang_google_protobuf//reflect/protoregistry:go_default_library",
- "@org_golang_google_protobuf//types/descriptorpb:go_default_library",
- "@org_golang_google_protobuf//types/dynamicpb:go_default_library",
- "@org_golang_google_protobuf//types/known/anypb:go_default_library",
- "@org_golang_google_protobuf//types/known/durationpb:go_default_library",
- "@org_golang_google_protobuf//types/known/timestamppb:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- srcs = [
- "cel_example_test.go",
- "cel_test.go",
- "decls_test.go",
- "env_test.go",
- "folding_test.go",
- "io_test.go",
- "inlining_test.go",
- "optimizer_test.go",
- "validator_test.go",
- ],
- data = [
- "//cel/testdata:gen_test_fds",
- ],
- embed = [
- ":go_default_library",
- ],
- deps = [
- "//common/operators:go_default_library",
- "//common/overloads:go_default_library",
- "//common/types:go_default_library",
- "//common/types/ref:go_default_library",
- "//common/types/traits:go_default_library",
- "//test:go_default_library",
- "//test/proto2pb:go_default_library",
- "//test/proto3pb:go_default_library",
- "@io_bazel_rules_go//proto/wkt:descriptor_go_proto",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//encoding/prototext:go_default_library",
- "@org_golang_google_protobuf//types/known/structpb:go_default_library",
- "@org_golang_google_protobuf//types/known/wrapperspb:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/cel/cel.go b/vendor/github.com/google/cel-go/cel/cel.go
deleted file mode 100644
index eb5a9f4cc..000000000
--- a/vendor/github.com/google/cel-go/cel/cel.go
+++ /dev/null
@@ -1,19 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package cel defines the top-level interface for the Common Expression Language (CEL).
-//
-// CEL is a non-Turing complete expression language designed to parse, check, and evaluate
-// expressions against user-defined environments.
-package cel
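
Typical use of the package follows the parse, check, and evaluate flow the comment describes: build an Env, compile an expression, and evaluate the resulting Program. A minimal sketch from an importing package, assuming the Compile, Program, and Eval APIs defined elsewhere in this module; the "name" variable and the expression are illustrative only:

    package main

    import (
        "fmt"

        "github.com/google/cel-go/cel"
    )

    func main() {
        // Declare an environment with one string variable, then compile,
        // plan, and evaluate a simple expression against it.
        env, err := cel.NewEnv(cel.Variable("name", cel.StringType))
        if err != nil {
            panic(err)
        }
        ast, iss := env.Compile(`"hello " + name`)
        if iss.Err() != nil {
            panic(iss.Err())
        }
        prg, err := env.Program(ast)
        if err != nil {
            panic(err)
        }
        out, _, err := prg.Eval(map[string]any{"name": "world"})
        if err != nil {
            panic(err)
        }
        fmt.Println(out) // hello world
    }
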
diff --git a/vendor/github.com/google/cel-go/cel/decls.go b/vendor/github.com/google/cel-go/cel/decls.go
deleted file mode 100644
index b59e3708d..000000000
--- a/vendor/github.com/google/cel-go/cel/decls.go
+++ /dev/null
@@ -1,355 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "fmt"
-
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/decls"
- "github.com/google/cel-go/common/functions"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-// Kind indicates a CEL type's kind which is used to differentiate quickly between simple and complex types.
-type Kind = types.Kind
-
-const (
- // DynKind represents a dynamic type. This kind only exists at type-check time.
- DynKind Kind = types.DynKind
-
- // AnyKind represents a google.protobuf.Any type. This kind only exists at type-check time.
- AnyKind = types.AnyKind
-
- // BoolKind represents a boolean type.
- BoolKind = types.BoolKind
-
- // BytesKind represents a bytes type.
- BytesKind = types.BytesKind
-
- // DoubleKind represents a double type.
- DoubleKind = types.DoubleKind
-
- // DurationKind represents a CEL duration type.
- DurationKind = types.DurationKind
-
- // IntKind represents an integer type.
- IntKind = types.IntKind
-
- // ListKind represents a list type.
- ListKind = types.ListKind
-
- // MapKind represents a map type.
- MapKind = types.MapKind
-
- // NullTypeKind represents a null type.
- NullTypeKind = types.NullTypeKind
-
- // OpaqueKind represents an abstract type which has no accessible fields.
- OpaqueKind = types.OpaqueKind
-
- // StringKind represents a string type.
- StringKind = types.StringKind
-
- // StructKind represents a structured object with typed fields.
- StructKind = types.StructKind
-
- // TimestampKind represents a CEL time type.
- TimestampKind = types.TimestampKind
-
- // TypeKind represents the CEL type.
- TypeKind = types.TypeKind
-
- // TypeParamKind represents a parameterized type whose type name will be resolved at type-check time, if possible.
- TypeParamKind = types.TypeParamKind
-
- // UintKind represents a uint type.
- UintKind = types.UintKind
-)
-
-var (
- // AnyType represents the google.protobuf.Any type.
- AnyType = types.AnyType
- // BoolType represents the bool type.
- BoolType = types.BoolType
- // BytesType represents the bytes type.
- BytesType = types.BytesType
- // DoubleType represents the double type.
- DoubleType = types.DoubleType
- // DurationType represents the CEL duration type.
- DurationType = types.DurationType
- // DynType represents a dynamic CEL type whose type will be determined at runtime from context.
- DynType = types.DynType
- // IntType represents the int type.
- IntType = types.IntType
- // NullType represents the type of a null value.
- NullType = types.NullType
- // StringType represents the string type.
- StringType = types.StringType
- // TimestampType represents the time type.
- TimestampType = types.TimestampType
- // TypeType represents a CEL type
- TypeType = types.TypeType
- // UintType represents a uint type.
- UintType = types.UintType
-
- // function references for instantiating new types.
-
- // ListType creates an instance of a list type value with the provided element type.
- ListType = types.NewListType
- // MapType creates an instance of a map type value with the provided key and value types.
- MapType = types.NewMapType
- // NullableType creates an instance of a nullable type with the provided wrapped type.
- //
- // Note: only primitive types are supported as wrapped types.
- NullableType = types.NewNullableType
- // OptionalType creates an abstract parameterized type instance corresponding to CEL's notion of optional.
- OptionalType = types.NewOptionalType
- // OpaqueType creates an abstract parameterized type with a given name.
- OpaqueType = types.NewOpaqueType
- // ObjectType creates a type reference to an externally defined type, e.g. a protobuf message type.
- ObjectType = types.NewObjectType
- // TypeParamType creates a parameterized type instance.
- TypeParamType = types.NewTypeParamType
-)
-
-// Type holds a reference to a runtime type with an optional type-checked set of type parameters.
-type Type = types.Type
-
-// Constant creates an instance of an identifier declaration with a variable name, type, and value.
-func Constant(name string, t *Type, v ref.Val) EnvOption {
- return func(e *Env) (*Env, error) {
- e.variables = append(e.variables, decls.NewConstant(name, t, v))
- return e, nil
- }
-}
-
-// Variable creates an instance of a variable declaration with a variable name and type.
-func Variable(name string, t *Type) EnvOption {
- return func(e *Env) (*Env, error) {
- e.variables = append(e.variables, decls.NewVariable(name, t))
- return e, nil
- }
-}
-
-// Function defines a function and overloads with optional singleton or per-overload bindings.
-//
-// Using Function is roughly equivalent to calling Declarations() to declare the function signatures
-// and Functions() to define the function bindings, if they have been defined. Specifying the
-// same function name more than once will result in the aggregation of the function overloads. If any
-// signatures conflict between the existing and new function definition an error will be raised.
-// However, if the signatures are identical and the overload ids are the same, the redefinition will
-// be considered a no-op.
-//
-// One key difference with using Function() is that each FunctionDecl provided will handle dynamic
-// dispatch based on the type-signatures of the overloads provided which means overload resolution at
-// runtime is handled out of the box rather than via a custom binding for overload resolution via
-// Functions():
-//
-// - Overloads are searched in the order they are declared
-// - Dynamic dispatch for lists and maps is limited by inspection of the list and map contents
-//   at runtime. Empty lists and maps will result in a 'default dispatch'
-// - In the event that a default dispatch occurs, the first overload provided is the one invoked
-//
-// If you intend to use overloads which differentiate based on the key or element type of a list or
-// map, consider using a generic function instead: e.g. func(list(T)) or func(map(K, V)) as this
-// will allow your implementation to determine how best to handle dispatch and the default behavior
-// for empty lists and maps whose contents cannot be inspected.
-//
-// For functions which use parameterized opaque types (abstract types), consider using a singleton
-// function which is capable of inspecting the contents of the type and resolving the appropriate
-// overload as CEL can only make inferences by type-name regarding such types.
-func Function(name string, opts ...FunctionOpt) EnvOption {
- return func(e *Env) (*Env, error) {
- fn, err := decls.NewFunction(name, opts...)
- if err != nil {
- return nil, err
- }
- if existing, found := e.functions[fn.Name()]; found {
- fn, err = existing.Merge(fn)
- if err != nil {
- return nil, err
- }
- }
- e.functions[fn.Name()] = fn
- return e, nil
- }
-}
-
-// FunctionOpt defines a functional option for configuring a function declaration.
-type FunctionOpt = decls.FunctionOpt
-
-// SingletonUnaryBinding creates a singleton function definition to be used for all function overloads.
-//
-// Note, this approach works well if operand is expected to have a specific trait which it implements,
-// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings.
-func SingletonUnaryBinding(fn functions.UnaryOp, traits ...int) FunctionOpt {
- return decls.SingletonUnaryBinding(fn, traits...)
-}
-
-// SingletonBinaryImpl creates a singleton function definition to be used with all function overloads.
-//
-// Note, this approach works well if operand is expected to have a specific trait which it implements,
-// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings.
-//
-// Deprecated: use SingletonBinaryBinding
-func SingletonBinaryImpl(fn functions.BinaryOp, traits ...int) FunctionOpt {
- return decls.SingletonBinaryBinding(fn, traits...)
-}
-
-// SingletonBinaryBinding creates a singleton function definition to be used with all function overloads.
-//
-// Note, this approach works well if operand is expected to have a specific trait which it implements,
-// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings.
-func SingletonBinaryBinding(fn functions.BinaryOp, traits ...int) FunctionOpt {
- return decls.SingletonBinaryBinding(fn, traits...)
-}
-
-// SingletonFunctionImpl creates a singleton function definition to be used with all function overloads.
-//
-// Note, this approach works well if operand is expected to have a specific trait which it implements,
-// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings.
-//
-// Deprecated: use SingletonFunctionBinding
-func SingletonFunctionImpl(fn functions.FunctionOp, traits ...int) FunctionOpt {
- return decls.SingletonFunctionBinding(fn, traits...)
-}
-
-// SingletonFunctionBinding creates a singleton function definition to be used with all function overloads.
-//
-// Note, this approach works well if operand is expected to have a specific trait which it implements,
-// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings.
-func SingletonFunctionBinding(fn functions.FunctionOp, traits ...int) FunctionOpt {
- return decls.SingletonFunctionBinding(fn, traits...)
-}
-
-// DisableDeclaration disables the function signatures, effectively removing them from the type-check
-// environment while preserving the runtime bindings.
-func DisableDeclaration(value bool) FunctionOpt {
- return decls.DisableDeclaration(value)
-}
-
-// Overload defines a new global overload with an overload id, argument types, and result type. Through the
-// use of OverloadOpt options, the overload may also be configured with a binding, an operand trait, and to
-// be non-strict.
-//
-// Note: function bindings should be commonly configured with Overload instances whereas operand traits and
-// strict-ness should be rare occurrences.
-func Overload(overloadID string, args []*Type, resultType *Type, opts ...OverloadOpt) FunctionOpt {
- return decls.Overload(overloadID, args, resultType, opts...)
-}
-
-// MemberOverload defines a new receiver-style overload (or member function) with an overload id, argument types,
-// and result type. Through the use of OverloadOpt options, the overload may also be configured with a binding,
-// an operand trait, and to be non-strict.
-//
-// Note: function bindings should be commonly configured with Overload instances whereas operand traits and
-// strict-ness should be rare occurrences.
-func MemberOverload(overloadID string, args []*Type, resultType *Type, opts ...OverloadOpt) FunctionOpt {
- return decls.MemberOverload(overloadID, args, resultType, opts...)
-}
-
-// OverloadOpt is a functional option for configuring a function overload.
-type OverloadOpt = decls.OverloadOpt
-
-// UnaryBinding provides the implementation of a unary overload. The provided function is protected by a runtime
-// type-guard which ensures runtime type agreement between the overload signature and runtime argument types.
-func UnaryBinding(binding functions.UnaryOp) OverloadOpt {
- return decls.UnaryBinding(binding)
-}
-
-// BinaryBinding provides the implementation of a binary overload. The provided function is protected by a runtime
-// type-guard which ensures runtime type agreement between the overload signature and runtime argument types.
-func BinaryBinding(binding functions.BinaryOp) OverloadOpt {
- return decls.BinaryBinding(binding)
-}
-
-// FunctionBinding provides the implementation of a variadic overload. The provided function is protected by a runtime
-// type-guard which ensures runtime type agreement between the overload signature and runtime argument types.
-func FunctionBinding(binding functions.FunctionOp) OverloadOpt {
- return decls.FunctionBinding(binding)
-}
-
-// OverloadIsNonStrict enables the function to be called with error and unknown argument values.
-//
-// Note: do not use this option unless absolutely necessary, as it should be an uncommon feature.
-func OverloadIsNonStrict() OverloadOpt {
- return decls.OverloadIsNonStrict()
-}
-
-// OverloadOperandTrait configures a set of traits which the first argument to the overload must implement in order to be
-// successfully invoked.
-func OverloadOperandTrait(trait int) OverloadOpt {
- return decls.OverloadOperandTrait(trait)
-}
-
-// TypeToExprType converts a CEL-native type representation to a protobuf CEL Type representation.
-func TypeToExprType(t *Type) (*exprpb.Type, error) {
- return types.TypeToExprType(t)
-}
-
-// ExprTypeToType converts a protobuf CEL type representation to a CEL-native type representation.
-func ExprTypeToType(t *exprpb.Type) (*Type, error) {
- return types.ExprTypeToType(t)
-}
-
-// ExprDeclToDeclaration converts a protobuf CEL declaration to a CEL-native declaration, either a Variable or Function.
-func ExprDeclToDeclaration(d *exprpb.Decl) (EnvOption, error) {
- switch d.GetDeclKind().(type) {
- case *exprpb.Decl_Function:
- overloads := d.GetFunction().GetOverloads()
- opts := make([]FunctionOpt, len(overloads))
- for i, o := range overloads {
- args := make([]*Type, len(o.GetParams()))
- for j, p := range o.GetParams() {
- a, err := types.ExprTypeToType(p)
- if err != nil {
- return nil, err
- }
- args[j] = a
- }
- res, err := types.ExprTypeToType(o.GetResultType())
- if err != nil {
- return nil, err
- }
- if o.IsInstanceFunction {
- opts[i] = decls.MemberOverload(o.GetOverloadId(), args, res)
- } else {
- opts[i] = decls.Overload(o.GetOverloadId(), args, res)
- }
- }
- return Function(d.GetName(), opts...), nil
- case *exprpb.Decl_Ident:
- t, err := types.ExprTypeToType(d.GetIdent().GetType())
- if err != nil {
- return nil, err
- }
- if d.GetIdent().GetValue() == nil {
- return Variable(d.GetName(), t), nil
- }
- val, err := ast.ConstantToVal(d.GetIdent().GetValue())
- if err != nil {
- return nil, err
- }
- return Constant(d.GetName(), t, val), nil
- default:
- return nil, fmt.Errorf("unsupported decl: %v", d)
- }
-}
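
Putting the declaration helpers above together, a custom function is registered by combining Function, Overload, and a binding. A minimal sketch from an importing package; the "shout" function is illustrative only, and types.String / ref.Val come from the common packages this file already imports:

    import (
        "strings"

        "github.com/google/cel-go/cel"
        "github.com/google/cel-go/common/types"
        "github.com/google/cel-go/common/types/ref"
    )

    // newShoutEnv declares a global shout(string) -> string function whose
    // single overload upper-cases its argument.
    func newShoutEnv() (*cel.Env, error) {
        return cel.NewEnv(
            cel.Function("shout",
                cel.Overload("shout_string",
                    []*cel.Type{cel.StringType}, cel.StringType,
                    cel.UnaryBinding(func(v ref.Val) ref.Val {
                        return types.String(strings.ToUpper(string(v.(types.String))))
                    }),
                ),
            ),
        )
    }
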
diff --git a/vendor/github.com/google/cel-go/cel/env.go b/vendor/github.com/google/cel-go/cel/env.go
deleted file mode 100644
index 6568a8b80..000000000
--- a/vendor/github.com/google/cel-go/cel/env.go
+++ /dev/null
@@ -1,881 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "errors"
- "sync"
-
- "github.com/google/cel-go/checker"
- chkdecls "github.com/google/cel-go/checker/decls"
- "github.com/google/cel-go/common"
- celast "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/containers"
- "github.com/google/cel-go/common/decls"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/interpreter"
- "github.com/google/cel-go/parser"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-// Source interface representing a user-provided expression.
-type Source = common.Source
-
-// Ast representing the checked or unchecked expression, its source, and related metadata such as
-// source position information.
-type Ast struct {
- source Source
- impl *celast.AST
-}
-
-// NativeRep converts the AST to a Go-native representation.
-func (ast *Ast) NativeRep() *celast.AST {
- return ast.impl
-}
-
-// Expr returns the proto serializable instance of the parsed/checked expression.
-//
-// Deprecated: prefer cel.AstToCheckedExpr() or cel.AstToParsedExpr() and call GetExpr()
-// on the result instead.
-func (ast *Ast) Expr() *exprpb.Expr {
- if ast == nil {
- return nil
- }
- pbExpr, _ := celast.ExprToProto(ast.impl.Expr())
- return pbExpr
-}
-
-// IsChecked returns whether the Ast value has been successfully type-checked.
-func (ast *Ast) IsChecked() bool {
- if ast == nil {
- return false
- }
- return ast.impl.IsChecked()
-}
-
-// SourceInfo returns character offset and newline position information about expression elements.
-func (ast *Ast) SourceInfo() *exprpb.SourceInfo {
- if ast == nil {
- return nil
- }
- pbInfo, _ := celast.SourceInfoToProto(ast.impl.SourceInfo())
- return pbInfo
-}
-
-// ResultType returns the output type of the expression if the Ast has been type-checked, else
-// returns chkdecls.Dyn as the parse step cannot infer the type.
-//
-// Deprecated: use OutputType
-func (ast *Ast) ResultType() *exprpb.Type {
- out := ast.OutputType()
- t, err := TypeToExprType(out)
- if err != nil {
- return chkdecls.Dyn
- }
- return t
-}
-
-// OutputType returns the output type of the expression if the Ast has been type-checked, else
-// returns cel.DynType as the parse step cannot infer types.
-func (ast *Ast) OutputType() *Type {
- if ast == nil {
- return types.ErrorType
- }
- return ast.impl.GetType(ast.impl.Expr().ID())
-}
-
-// Source returns a view of the input used to create the Ast. This source may be complete or
-// constructed from the SourceInfo.
-func (ast *Ast) Source() Source {
- if ast == nil {
- return nil
- }
- return ast.source
-}
-
-// FormatType converts a type message into a string representation.
-//
-// Deprecated: prefer FormatCELType
-func FormatType(t *exprpb.Type) string {
- return checker.FormatCheckedType(t)
-}
-
-// FormatCELType formats a cel.Type value to a string representation.
-//
-// The type formatting is identical to FormatType.
-func FormatCELType(t *Type) string {
- return checker.FormatCELType(t)
-}
-
-// Env encapsulates the context necessary to perform parsing, type checking, or generation of
-// evaluable programs for different expressions.
-type Env struct {
- Container *containers.Container
- variables []*decls.VariableDecl
- functions map[string]*decls.FunctionDecl
- macros []parser.Macro
- adapter types.Adapter
- provider types.Provider
- features map[int]bool
- appliedFeatures map[int]bool
- libraries map[string]bool
- validators []ASTValidator
- costOptions []checker.CostOption
-
- // Internal parser representation
- prsr *parser.Parser
- prsrOpts []parser.Option
-
- // Internal checker representation
- chkMutex sync.Mutex
- chk *checker.Env
- chkErr error
- chkOnce sync.Once
- chkOpts []checker.Option
-
- // Program options tied to the environment
- progOpts []ProgramOption
-}
-
-// NewEnv creates a program environment configured with the standard library of CEL functions and
-// macros. The Env value returned can parse and check any CEL program which builds upon the core
-// features documented in the CEL specification.
-//
-// See the EnvOption helper functions for the options that can be used to configure the
-// environment.
-func NewEnv(opts ...EnvOption) (*Env, error) {
- // Extend the statically configured standard environment, disabling eager validation to ensure
- // the cost of setup for the environment is still just as cheap as it is in v0.11.x and earlier
- // releases. The user provided options can easily re-enable the eager validation as they are
- // processed after this default option.
- stdOpts := append([]EnvOption{EagerlyValidateDeclarations(false)}, opts...)
- env, err := getStdEnv()
- if err != nil {
- return nil, err
- }
- return env.Extend(stdOpts...)
-}
-
-// NewCustomEnv creates a custom program environment which is not automatically configured with the
-// standard library of functions and macros documented in the CEL spec.
-//
-// The purpose for using a custom environment might be for subsetting the standard library produced
-// by the cel.StdLib() function. Subsetting CEL is a core aspect of its design that allows users to
-// limit the compute and memory impact of a CEL program by controlling the functions and macros
-// that may appear in a given expression.
-//
-// See the EnvOption helper functions for the options that can be used to configure the
-// environment.
-func NewCustomEnv(opts ...EnvOption) (*Env, error) {
- registry, err := types.NewRegistry()
- if err != nil {
- return nil, err
- }
- return (&Env{
- variables: []*decls.VariableDecl{},
- functions: map[string]*decls.FunctionDecl{},
- macros: []parser.Macro{},
- Container: containers.DefaultContainer,
- adapter: registry,
- provider: registry,
- features: map[int]bool{},
- appliedFeatures: map[int]bool{},
- libraries: map[string]bool{},
- validators: []ASTValidator{},
- progOpts: []ProgramOption{},
- costOptions: []checker.CostOption{},
- }).configure(opts)
-}
-
-// Check performs type-checking on the input Ast and yields a checked Ast and/or set of Issues.
-// If any `ASTValidators` are configured on the environment, they will be applied after a valid
-// type-check result. If any issues are detected, the validators will provide them on the
-// output Issues object.
-//
-// Either checking or validation has failed if the returned Issues value and its Issues.Err()
-// value are non-nil. Issues should be inspected if they are non-nil, but may not represent a
-// fatal error.
-//
-// It is possible to have both non-nil Ast and Issues values returned from this call: however,
-// the mere presence of an Ast does not imply that it is valid for use.
-func (e *Env) Check(ast *Ast) (*Ast, *Issues) {
- // Construct the internal checker env, erroring if there is an issue adding the declarations.
- chk, err := e.initChecker()
- if err != nil {
- errs := common.NewErrors(ast.Source())
- errs.ReportError(common.NoLocation, err.Error())
- return nil, NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo())
- }
-
- checked, errs := checker.Check(ast.impl, ast.Source(), chk)
- if len(errs.GetErrors()) > 0 {
- return nil, NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo())
- }
- // Manually create the Ast to ensure that the Ast source information (which may be more
- // detailed than the information provided by Check), is returned to the caller.
- ast = &Ast{
- source: ast.Source(),
- impl: checked}
-
- // Avoid creating a validator config if it's not needed.
- if len(e.validators) == 0 {
- return ast, nil
- }
-
- // Generate a validator configuration from the set of configured validators.
- vConfig := newValidatorConfig()
- for _, v := range e.validators {
- if cv, ok := v.(ASTValidatorConfigurer); ok {
- cv.Configure(vConfig)
- }
- }
- // Apply additional validators on the type-checked result.
- iss := NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo())
- for _, v := range e.validators {
- v.Validate(e, vConfig, checked, iss)
- }
- if iss.Err() != nil {
- return nil, iss
- }
- return ast, nil
-}
-
-// Compile combines the Parse and Check phases CEL program compilation to produce an Ast and
-// associated issues.
-//
-// If an error is encountered during parsing the Compile step will not continue with the Check
-// phase. If non-error issues are encountered during Parse, they may be combined with any issues
-// discovered during Check.
-//
-// Note, for parse-only uses of CEL use Parse.
-func (e *Env) Compile(txt string) (*Ast, *Issues) {
- return e.CompileSource(common.NewTextSource(txt))
-}
-
-// CompileSource combines the Parse and Check phases CEL program compilation to produce an Ast and
-// associated issues.
-//
-// If an error is encountered during parsing the CompileSource step will not continue with the
-// Check phase. If non-error issues are encountered during Parse, they may be combined with any
-// issues discovered during Check.
-//
-// Note, for parse-only uses of CEL use Parse.
-func (e *Env) CompileSource(src Source) (*Ast, *Issues) {
- ast, iss := e.ParseSource(src)
- if iss.Err() != nil {
- return nil, iss
- }
- checked, iss2 := e.Check(ast)
- if iss2.Err() != nil {
- return nil, iss2
- }
- return checked, iss2
-}
-
-// Extend the current environment with additional options to produce a new Env.
-//
-// Note, the extended Env value should not share memory with the original. It is possible, however,
-// that a CustomTypeAdapter or CustomTypeProvider options could provide values which are mutable.
-// To ensure separation of state between extended environments either make sure the TypeAdapter and
-// TypeProvider are immutable, or that their underlying implementations are based on the
-// ref.TypeRegistry which provides a Copy method which will be invoked by this method.
-func (e *Env) Extend(opts ...EnvOption) (*Env, error) {
- chk, chkErr := e.getCheckerOrError()
- if chkErr != nil {
- return nil, chkErr
- }
-
- prsrOptsCopy := make([]parser.Option, len(e.prsrOpts))
- copy(prsrOptsCopy, e.prsrOpts)
-
- // The type-checker is configured with Declarations. The declarations may either be provided
- // as options which have not yet been validated, or may come from a previous checker instance
- // whose types have already been validated.
- chkOptsCopy := make([]checker.Option, len(e.chkOpts))
- copy(chkOptsCopy, e.chkOpts)
-
- // Copy the declarations if needed.
- varsCopy := []*decls.VariableDecl{}
- if chk != nil {
- // If the type-checker has already been instantiated, then the e.declarations have been
- // validated within the chk instance.
- chkOptsCopy = append(chkOptsCopy, checker.ValidatedDeclarations(chk))
- } else {
- // If the type-checker has not been instantiated, ensure the unvalidated declarations are
- // provided to the extended Env instance.
- varsCopy = make([]*decls.VariableDecl, len(e.variables))
- copy(varsCopy, e.variables)
- }
-
- // Copy macros and program options
- macsCopy := make([]parser.Macro, len(e.macros))
- progOptsCopy := make([]ProgramOption, len(e.progOpts))
- copy(macsCopy, e.macros)
- copy(progOptsCopy, e.progOpts)
-
- // Copy the adapter / provider if they appear to be mutable.
- adapter := e.adapter
- provider := e.provider
- adapterReg, isAdapterReg := e.adapter.(*types.Registry)
- providerReg, isProviderReg := e.provider.(*types.Registry)
- // In most cases the provider and adapter will be a ref.TypeRegistry;
- // however, in the rare cases where they are not, they are assumed to
- // be immutable. Since it is possible to set the TypeProvider separately
- // from the TypeAdapter, the possible configurations which could use a
- // TypeRegistry as the base implementation are captured below.
- if isAdapterReg && isProviderReg {
- reg := providerReg.Copy()
- provider = reg
- // If the adapter and provider are the same object, set the adapter
- // to the same ref.TypeRegistry as the provider.
- if adapterReg == providerReg {
- adapter = reg
- } else {
- // Otherwise, make a copy of the adapter.
- adapter = adapterReg.Copy()
- }
- } else if isProviderReg {
- provider = providerReg.Copy()
- } else if isAdapterReg {
- adapter = adapterReg.Copy()
- }
-
- featuresCopy := make(map[int]bool, len(e.features))
- for k, v := range e.features {
- featuresCopy[k] = v
- }
- appliedFeaturesCopy := make(map[int]bool, len(e.appliedFeatures))
- for k, v := range e.appliedFeatures {
- appliedFeaturesCopy[k] = v
- }
- funcsCopy := make(map[string]*decls.FunctionDecl, len(e.functions))
- for k, v := range e.functions {
- funcsCopy[k] = v
- }
- libsCopy := make(map[string]bool, len(e.libraries))
- for k, v := range e.libraries {
- libsCopy[k] = v
- }
- validatorsCopy := make([]ASTValidator, len(e.validators))
- copy(validatorsCopy, e.validators)
- costOptsCopy := make([]checker.CostOption, len(e.costOptions))
- copy(costOptsCopy, e.costOptions)
-
- ext := &Env{
- Container: e.Container,
- variables: varsCopy,
- functions: funcsCopy,
- macros: macsCopy,
- progOpts: progOptsCopy,
- adapter: adapter,
- features: featuresCopy,
- appliedFeatures: appliedFeaturesCopy,
- libraries: libsCopy,
- validators: validatorsCopy,
- provider: provider,
- chkOpts: chkOptsCopy,
- prsrOpts: prsrOptsCopy,
- costOptions: costOptsCopy,
- }
- return ext.configure(opts)
-}
-
-// HasFeature checks whether the environment enables the given feature
-// flag, as enumerated in options.go.
-func (e *Env) HasFeature(flag int) bool {
- enabled, has := e.features[flag]
- return has && enabled
-}
-
-// HasLibrary returns whether a specific SingletonLibrary has been configured in the environment.
-func (e *Env) HasLibrary(libName string) bool {
- configured, exists := e.libraries[libName]
- return exists && configured
-}
-
-// Libraries returns a list of SingletonLibrary that have been configured in the environment.
-func (e *Env) Libraries() []string {
- libraries := make([]string, 0, len(e.libraries))
- for libName := range e.libraries {
- libraries = append(libraries, libName)
- }
- return libraries
-}
-
-// HasValidator returns whether a specific ASTValidator has been configured in the environment.
-func (e *Env) HasValidator(name string) bool {
- for _, v := range e.validators {
- if v.Name() == name {
- return true
- }
- }
- return false
-}
-
-// Parse parses the input expression value `txt` to a Ast and/or a set of Issues.
-//
-// This form of Parse creates a Source value for the input `txt` and forwards to the
-// ParseSource method.
-func (e *Env) Parse(txt string) (*Ast, *Issues) {
- src := common.NewTextSource(txt)
- return e.ParseSource(src)
-}
-
-// ParseSource parses the input source to an Ast and/or set of Issues.
-//
-// Parsing has failed if the returned Issues value and its Issues.Err() value is non-nil.
-// Issues should be inspected if they are non-nil, but may not represent a fatal error.
-//
-// It is possible to have both non-nil Ast and Issues values returned from this call; however,
-// the mere presence of an Ast does not imply that it is valid for use.
-func (e *Env) ParseSource(src Source) (*Ast, *Issues) {
- parsed, errs := e.prsr.Parse(src)
- if len(errs.GetErrors()) > 0 {
- return nil, &Issues{errs: errs}
- }
- return &Ast{source: src, impl: parsed}, nil
-}
-
-// Program generates an evaluable instance of the Ast within the environment (Env).
-func (e *Env) Program(ast *Ast, opts ...ProgramOption) (Program, error) {
- optSet := e.progOpts
- if len(opts) != 0 {
- mergedOpts := []ProgramOption{}
- mergedOpts = append(mergedOpts, e.progOpts...)
- mergedOpts = append(mergedOpts, opts...)
- optSet = mergedOpts
- }
- return newProgram(e, ast, optSet)
-}
-
-// CELTypeAdapter returns the `types.Adapter` configured for the environment.
-func (e *Env) CELTypeAdapter() types.Adapter {
- return e.adapter
-}
-
-// CELTypeProvider returns the `types.Provider` configured for the environment.
-func (e *Env) CELTypeProvider() types.Provider {
- return e.provider
-}
-
-// TypeAdapter returns the `ref.TypeAdapter` configured for the environment.
-//
-// Deprecated: use CELTypeAdapter()
-func (e *Env) TypeAdapter() ref.TypeAdapter {
- return e.adapter
-}
-
-// TypeProvider returns the `ref.TypeProvider` configured for the environment.
-//
-// Deprecated: use CELTypeProvider()
-func (e *Env) TypeProvider() ref.TypeProvider {
- if legacyProvider, ok := e.provider.(ref.TypeProvider); ok {
- return legacyProvider
- }
- return &interopLegacyTypeProvider{Provider: e.provider}
-}
-
-// UnknownVars returns an interpreter.PartialActivation which marks all variables declared in the
-// Env as unknown AttributePattern values.
-//
-// Note, the UnknownVars will behave the same as an interpreter.EmptyActivation unless the
-// PartialAttributes option is provided as a ProgramOption.
-func (e *Env) UnknownVars() interpreter.PartialActivation {
- act := interpreter.EmptyActivation()
- part, _ := PartialVars(act, e.computeUnknownVars(act)...)
- return part
-}
-
-// PartialVars returns an interpreter.PartialActivation where all variables not in the input variable
-// set, but which have been configured in the environment, are marked as unknown.
-//
-// The `vars` value may either be an interpreter.Activation or any valid input to the
-// interpreter.NewActivation call.
-//
-// Note, this is equivalent to calling cel.PartialVars and manually configuring the set of unknown
-// variables. For more advanced use cases of partial state where portions of an object graph, rather
-// than top-level variables, are missing the PartialVars() method may be a more suitable choice.
-//
-// Note, the PartialVars will behave the same as an interpreter.EmptyActivation unless the
-// PartialAttributes option is provided as a ProgramOption.
-func (e *Env) PartialVars(vars any) (interpreter.PartialActivation, error) {
- act, err := interpreter.NewActivation(vars)
- if err != nil {
- return nil, err
- }
- return PartialVars(act, e.computeUnknownVars(act)...)
-}
-
-// ResidualAst takes an Ast and its EvalDetails to produce a new Ast which only contains the
-// attribute references which are unknown.
-//
-// Residual expressions are beneficial in a few scenarios:
-//
-// - Optimizing constant expression evaluations away.
-// - Indexing and pruning expressions based on known input arguments.
-// - Surfacing additional requirements that are needed in order to complete an evaluation.
-// - Sharing the evaluation of an expression across multiple machines/nodes.
-//
-// For example, if an expression targets a 'resource' and 'request' attribute and the possible
-// values for the resource are known, a PartialActivation could mark the 'request' as an unknown
-// interpreter.AttributePattern and the resulting ResidualAst would be reduced to only the parts
-// of the expression that reference the 'request'.
-//
-// Note, the expression ids within the residual AST generated through this method have no
-// correlation to the expression ids of the original AST.
-//
-// See the PartialVars helper for how to construct a PartialActivation.
-//
-// TODO: Consider adding an option to generate a Program.Residual to avoid round-tripping to an
-// Ast format and then Program again.
-func (e *Env) ResidualAst(a *Ast, details *EvalDetails) (*Ast, error) {
- pruned := interpreter.PruneAst(a.impl.Expr(), a.impl.SourceInfo().MacroCalls(), details.State())
- newAST := &Ast{source: a.Source(), impl: pruned}
- expr, err := AstToString(newAST)
- if err != nil {
- return nil, err
- }
- parsed, iss := e.Parse(expr)
- if iss != nil && iss.Err() != nil {
- return nil, iss.Err()
- }
- if !a.IsChecked() {
- return parsed, nil
- }
- checked, iss := e.Check(parsed)
- if iss != nil && iss.Err() != nil {
- return nil, iss.Err()
- }
- return checked, nil
-}
-
-// EstimateCost estimates the cost of a type checked CEL expression using the length estimates of input data and
-// extension functions provided by estimator.
-func (e *Env) EstimateCost(ast *Ast, estimator checker.CostEstimator, opts ...checker.CostOption) (checker.CostEstimate, error) {
- extendedOpts := make([]checker.CostOption, 0, len(e.costOptions))
- extendedOpts = append(extendedOpts, opts...)
- extendedOpts = append(extendedOpts, e.costOptions...)
- return checker.Cost(ast.impl, estimator, extendedOpts...)
-}
-
-// configure applies a series of EnvOptions to the current environment.
-func (e *Env) configure(opts []EnvOption) (*Env, error) {
- // Customized the environment using the provided EnvOption values. If an error is
- // generated at any step this, will be returned as a nil Env with a non-nil error.
- var err error
- for _, opt := range opts {
- e, err = opt(e)
- if err != nil {
- return nil, err
- }
- }
-
- // If the default UTC timezone fix has been enabled, make sure the library is configured
- e, err = e.maybeApplyFeature(featureDefaultUTCTimeZone, Lib(timeUTCLibrary{}))
- if err != nil {
- return nil, err
- }
-
- // Configure the parser.
- prsrOpts := []parser.Option{}
- prsrOpts = append(prsrOpts, e.prsrOpts...)
- prsrOpts = append(prsrOpts, parser.Macros(e.macros...))
-
- if e.HasFeature(featureEnableMacroCallTracking) {
- prsrOpts = append(prsrOpts, parser.PopulateMacroCalls(true))
- }
- if e.HasFeature(featureVariadicLogicalASTs) {
- prsrOpts = append(prsrOpts, parser.EnableVariadicOperatorASTs(true))
- }
- e.prsr, err = parser.NewParser(prsrOpts...)
- if err != nil {
- return nil, err
- }
-
- // Ensure that the checker init happens eagerly rather than lazily.
- if e.HasFeature(featureEagerlyValidateDeclarations) {
- _, err := e.initChecker()
- if err != nil {
- return nil, err
- }
- }
-
- return e, nil
-}
-
-func (e *Env) initChecker() (*checker.Env, error) {
- e.chkOnce.Do(func() {
- chkOpts := []checker.Option{}
- chkOpts = append(chkOpts, e.chkOpts...)
- chkOpts = append(chkOpts,
- checker.CrossTypeNumericComparisons(
- e.HasFeature(featureCrossTypeNumericComparisons)))
-
- ce, err := checker.NewEnv(e.Container, e.provider, chkOpts...)
- if err != nil {
- e.setCheckerOrError(nil, err)
- return
- }
- // Add the statically configured declarations.
- err = ce.AddIdents(e.variables...)
- if err != nil {
- e.setCheckerOrError(nil, err)
- return
- }
- // Add the function declarations which are derived from the FunctionDecl instances.
- for _, fn := range e.functions {
- if fn.IsDeclarationDisabled() {
- continue
- }
- err = ce.AddFunctions(fn)
- if err != nil {
- e.setCheckerOrError(nil, err)
- return
- }
- }
- // Add function declarations here separately.
- e.setCheckerOrError(ce, nil)
- })
- return e.getCheckerOrError()
-}
-
-// setCheckerOrError sets the checker.Env or error state in a concurrency-safe manner
-func (e *Env) setCheckerOrError(chk *checker.Env, chkErr error) {
- e.chkMutex.Lock()
- e.chk = chk
- e.chkErr = chkErr
- e.chkMutex.Unlock()
-}
-
-// getCheckerOrError gets the checker.Env or error state in a concurrency-safe manner
-func (e *Env) getCheckerOrError() (*checker.Env, error) {
- e.chkMutex.Lock()
- defer e.chkMutex.Unlock()
- return e.chk, e.chkErr
-}
-
-// maybeApplyFeature determines whether the feature-guarded option is enabled, and if so applies
-// the feature if it has not already been enabled.
-func (e *Env) maybeApplyFeature(feature int, option EnvOption) (*Env, error) {
- if !e.HasFeature(feature) {
- return e, nil
- }
- _, applied := e.appliedFeatures[feature]
- if applied {
- return e, nil
- }
- e, err := option(e)
- if err != nil {
- return nil, err
- }
- // record that the feature has been applied since it will generate declarations
- // and functions which will be propagated on Extend() calls and which should only
- // be registered once.
- e.appliedFeatures[feature] = true
- return e, nil
-}
-
-// computeUnknownVars determines a set of missing variables based on the input activation and the
-// environment's configured declaration set.
-func (e *Env) computeUnknownVars(vars interpreter.Activation) []*interpreter.AttributePattern {
- var unknownPatterns []*interpreter.AttributePattern
- for _, v := range e.variables {
- varName := v.Name()
- if _, found := vars.ResolveName(varName); found {
- continue
- }
- unknownPatterns = append(unknownPatterns, interpreter.NewAttributePattern(varName))
- }
- return unknownPatterns
-}
-
-// Error type which references an expression id, a location within source, and a message.
-type Error = common.Error
-
-// Issues defines methods for inspecting the error details of parse and check calls.
-//
-// Note: in the future, non-fatal warnings and notices may be inspectable via the Issues struct.
-type Issues struct {
- errs *common.Errors
- info *celast.SourceInfo
-}
-
-// NewIssues returns an Issues struct from a common.Errors object.
-func NewIssues(errs *common.Errors) *Issues {
- return NewIssuesWithSourceInfo(errs, nil)
-}
-
-// NewIssuesWithSourceInfo returns an Issues struct from a common.Errors object with SourceInfo metatata
-// which can be used with the `ReportErrorAtID` method for additional error reports within the context
-// information that's inferred from an expression id.
-func NewIssuesWithSourceInfo(errs *common.Errors, info *celast.SourceInfo) *Issues {
- return &Issues{
- errs: errs,
- info: info,
- }
-}
-
-// Err returns an error value if the issues list contains one or more errors.
-func (i *Issues) Err() error {
- if i == nil {
- return nil
- }
- if len(i.Errors()) > 0 {
- return errors.New(i.String())
- }
- return nil
-}
-
-// Errors returns the collection of errors encountered in more granular detail.
-func (i *Issues) Errors() []*Error {
- if i == nil {
- return []*Error{}
- }
- return i.errs.GetErrors()
-}
-
-// Append collects the issues from another Issues struct into a new Issues object.
-func (i *Issues) Append(other *Issues) *Issues {
- if i == nil {
- return other
- }
- if other == nil {
- return i
- }
- return NewIssues(i.errs.Append(other.errs.GetErrors()))
-}
-
-// String converts the issues to a suitable display string.
-func (i *Issues) String() string {
- if i == nil {
- return ""
- }
- return i.errs.ToDisplayString()
-}
-
-// ReportErrorAtID reports an error message with an optional set of formatting arguments.
-//
-// The source metadata for the expression at `id`, if present, is attached to the error report.
-// To ensure that source metadata is attached to error reports, use NewIssuesWithSourceInfo.
-func (i *Issues) ReportErrorAtID(id int64, message string, args ...any) {
- i.errs.ReportErrorAtID(id, i.info.GetStartLocation(id), message, args...)
-}
-
-// getStdEnv lazy initializes the CEL standard environment.
-func getStdEnv() (*Env, error) {
- stdEnvInit.Do(func() {
- stdEnv, stdEnvErr = NewCustomEnv(StdLib(), EagerlyValidateDeclarations(true))
- })
- return stdEnv, stdEnvErr
-}
-
-// interopCELTypeProvider layers support for the types.Provider interface on top of a ref.TypeProvider.
-type interopCELTypeProvider struct {
- ref.TypeProvider
-}
-
-// FindStructType returns a types.Type instance for the given fully-qualified typeName if one exists.
-//
-// This method proxies to the underyling ref.TypeProvider's FindType method and converts protobuf type
-// into a native type representation. If the conversion fails, the type is listed as not found.
-func (p *interopCELTypeProvider) FindStructType(typeName string) (*types.Type, bool) {
- if et, found := p.FindType(typeName); found {
- t, err := types.ExprTypeToType(et)
- if err != nil {
- return nil, false
- }
- return t, true
- }
- return nil, false
-}
-
-// FindStructFieldNames returns an empty set of field for the interop provider.
-//
-// To inspect the field names, migrate to a `types.Provider` implementation.
-func (p *interopCELTypeProvider) FindStructFieldNames(typeName string) ([]string, bool) {
- return []string{}, false
-}
-
-// FindStructFieldType returns a types.FieldType instance for the given fully-qualified typeName and field
-// name, if one exists.
-//
-// This method proxies to the underyling ref.TypeProvider's FindFieldType method and converts protobuf type
-// into a native type representation. If the conversion fails, the type is listed as not found.
-func (p *interopCELTypeProvider) FindStructFieldType(structType, fieldName string) (*types.FieldType, bool) {
- if ft, found := p.FindFieldType(structType, fieldName); found {
- t, err := types.ExprTypeToType(ft.Type)
- if err != nil {
- return nil, false
- }
- return &types.FieldType{
- Type: t,
- IsSet: ft.IsSet,
- GetFrom: ft.GetFrom,
- }, true
- }
- return nil, false
-}
-
-// interopLegacyTypeProvider layers support for the ref.TypeProvider interface on top of a types.Provider.
-type interopLegacyTypeProvider struct {
- types.Provider
-}
-
-// FindType retruns the protobuf Type representation for the input type name if one exists.
-//
-// This method proxies to the underlying types.Provider FindStructType method and converts the types.Type
-// value to a protobuf Type representation.
-//
-// Failure to convert the type will result in the type not being found.
-func (p *interopLegacyTypeProvider) FindType(typeName string) (*exprpb.Type, bool) {
- if t, found := p.FindStructType(typeName); found {
- et, err := types.TypeToExprType(t)
- if err != nil {
- return nil, false
- }
- return et, true
- }
- return nil, false
-}
-
-// FindFieldType returns the protobuf-based FieldType representation for the input type name and field,
-// if one exists.
-//
-// This call proxies to the types.Provider FindStructFieldType method and converts the types.FIeldType
-// value to a protobuf-based ref.FieldType representation if found.
-//
-// Failure to convert the FieldType will result in the field not being found.
-func (p *interopLegacyTypeProvider) FindFieldType(structType, fieldName string) (*ref.FieldType, bool) {
- if cft, found := p.FindStructFieldType(structType, fieldName); found {
- et, err := types.TypeToExprType(cft.Type)
- if err != nil {
- return nil, false
- }
- return &ref.FieldType{
- Type: et,
- IsSet: cft.IsSet,
- GetFrom: cft.GetFrom,
- }, true
- }
- return nil, false
-}
-
-var (
- stdEnvInit sync.Once
- stdEnv *Env
- stdEnvErr error
-)
diff --git a/vendor/github.com/google/cel-go/cel/folding.go b/vendor/github.com/google/cel-go/cel/folding.go
deleted file mode 100644
index d7060896d..000000000
--- a/vendor/github.com/google/cel-go/cel/folding.go
+++ /dev/null
@@ -1,559 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "fmt"
-
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-)
-
-// ConstantFoldingOption defines a functional option for configuring constant folding.
-type ConstantFoldingOption func(opt *constantFoldingOptimizer) (*constantFoldingOptimizer, error)
-
-// MaxConstantFoldIterations limits the number of times literals may be folding during optimization.
-//
-// Defaults to 100 if not set.
-func MaxConstantFoldIterations(limit int) ConstantFoldingOption {
- return func(opt *constantFoldingOptimizer) (*constantFoldingOptimizer, error) {
- opt.maxFoldIterations = limit
- return opt, nil
- }
-}
-
-// NewConstantFoldingOptimizer creates an optimizer which inlines constant scalar an aggregate
-// literal values within function calls and select statements with their evaluated result.
-func NewConstantFoldingOptimizer(opts ...ConstantFoldingOption) (ASTOptimizer, error) {
- folder := &constantFoldingOptimizer{
- maxFoldIterations: defaultMaxConstantFoldIterations,
- }
- var err error
- for _, o := range opts {
- folder, err = o(folder)
- if err != nil {
- return nil, err
- }
- }
- return folder, nil
-}
-
-type constantFoldingOptimizer struct {
- maxFoldIterations int
-}
-
-// Optimize queries the expression graph for scalar and aggregate literal expressions within call and
-// select statements and then evaluates them and replaces the call site with the literal result.
-//
-// Note: only values which can be represented as literals in CEL syntax are supported.
-func (opt *constantFoldingOptimizer) Optimize(ctx *OptimizerContext, a *ast.AST) *ast.AST {
- root := ast.NavigateAST(a)
-
- // Walk the list of foldable expression and continue to fold until there are no more folds left.
- // All of the fold candidates returned by the constantExprMatcher should succeed unless there's
- // a logic bug with the selection of expressions.
- foldableExprs := ast.MatchDescendants(root, constantExprMatcher)
- foldCount := 0
- for len(foldableExprs) != 0 && foldCount < opt.maxFoldIterations {
- for _, fold := range foldableExprs {
- // If the expression could be folded because it's a non-strict call, and the
- // branches are pruned, continue to the next fold.
- if fold.Kind() == ast.CallKind && maybePruneBranches(ctx, fold) {
- continue
- }
- // Otherwise, assume all context is needed to evaluate the expression.
- err := tryFold(ctx, a, fold)
- if err != nil {
- ctx.ReportErrorAtID(fold.ID(), "constant-folding evaluation failed: %v", err.Error())
- return a
- }
- }
- foldCount++
- foldableExprs = ast.MatchDescendants(root, constantExprMatcher)
- }
- // Once all of the constants have been folded, try to run through the remaining comprehensions
- // one last time. In this case, there's no guarantee they'll run, so we only update the
- // target comprehension node with the literal value if the evaluation succeeds.
- for _, compre := range ast.MatchDescendants(root, ast.KindMatcher(ast.ComprehensionKind)) {
- tryFold(ctx, a, compre)
- }
-
- // If the output is a list, map, or struct which contains optional entries, then prune it
- // to make sure that the optionals, if resolved, do not surface in the output literal.
- pruneOptionalElements(ctx, root)
-
- // Ensure that all intermediate values in the folded expression can be represented as valid
- // CEL literals within the AST structure. Use `PostOrderVisit` rather than `MatchDescendents`
- // to avoid extra allocations during this final pass through the AST.
- ast.PostOrderVisit(root, ast.NewExprVisitor(func(e ast.Expr) {
- if e.Kind() != ast.LiteralKind {
- return
- }
- val := e.AsLiteral()
- adapted, err := adaptLiteral(ctx, val)
- if err != nil {
- ctx.ReportErrorAtID(root.ID(), "constant-folding evaluation failed: %v", err.Error())
- return
- }
- ctx.UpdateExpr(e, adapted)
- }))
-
- return a
-}
-
-// tryFold attempts to evaluate a sub-expression to a literal.
-//
-// If the evaluation succeeds, the input expr value will be modified to become a literal, otherwise
-// the method will return an error.
-func tryFold(ctx *OptimizerContext, a *ast.AST, expr ast.Expr) error {
- // Assume all context is needed to evaluate the expression.
- subAST := &Ast{
- impl: ast.NewCheckedAST(ast.NewAST(expr, a.SourceInfo()), a.TypeMap(), a.ReferenceMap()),
- }
- prg, err := ctx.Program(subAST)
- if err != nil {
- return err
- }
- out, _, err := prg.Eval(NoVars())
- if err != nil {
- return err
- }
- // Update the fold expression to be a literal.
- ctx.UpdateExpr(expr, ctx.NewLiteral(out))
- return nil
-}
-
-// maybePruneBranches inspects the non-strict call expression to determine whether
-// a branch can be removed. Evaluation will naturally prune logical and / or calls,
-// but conditional will not be pruned cleanly, so this is one small area where the
-// constant folding step reimplements a portion of the evaluator.
-func maybePruneBranches(ctx *OptimizerContext, expr ast.NavigableExpr) bool {
- call := expr.AsCall()
- args := call.Args()
- switch call.FunctionName() {
- case operators.LogicalAnd, operators.LogicalOr:
- return maybeShortcircuitLogic(ctx, call.FunctionName(), args, expr)
- case operators.Conditional:
- cond := args[0]
- truthy := args[1]
- falsy := args[2]
- if cond.Kind() != ast.LiteralKind {
- return false
- }
- if cond.AsLiteral() == types.True {
- ctx.UpdateExpr(expr, truthy)
- } else {
- ctx.UpdateExpr(expr, falsy)
- }
- return true
- case operators.In:
- haystack := args[1]
- if haystack.Kind() == ast.ListKind && haystack.AsList().Size() == 0 {
- ctx.UpdateExpr(expr, ctx.NewLiteral(types.False))
- return true
- }
- needle := args[0]
- if needle.Kind() == ast.LiteralKind && haystack.Kind() == ast.ListKind {
- needleValue := needle.AsLiteral()
- list := haystack.AsList()
- for _, e := range list.Elements() {
- if e.Kind() == ast.LiteralKind && e.AsLiteral().Equal(needleValue) == types.True {
- ctx.UpdateExpr(expr, ctx.NewLiteral(types.True))
- return true
- }
- }
- }
- }
- return false
-}
-
-func maybeShortcircuitLogic(ctx *OptimizerContext, function string, args []ast.Expr, expr ast.NavigableExpr) bool {
- shortcircuit := types.False
- skip := types.True
- if function == operators.LogicalOr {
- shortcircuit = types.True
- skip = types.False
- }
- newArgs := []ast.Expr{}
- for _, arg := range args {
- if arg.Kind() != ast.LiteralKind {
- newArgs = append(newArgs, arg)
- continue
- }
- if arg.AsLiteral() == skip {
- continue
- }
- if arg.AsLiteral() == shortcircuit {
- ctx.UpdateExpr(expr, arg)
- return true
- }
- }
- if len(newArgs) == 0 {
- newArgs = append(newArgs, args[0])
- ctx.UpdateExpr(expr, newArgs[0])
- return true
- }
- if len(newArgs) == 1 {
- ctx.UpdateExpr(expr, newArgs[0])
- return true
- }
- ctx.UpdateExpr(expr, ctx.NewCall(function, newArgs...))
- return true
-}
-
-// pruneOptionalElements works from the bottom up to resolve optional elements within
-// aggregate literals.
-//
-// Note, many aggregate literals will be resolved as arguments to functions or select
-// statements, so this method exists to handle the case where the literal could not be
-// fully resolved or exists outside of a call, select, or comprehension context.
-func pruneOptionalElements(ctx *OptimizerContext, root ast.NavigableExpr) {
- aggregateLiterals := ast.MatchDescendants(root, aggregateLiteralMatcher)
- for _, lit := range aggregateLiterals {
- switch lit.Kind() {
- case ast.ListKind:
- pruneOptionalListElements(ctx, lit)
- case ast.MapKind:
- pruneOptionalMapEntries(ctx, lit)
- case ast.StructKind:
- pruneOptionalStructFields(ctx, lit)
- }
- }
-}
-
-func pruneOptionalListElements(ctx *OptimizerContext, e ast.Expr) {
- l := e.AsList()
- elems := l.Elements()
- optIndices := l.OptionalIndices()
- if len(optIndices) == 0 {
- return
- }
- updatedElems := []ast.Expr{}
- updatedIndices := []int32{}
- newOptIndex := -1
- for _, e := range elems {
- newOptIndex++
- if !l.IsOptional(int32(newOptIndex)) {
- updatedElems = append(updatedElems, e)
- continue
- }
- if e.Kind() != ast.LiteralKind {
- updatedElems = append(updatedElems, e)
- updatedIndices = append(updatedIndices, int32(newOptIndex))
- continue
- }
- optElemVal, ok := e.AsLiteral().(*types.Optional)
- if !ok {
- updatedElems = append(updatedElems, e)
- updatedIndices = append(updatedIndices, int32(newOptIndex))
- continue
- }
- if !optElemVal.HasValue() {
- newOptIndex-- // Skipping causes the list to get smaller.
- continue
- }
- ctx.UpdateExpr(e, ctx.NewLiteral(optElemVal.GetValue()))
- updatedElems = append(updatedElems, e)
- }
- ctx.UpdateExpr(e, ctx.NewList(updatedElems, updatedIndices))
-}
-
-func pruneOptionalMapEntries(ctx *OptimizerContext, e ast.Expr) {
- m := e.AsMap()
- entries := m.Entries()
- updatedEntries := []ast.EntryExpr{}
- modified := false
- for _, e := range entries {
- entry := e.AsMapEntry()
- key := entry.Key()
- val := entry.Value()
- // If the entry is not optional, or the value-side of the optional hasn't
- // been resolved to a literal, then preserve the entry as-is.
- if !entry.IsOptional() || val.Kind() != ast.LiteralKind {
- updatedEntries = append(updatedEntries, e)
- continue
- }
- optElemVal, ok := val.AsLiteral().(*types.Optional)
- if !ok {
- updatedEntries = append(updatedEntries, e)
- continue
- }
- // When the key is not a literal, but the value is, then it needs to be
- // restored to an optional value.
- if key.Kind() != ast.LiteralKind {
- undoOptVal, err := adaptLiteral(ctx, optElemVal)
- if err != nil {
- ctx.ReportErrorAtID(val.ID(), "invalid map value literal %v: %v", optElemVal, err)
- }
- ctx.UpdateExpr(val, undoOptVal)
- updatedEntries = append(updatedEntries, e)
- continue
- }
- modified = true
- if !optElemVal.HasValue() {
- continue
- }
- ctx.UpdateExpr(val, ctx.NewLiteral(optElemVal.GetValue()))
- updatedEntry := ctx.NewMapEntry(key, val, false)
- updatedEntries = append(updatedEntries, updatedEntry)
- }
- if modified {
- ctx.UpdateExpr(e, ctx.NewMap(updatedEntries))
- }
-}
-
-func pruneOptionalStructFields(ctx *OptimizerContext, e ast.Expr) {
- s := e.AsStruct()
- fields := s.Fields()
- updatedFields := []ast.EntryExpr{}
- modified := false
- for _, f := range fields {
- field := f.AsStructField()
- val := field.Value()
- if !field.IsOptional() || val.Kind() != ast.LiteralKind {
- updatedFields = append(updatedFields, f)
- continue
- }
- optElemVal, ok := val.AsLiteral().(*types.Optional)
- if !ok {
- updatedFields = append(updatedFields, f)
- continue
- }
- modified = true
- if !optElemVal.HasValue() {
- continue
- }
- ctx.UpdateExpr(val, ctx.NewLiteral(optElemVal.GetValue()))
- updatedField := ctx.NewStructField(field.Name(), val, false)
- updatedFields = append(updatedFields, updatedField)
- }
- if modified {
- ctx.UpdateExpr(e, ctx.NewStruct(s.TypeName(), updatedFields))
- }
-}
-
-// adaptLiteral converts a runtime CEL value to its equivalent literal expression.
-//
-// For strongly typed values, the type-provider will be used to reconstruct the fields
-// which are present in the literal and their equivalent initialization values.
-func adaptLiteral(ctx *OptimizerContext, val ref.Val) (ast.Expr, error) {
- switch t := val.Type().(type) {
- case *types.Type:
- switch t {
- case types.BoolType, types.BytesType, types.DoubleType, types.IntType,
- types.NullType, types.StringType, types.UintType:
- return ctx.NewLiteral(val), nil
- case types.DurationType:
- return ctx.NewCall(
- overloads.TypeConvertDuration,
- ctx.NewLiteral(val.ConvertToType(types.StringType)),
- ), nil
- case types.TimestampType:
- return ctx.NewCall(
- overloads.TypeConvertTimestamp,
- ctx.NewLiteral(val.ConvertToType(types.StringType)),
- ), nil
- case types.OptionalType:
- opt := val.(*types.Optional)
- if !opt.HasValue() {
- return ctx.NewCall("optional.none"), nil
- }
- target, err := adaptLiteral(ctx, opt.GetValue())
- if err != nil {
- return nil, err
- }
- return ctx.NewCall("optional.of", target), nil
- case types.TypeType:
- return ctx.NewIdent(val.(*types.Type).TypeName()), nil
- case types.ListType:
- l, ok := val.(traits.Lister)
- if !ok {
- return nil, fmt.Errorf("failed to adapt %v to literal", val)
- }
- elems := make([]ast.Expr, l.Size().(types.Int))
- idx := 0
- it := l.Iterator()
- for it.HasNext() == types.True {
- elemVal := it.Next()
- elemExpr, err := adaptLiteral(ctx, elemVal)
- if err != nil {
- return nil, err
- }
- elems[idx] = elemExpr
- idx++
- }
- return ctx.NewList(elems, []int32{}), nil
- case types.MapType:
- m, ok := val.(traits.Mapper)
- if !ok {
- return nil, fmt.Errorf("failed to adapt %v to literal", val)
- }
- entries := make([]ast.EntryExpr, m.Size().(types.Int))
- idx := 0
- it := m.Iterator()
- for it.HasNext() == types.True {
- keyVal := it.Next()
- keyExpr, err := adaptLiteral(ctx, keyVal)
- if err != nil {
- return nil, err
- }
- valVal := m.Get(keyVal)
- valExpr, err := adaptLiteral(ctx, valVal)
- if err != nil {
- return nil, err
- }
- entries[idx] = ctx.NewMapEntry(keyExpr, valExpr, false)
- idx++
- }
- return ctx.NewMap(entries), nil
- default:
- provider := ctx.CELTypeProvider()
- fields, found := provider.FindStructFieldNames(t.TypeName())
- if !found {
- return nil, fmt.Errorf("failed to adapt %v to literal", val)
- }
- tester := val.(traits.FieldTester)
- indexer := val.(traits.Indexer)
- fieldInits := []ast.EntryExpr{}
- for _, f := range fields {
- field := types.String(f)
- if tester.IsSet(field) != types.True {
- continue
- }
- fieldVal := indexer.Get(field)
- fieldExpr, err := adaptLiteral(ctx, fieldVal)
- if err != nil {
- return nil, err
- }
- fieldInits = append(fieldInits, ctx.NewStructField(f, fieldExpr, false))
- }
- return ctx.NewStruct(t.TypeName(), fieldInits), nil
- }
- }
- return nil, fmt.Errorf("failed to adapt %v to literal", val)
-}
-
-// constantExprMatcher matches calls, select statements, and comprehensions whose arguments
-// are all constant scalar or aggregate literal values.
-//
-// Only comprehensions which are not nested are included as possible constant folds, and only
-// if all variables referenced in the comprehension stack exist are only iteration or
-// accumulation variables.
-func constantExprMatcher(e ast.NavigableExpr) bool {
- switch e.Kind() {
- case ast.CallKind:
- return constantCallMatcher(e)
- case ast.SelectKind:
- sel := e.AsSelect() // guaranteed to be a navigable value
- return constantMatcher(sel.Operand().(ast.NavigableExpr))
- case ast.ComprehensionKind:
- if isNestedComprehension(e) {
- return false
- }
- vars := map[string]bool{}
- constantExprs := true
- visitor := ast.NewExprVisitor(func(e ast.Expr) {
- if e.Kind() == ast.ComprehensionKind {
- nested := e.AsComprehension()
- vars[nested.AccuVar()] = true
- vars[nested.IterVar()] = true
- }
- if e.Kind() == ast.IdentKind && !vars[e.AsIdent()] {
- constantExprs = false
- }
- })
- ast.PreOrderVisit(e, visitor)
- return constantExprs
- default:
- return false
- }
-}
-
-// constantCallMatcher identifies strict and non-strict calls which can be folded.
-func constantCallMatcher(e ast.NavigableExpr) bool {
- call := e.AsCall()
- children := e.Children()
- fnName := call.FunctionName()
- if fnName == operators.LogicalAnd {
- for _, child := range children {
- if child.Kind() == ast.LiteralKind {
- return true
- }
- }
- }
- if fnName == operators.LogicalOr {
- for _, child := range children {
- if child.Kind() == ast.LiteralKind {
- return true
- }
- }
- }
- if fnName == operators.Conditional {
- cond := children[0]
- if cond.Kind() == ast.LiteralKind && cond.AsLiteral().Type() == types.BoolType {
- return true
- }
- }
- if fnName == operators.In {
- haystack := children[1]
- if haystack.Kind() == ast.ListKind && haystack.AsList().Size() == 0 {
- return true
- }
- needle := children[0]
- if needle.Kind() == ast.LiteralKind && haystack.Kind() == ast.ListKind {
- needleValue := needle.AsLiteral()
- list := haystack.AsList()
- for _, e := range list.Elements() {
- if e.Kind() == ast.LiteralKind && e.AsLiteral().Equal(needleValue) == types.True {
- return true
- }
- }
- }
- }
- // convert all other calls with constant arguments
- for _, child := range children {
- if !constantMatcher(child) {
- return false
- }
- }
- return true
-}
-
-func isNestedComprehension(e ast.NavigableExpr) bool {
- parent, found := e.Parent()
- for found {
- if parent.Kind() == ast.ComprehensionKind {
- return true
- }
- parent, found = parent.Parent()
- }
- return false
-}
-
-func aggregateLiteralMatcher(e ast.NavigableExpr) bool {
- return e.Kind() == ast.ListKind || e.Kind() == ast.MapKind || e.Kind() == ast.StructKind
-}
-
-var (
- constantMatcher = ast.ConstantValueMatcher()
-)
-
-const (
- defaultMaxConstantFoldIterations = 100
-)
diff --git a/vendor/github.com/google/cel-go/cel/inlining.go b/vendor/github.com/google/cel-go/cel/inlining.go
deleted file mode 100644
index 8c8335d3b..000000000
--- a/vendor/github.com/google/cel-go/cel/inlining.go
+++ /dev/null
@@ -1,240 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/containers"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/traits"
-)
-
-// InlineVariable holds a variable name to be matched and an AST representing
-// the expression graph which should be used to replace it.
-type InlineVariable struct {
- name string
- alias string
- def *ast.AST
-}
-
-// Name returns the qualified variable or field selection to replace.
-func (v *InlineVariable) Name() string {
- return v.name
-}
-
-// Alias returns the alias to use when performing cel.bind() calls during inlining.
-func (v *InlineVariable) Alias() string {
- return v.alias
-}
-
-// Expr returns the inlined expression value.
-func (v *InlineVariable) Expr() ast.Expr {
- return v.def.Expr()
-}
-
-// Type indicates the inlined expression type.
-func (v *InlineVariable) Type() *Type {
- return v.def.GetType(v.def.Expr().ID())
-}
-
-// NewInlineVariable declares a variable name to be replaced by a checked expression.
-func NewInlineVariable(name string, definition *Ast) *InlineVariable {
- return NewInlineVariableWithAlias(name, name, definition)
-}
-
-// NewInlineVariableWithAlias declares a variable name to be replaced by a checked expression.
-// If the variable occurs more than once, the provided alias will be used to replace the expressions
-// where the variable name occurs.
-func NewInlineVariableWithAlias(name, alias string, definition *Ast) *InlineVariable {
- return &InlineVariable{name: name, alias: alias, def: definition.impl}
-}
-
-// NewInliningOptimizer creates and optimizer which replaces variables with expression definitions.
-//
-// If a variable occurs one time, the variable is replaced by the inline definition. If the
-// variable occurs more than once, the variable occurences are replaced by a cel.bind() call.
-func NewInliningOptimizer(inlineVars ...*InlineVariable) ASTOptimizer {
- return &inliningOptimizer{variables: inlineVars}
-}
-
-type inliningOptimizer struct {
- variables []*InlineVariable
-}
-
-func (opt *inliningOptimizer) Optimize(ctx *OptimizerContext, a *ast.AST) *ast.AST {
- root := ast.NavigateAST(a)
- for _, inlineVar := range opt.variables {
- matches := ast.MatchDescendants(root, opt.matchVariable(inlineVar.Name()))
- // Skip cases where the variable isn't in the expression graph
- if len(matches) == 0 {
- continue
- }
-
- // For a single match, do a direct replacement of the expression sub-graph.
- if len(matches) == 1 || !isBindable(matches, inlineVar.Expr(), inlineVar.Type()) {
- for _, match := range matches {
- // Copy the inlined AST expr and source info.
- copyExpr := copyASTAndMetadata(ctx, inlineVar.def)
- opt.inlineExpr(ctx, match, copyExpr, inlineVar.Type())
- }
- continue
- }
-
- // For multiple matches, find the least common ancestor (lca) and insert the
- // variable as a cel.bind() macro.
- var lca ast.NavigableExpr = root
- lcaAncestorCount := 0
- ancestors := map[int64]int{}
- for _, match := range matches {
- // Update the identifier matches with the provided alias.
- parent, found := match, true
- for found {
- ancestorCount, hasAncestor := ancestors[parent.ID()]
- if !hasAncestor {
- ancestors[parent.ID()] = 1
- parent, found = parent.Parent()
- continue
- }
- if lcaAncestorCount < ancestorCount || (lcaAncestorCount == ancestorCount && lca.Depth() < parent.Depth()) {
- lca = parent
- lcaAncestorCount = ancestorCount
- }
- ancestors[parent.ID()] = ancestorCount + 1
- parent, found = parent.Parent()
- }
- aliasExpr := ctx.NewIdent(inlineVar.Alias())
- opt.inlineExpr(ctx, match, aliasExpr, inlineVar.Type())
- }
-
- // Copy the inlined AST expr and source info.
- copyExpr := copyASTAndMetadata(ctx, inlineVar.def)
- // Update the least common ancestor by inserting a cel.bind() call to the alias.
- inlined, bindMacro := ctx.NewBindMacro(lca.ID(), inlineVar.Alias(), copyExpr, lca)
- opt.inlineExpr(ctx, lca, inlined, inlineVar.Type())
- ctx.sourceInfo.SetMacroCall(lca.ID(), bindMacro)
- }
- return a
-}
-
-// copyASTAndMetadata copies the input AST and propagates the macro metadata into the AST being
-// optimized.
-func copyASTAndMetadata(ctx *OptimizerContext, a *ast.AST) ast.Expr {
- copyExpr, copyInfo := ctx.CopyAST(a)
- // Add in the macro calls from the inlined AST
- for id, call := range copyInfo.MacroCalls() {
- ctx.sourceInfo.SetMacroCall(id, call)
- }
- return copyExpr
-}
-
-// inlineExpr replaces the current expression with the inlined one, unless the location of the inlining
-// happens within a presence test, e.g. has(a.b.c) -> inline alpha for a.b.c in which case an attempt is
-// made to determine whether the inlined value can be presence or existence tested.
-func (opt *inliningOptimizer) inlineExpr(ctx *OptimizerContext, prev ast.NavigableExpr, inlined ast.Expr, inlinedType *Type) {
- switch prev.Kind() {
- case ast.SelectKind:
- sel := prev.AsSelect()
- if !sel.IsTestOnly() {
- ctx.UpdateExpr(prev, inlined)
- return
- }
- opt.rewritePresenceExpr(ctx, prev, inlined, inlinedType)
- default:
- ctx.UpdateExpr(prev, inlined)
- }
-}
-
-// rewritePresenceExpr converts the inlined expression, when it occurs within a has() macro, to type-safe
-// expression appropriate for the inlined type, if possible.
-//
-// If the rewrite is not possible an error is reported at the inline expression site.
-func (opt *inliningOptimizer) rewritePresenceExpr(ctx *OptimizerContext, prev, inlined ast.Expr, inlinedType *Type) {
- // If the input inlined expression is not a select expression it won't work with the has()
- // macro. Attempt to rewrite the presence test in terms of the typed input, otherwise error.
- if inlined.Kind() == ast.SelectKind {
- presenceTest, hasMacro := ctx.NewHasMacro(prev.ID(), inlined)
- ctx.UpdateExpr(prev, presenceTest)
- ctx.sourceInfo.SetMacroCall(prev.ID(), hasMacro)
- return
- }
-
- ctx.sourceInfo.ClearMacroCall(prev.ID())
- if inlinedType.IsAssignableType(NullType) {
- ctx.UpdateExpr(prev,
- ctx.NewCall(operators.NotEquals,
- inlined,
- ctx.NewLiteral(types.NullValue),
- ))
- return
- }
- if inlinedType.HasTrait(traits.SizerType) {
- ctx.UpdateExpr(prev,
- ctx.NewCall(operators.NotEquals,
- ctx.NewMemberCall(overloads.Size, inlined),
- ctx.NewLiteral(types.IntZero),
- ))
- return
- }
- ctx.ReportErrorAtID(prev.ID(), "unable to inline expression type %v into presence test", inlinedType)
-}
-
-// isBindable indicates whether the inlined type can be used within a cel.bind() if the expression
-// being replaced occurs within a presence test. Value types with a size() method or field selection
-// support can be bound.
-//
-// In future iterations, support may also be added for indexer types which can be rewritten as an `in`
-// expression; however, this would imply a rewrite of the inlined expression that may not be necessary
-// in most cases.
-func isBindable(matches []ast.NavigableExpr, inlined ast.Expr, inlinedType *Type) bool {
- if inlinedType.IsAssignableType(NullType) ||
- inlinedType.HasTrait(traits.SizerType) ||
- inlinedType.HasTrait(traits.FieldTesterType) {
- return true
- }
- for _, m := range matches {
- if m.Kind() != ast.SelectKind {
- continue
- }
- sel := m.AsSelect()
- if sel.IsTestOnly() {
- return false
- }
- }
- return true
-}
-
-// matchVariable matches simple identifiers, select expressions, and presence test expressions
-// which match the (potentially) qualified variable name provided as input.
-//
-// Note, this function does not support inlining against select expressions which includes optional
-// field selection. This may be a future refinement.
-func (opt *inliningOptimizer) matchVariable(varName string) ast.ExprMatcher {
- return func(e ast.NavigableExpr) bool {
- if e.Kind() == ast.IdentKind && e.AsIdent() == varName {
- return true
- }
- if e.Kind() == ast.SelectKind {
- sel := e.AsSelect()
- // While the `ToQualifiedName` call could take the select directly, this
- // would skip presence tests from possible matches, which we would like
- // to include.
- qualName, found := containers.ToQualifiedName(sel.Operand())
- return found && qualName+"."+sel.FieldName() == varName
- }
- return false
- }
-}
diff --git a/vendor/github.com/google/cel-go/cel/io.go b/vendor/github.com/google/cel-go/cel/io.go
deleted file mode 100644
index 3133fb9d7..000000000
--- a/vendor/github.com/google/cel-go/cel/io.go
+++ /dev/null
@@ -1,252 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "errors"
- "fmt"
- "reflect"
-
- "google.golang.org/protobuf/proto"
-
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
- "github.com/google/cel-go/parser"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
- anypb "google.golang.org/protobuf/types/known/anypb"
-)
-
-// CheckedExprToAst converts a checked expression proto message to an Ast.
-func CheckedExprToAst(checkedExpr *exprpb.CheckedExpr) *Ast {
- checked, _ := CheckedExprToAstWithSource(checkedExpr, nil)
- return checked
-}
-
-// CheckedExprToAstWithSource converts a checked expression proto message to an Ast,
-// using the provided Source as the textual contents.
-//
-// In general the source is not necessary unless the AST has been modified between the
-// `Parse` and `Check` calls as an `Ast` created from the `Parse` step will carry the source
-// through future calls.
-//
-// Prefer CheckedExprToAst if loading expressions from storage.
-func CheckedExprToAstWithSource(checkedExpr *exprpb.CheckedExpr, src Source) (*Ast, error) {
- checked, err := ast.ToAST(checkedExpr)
- if err != nil {
- return nil, err
- }
- return &Ast{source: src, impl: checked}, nil
-}
-
-// AstToCheckedExpr converts an Ast to an protobuf CheckedExpr value.
-//
-// If the Ast.IsChecked() returns false, this conversion method will return an error.
-func AstToCheckedExpr(a *Ast) (*exprpb.CheckedExpr, error) {
- if !a.IsChecked() {
- return nil, fmt.Errorf("cannot convert unchecked ast")
- }
- return ast.ToProto(a.impl)
-}
-
-// ParsedExprToAst converts a parsed expression proto message to an Ast.
-func ParsedExprToAst(parsedExpr *exprpb.ParsedExpr) *Ast {
- return ParsedExprToAstWithSource(parsedExpr, nil)
-}
-
-// ParsedExprToAstWithSource converts a parsed expression proto message to an Ast,
-// using the provided Source as the textual contents.
-//
-// In general you only need this if you need to recheck a previously checked
-// expression, or if you need to separately check a subset of an expression.
-//
-// Prefer ParsedExprToAst if loading expressions from storage.
-func ParsedExprToAstWithSource(parsedExpr *exprpb.ParsedExpr, src Source) *Ast {
- info, _ := ast.ProtoToSourceInfo(parsedExpr.GetSourceInfo())
- if src == nil {
- src = common.NewInfoSource(parsedExpr.GetSourceInfo())
- }
- e, _ := ast.ProtoToExpr(parsedExpr.GetExpr())
- return &Ast{source: src, impl: ast.NewAST(e, info)}
-}
-
-// AstToParsedExpr converts an Ast to a protobuf ParsedExpr value.
-func AstToParsedExpr(a *Ast) (*exprpb.ParsedExpr, error) {
- return &exprpb.ParsedExpr{
- Expr: a.Expr(),
- SourceInfo: a.SourceInfo(),
- }, nil
-}
-
-// AstToString converts an Ast back to a string if possible.
-//
-// Note, the conversion may not be an exact replica of the original expression, but will produce
-// a string that is semantically equivalent and whose textual representation is stable.
-func AstToString(a *Ast) (string, error) {
- return parser.Unparse(a.impl.Expr(), a.impl.SourceInfo())
-}
-
-// RefValueToValue converts between ref.Val and api.expr.Value.
-// The result Value is the serialized proto form. The ref.Val must not be error or unknown.
-func RefValueToValue(res ref.Val) (*exprpb.Value, error) {
- switch res.Type() {
- case types.BoolType:
- return &exprpb.Value{
- Kind: &exprpb.Value_BoolValue{BoolValue: res.Value().(bool)}}, nil
- case types.BytesType:
- return &exprpb.Value{
- Kind: &exprpb.Value_BytesValue{BytesValue: res.Value().([]byte)}}, nil
- case types.DoubleType:
- return &exprpb.Value{
- Kind: &exprpb.Value_DoubleValue{DoubleValue: res.Value().(float64)}}, nil
- case types.IntType:
- return &exprpb.Value{
- Kind: &exprpb.Value_Int64Value{Int64Value: res.Value().(int64)}}, nil
- case types.ListType:
- l := res.(traits.Lister)
- sz := l.Size().(types.Int)
- elts := make([]*exprpb.Value, 0, int64(sz))
- for i := types.Int(0); i < sz; i++ {
- v, err := RefValueToValue(l.Get(i))
- if err != nil {
- return nil, err
- }
- elts = append(elts, v)
- }
- return &exprpb.Value{
- Kind: &exprpb.Value_ListValue{
- ListValue: &exprpb.ListValue{Values: elts}}}, nil
- case types.MapType:
- mapper := res.(traits.Mapper)
- sz := mapper.Size().(types.Int)
- entries := make([]*exprpb.MapValue_Entry, 0, int64(sz))
- for it := mapper.Iterator(); it.HasNext().(types.Bool); {
- k := it.Next()
- v := mapper.Get(k)
- kv, err := RefValueToValue(k)
- if err != nil {
- return nil, err
- }
- vv, err := RefValueToValue(v)
- if err != nil {
- return nil, err
- }
- entries = append(entries, &exprpb.MapValue_Entry{Key: kv, Value: vv})
- }
- return &exprpb.Value{
- Kind: &exprpb.Value_MapValue{
- MapValue: &exprpb.MapValue{Entries: entries}}}, nil
- case types.NullType:
- return &exprpb.Value{
- Kind: &exprpb.Value_NullValue{}}, nil
- case types.StringType:
- return &exprpb.Value{
- Kind: &exprpb.Value_StringValue{StringValue: res.Value().(string)}}, nil
- case types.TypeType:
- typeName := res.(ref.Type).TypeName()
- return &exprpb.Value{Kind: &exprpb.Value_TypeValue{TypeValue: typeName}}, nil
- case types.UintType:
- return &exprpb.Value{
- Kind: &exprpb.Value_Uint64Value{Uint64Value: res.Value().(uint64)}}, nil
- default:
- any, err := res.ConvertToNative(anyPbType)
- if err != nil {
- return nil, err
- }
- return &exprpb.Value{
- Kind: &exprpb.Value_ObjectValue{ObjectValue: any.(*anypb.Any)}}, nil
- }
-}
-
-var (
- typeNameToTypeValue = map[string]ref.Val{
- "bool": types.BoolType,
- "bytes": types.BytesType,
- "double": types.DoubleType,
- "null_type": types.NullType,
- "int": types.IntType,
- "list": types.ListType,
- "map": types.MapType,
- "string": types.StringType,
- "type": types.TypeType,
- "uint": types.UintType,
- }
-
- anyPbType = reflect.TypeOf(&anypb.Any{})
-)
-
-// ValueToRefValue converts between exprpb.Value and ref.Val.
-func ValueToRefValue(adapter types.Adapter, v *exprpb.Value) (ref.Val, error) {
- switch v.Kind.(type) {
- case *exprpb.Value_NullValue:
- return types.NullValue, nil
- case *exprpb.Value_BoolValue:
- return types.Bool(v.GetBoolValue()), nil
- case *exprpb.Value_Int64Value:
- return types.Int(v.GetInt64Value()), nil
- case *exprpb.Value_Uint64Value:
- return types.Uint(v.GetUint64Value()), nil
- case *exprpb.Value_DoubleValue:
- return types.Double(v.GetDoubleValue()), nil
- case *exprpb.Value_StringValue:
- return types.String(v.GetStringValue()), nil
- case *exprpb.Value_BytesValue:
- return types.Bytes(v.GetBytesValue()), nil
- case *exprpb.Value_ObjectValue:
- any := v.GetObjectValue()
- msg, err := anypb.UnmarshalNew(any, proto.UnmarshalOptions{DiscardUnknown: true})
- if err != nil {
- return nil, err
- }
- return adapter.NativeToValue(msg), nil
- case *exprpb.Value_MapValue:
- m := v.GetMapValue()
- entries := make(map[ref.Val]ref.Val)
- for _, entry := range m.Entries {
- key, err := ValueToRefValue(adapter, entry.Key)
- if err != nil {
- return nil, err
- }
- pb, err := ValueToRefValue(adapter, entry.Value)
- if err != nil {
- return nil, err
- }
- entries[key] = pb
- }
- return adapter.NativeToValue(entries), nil
- case *exprpb.Value_ListValue:
- l := v.GetListValue()
- elts := make([]ref.Val, len(l.Values))
- for i, e := range l.Values {
- rv, err := ValueToRefValue(adapter, e)
- if err != nil {
- return nil, err
- }
- elts[i] = rv
- }
- return adapter.NativeToValue(elts), nil
- case *exprpb.Value_TypeValue:
- typeName := v.GetTypeValue()
- tv, ok := typeNameToTypeValue[typeName]
- if ok {
- return tv, nil
- }
- return types.NewObjectTypeValue(typeName), nil
- }
- return nil, errors.New("unknown value")
-}
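
As a rough sketch of how the two value converters above compose, the following round-trips a list value through the serialized proto form; the use of types.DefaultTypeAdapter and the sample data are assumptions made for illustration:

import (
	"fmt"

	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/common/types"
)

func roundTripValue() error {
	// Wrap a native Go slice as a ref.Val using the default type adapter.
	val := types.DefaultTypeAdapter.NativeToValue([]string{"a", "b"})

	// Convert the ref.Val into its serialized exprpb.Value form.
	pbVal, err := cel.RefValueToValue(val)
	if err != nil {
		return err
	}

	// Convert back into a ref.Val for further use in CEL evaluation.
	back, err := cel.ValueToRefValue(types.DefaultTypeAdapter, pbVal)
	if err != nil {
		return err
	}
	fmt.Println(back.Equal(val)) // true
	return nil
}
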
diff --git a/vendor/github.com/google/cel-go/cel/library.go b/vendor/github.com/google/cel-go/cel/library.go
deleted file mode 100644
index deddc14e5..000000000
--- a/vendor/github.com/google/cel-go/cel/library.go
+++ /dev/null
@@ -1,784 +0,0 @@
-// Copyright 2020 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "math"
- "strconv"
- "strings"
- "time"
-
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/stdlib"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
- "github.com/google/cel-go/interpreter"
- "github.com/google/cel-go/parser"
-)
-
-const (
- optMapMacro = "optMap"
- optFlatMapMacro = "optFlatMap"
- hasValueFunc = "hasValue"
- optionalNoneFunc = "optional.none"
- optionalOfFunc = "optional.of"
- optionalOfNonZeroValueFunc = "optional.ofNonZeroValue"
- valueFunc = "value"
- unusedIterVar = "#unused"
-)
-
-// Library provides a collection of EnvOption and ProgramOption values used to configure a CEL
-// environment for a particular use case or with a related set of functionality.
-//
-// Note, the ProgramOption values provided by a library are expected to be static and not vary
-// between calls to Env.Program(). If there is a need for such dynamic configuration, prefer to
-// configure these options outside the Library and within the Env.Program() call directly.
-type Library interface {
- // CompileOptions returns a collection of functional options for configuring the Parse / Check
- // environment.
- CompileOptions() []EnvOption
-
- // ProgramOptions returns a collection of functional options which should be included in every
- // Program generated from the Env.Program() call.
- ProgramOptions() []ProgramOption
-}
-
-// SingletonLibrary refines the Library interface to ensure that libraries in this format are only
-// configured once within the environment.
-type SingletonLibrary interface {
- Library
-
- // LibraryName provides a namespaced name which is used to check whether the library has already
- // been configured in the environment.
- LibraryName() string
-}
-
-// Lib creates an EnvOption out of a Library, allowing libraries to be provided as functional args,
-// and to be linked to each other.
-func Lib(l Library) EnvOption {
- singleton, isSingleton := l.(SingletonLibrary)
- return func(e *Env) (*Env, error) {
- if isSingleton {
- if e.HasLibrary(singleton.LibraryName()) {
- return e, nil
- }
- e.libraries[singleton.LibraryName()] = true
- }
- var err error
- for _, opt := range l.CompileOptions() {
- e, err = opt(e)
- if err != nil {
- return nil, err
- }
- }
- e.progOpts = append(e.progOpts, l.ProgramOptions()...)
- return e, nil
- }
-}
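
To make the interface contract concrete, a small hypothetical library that contributes a single function through CompileOptions might look like the sketch below; the greet function, overload id, and library name are invented for the example:

import (
	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/common/types/ref"
)

// greetLib is a hypothetical library exposing one `greet` function.
type greetLib struct{}

// LibraryName lets Lib() deduplicate the library if it is applied twice.
func (greetLib) LibraryName() string { return "example.lib.greet" }

// CompileOptions declares the function and binds its implementation.
func (greetLib) CompileOptions() []cel.EnvOption {
	return []cel.EnvOption{
		cel.Function("greet",
			cel.Overload("greet_string", []*cel.Type{cel.StringType}, cel.StringType,
				cel.UnaryBinding(func(arg ref.Val) ref.Val {
					return types.String("hello " + string(arg.(types.String)))
				}))),
	}
}

// ProgramOptions is empty since the bindings are static.
func (greetLib) ProgramOptions() []cel.ProgramOption { return nil }

// Usage: env, err := cel.NewEnv(cel.Lib(greetLib{}))
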
-
-// StdLib returns an EnvOption for the standard library of CEL functions and macros.
-func StdLib() EnvOption {
- return Lib(stdLibrary{})
-}
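
StdLib is mostly relevant together with cel.NewCustomEnv, which starts from an empty environment; a minimal sketch:

import "github.com/google/cel-go/cel"

func customEnv() (*cel.Env, error) {
	// NewCustomEnv includes no declarations by default, so the standard
	// functions and macros must be requested explicitly.
	return cel.NewCustomEnv(cel.StdLib())
}
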
-
-// stdLibrary implements the Library interface and provides functional options for the core CEL
-// features documented in the specification.
-type stdLibrary struct{}
-
-// LibraryName implements the SingletonLibrary interface method.
-func (stdLibrary) LibraryName() string {
- return "cel.lib.std"
-}
-
-// CompileOptions returns options for the standard CEL function declarations and macros.
-func (stdLibrary) CompileOptions() []EnvOption {
- return []EnvOption{
- func(e *Env) (*Env, error) {
- var err error
- for _, fn := range stdlib.Functions() {
- existing, found := e.functions[fn.Name()]
- if found {
- fn, err = existing.Merge(fn)
- if err != nil {
- return nil, err
- }
- }
- e.functions[fn.Name()] = fn
- }
- return e, nil
- },
- func(e *Env) (*Env, error) {
- e.variables = append(e.variables, stdlib.Types()...)
- return e, nil
- },
- Macros(StandardMacros...),
- }
-}
-
-// ProgramOptions returns function implementations for the standard CEL functions.
-func (stdLibrary) ProgramOptions() []ProgramOption {
- return []ProgramOption{}
-}
-
-// OptionalTypes enable support for optional syntax and types in CEL.
-//
-// The optional value type makes it possible to express whether variables have
-// been provided, whether a result has been computed, and in the future whether
-// an object field path, map key value, or list index has a value.
-//
-// # Syntax Changes
-//
-// OptionalTypes are unlike other CEL extensions because they modify the CEL
-// syntax itself, notably through the use of a `?` preceding a field name or
-// index value.
-//
-// ## Field Selection
-//
-// The optional syntax in field selection is denoted as `obj.?field`. In other
-// words, if a field is set, return `optional.of(obj.field)`, else
-// `optional.none()`. The optional field selection is viral in the sense that
-// after the first optional selection all subsequent selections or indices
-// are treated as optional, i.e. the following expressions are equivalent:
-//
-// obj.?field.subfield
-// obj.?field.?subfield
-//
-// ## Indexing
-//
-// Similar to field selection, the optional syntax can be used in index
-// expressions on maps and lists:
-//
-// list[?0]
-// map[?key]
-//
-// ## Optional Field Setting
-//
-// When creating map or message literals, if a field may be optionally set
-// based on its presence, then placing a `?` before the field name or key
-// will ensure the type on the right-hand side must be optional(T) where T
-// is the type of the field or key-value.
-//
-// The following returns a map with the key expression set only if the
-// subfield is present, otherwise an empty map is created:
-//
-// {?key: obj.?field.subfield}
-//
-// ## Optional Element Setting
-//
-// When creating list literals, an element in the list may be optionally added
-// when the element expression is preceded by a `?`:
-//
-// [a, ?b, ?c] // return a list with either [a], [a, b], [a, b, c], or [a, c]
-//
-// # Optional.Of
-//
-// Create an optional(T) value of a given value with type T.
-//
-// optional.of(10)
-//
-// # Optional.OfNonZeroValue
-//
-// Create an optional(T) value of a given value with type T if it is not a
-// zero-value. A zero-value is the default empty value for any given CEL type,
-// including empty protobuf message types. If the value is empty, the result
-// of this call will be optional.none().
-//
-// optional.ofNonZeroValue([1, 2, 3]) // optional(list(int))
-// optional.ofNonZeroValue([]) // optional.none()
-// optional.ofNonZeroValue(0) // optional.none()
-// optional.ofNonZeroValue("") // optional.none()
-//
-// # Optional.None
-//
-// Create an empty optional value.
-//
-// # HasValue
-//
-// Determine whether the optional contains a value.
-//
-// optional.of(b'hello').hasValue() // true
-// optional.ofNonZeroValue({}).hasValue() // false
-//
-// # Value
-//
-// Get the value contained by the optional. If the optional does not have a
-// value, the result will be a CEL error.
-//
-// optional.of(b'hello').value() // b'hello'
-// optional.ofNonZeroValue({}).value() // error
-//
-// # Or
-//
-// If the value on the left-hand side is optional.none(), the optional value
-// on the right hand side is returned. If the value on the left-hand side is
-// valued, then it is returned. This operation is short-circuiting and will
-// only evaluate as many links in the `or` chain as are needed to return a
-// non-empty optional value.
-//
-// obj.?field.or(m[?key])
-// l[?index].or(obj.?field.subfield).or(obj.?other)
-//
-// # OrValue
-//
-// Either return the value contained within the optional on the left-hand side
-// or return the alternative value on the right hand side.
-//
-// m[?key].orValue("none")
-//
-// # OptMap
-//
-// Apply a transformation to the optional's underlying value if it is not empty
-// and return an optional typed result based on the transformation. The
-// transformation expression type must return a type T which is wrapped into
-// an optional.
-//
-// msg.?elements.optMap(e, e.size()).orValue(0)
-//
-// # OptFlatMap
-//
-// Introduced in version: 1
-//
-// Apply a transformation to the optional's underlying value if it is not empty
-// and return the result. The transform expression must return an optional(T)
-// rather than type T. This can be useful when dealing with zero values and
-// conditionally generating an empty or non-empty result in ways which cannot
-// be expressed with `optMap`.
-//
-// msg.?elements.optFlatMap(e, e[?0]) // return the first element if present.
-func OptionalTypes(opts ...OptionalTypesOption) EnvOption {
- lib := &optionalLib{version: math.MaxUint32}
- for _, opt := range opts {
- lib = opt(lib)
- }
- return Lib(lib)
-}
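
A compact usage sketch of the optional syntax and orValue behavior documented above; the variable name, key, and expected output are illustrative assumptions:

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func optionalExample() error {
	env, err := cel.NewEnv(
		cel.OptionalTypes(),
		cel.Variable("m", cel.MapType(cel.StringType, cel.StringType)),
	)
	if err != nil {
		return err
	}
	// orValue returns the contained value, or the supplied default when absent.
	compiled, iss := env.Compile(`m[?"key"].orValue("none")`)
	if iss.Err() != nil {
		return iss.Err()
	}
	prg, err := env.Program(compiled)
	if err != nil {
		return err
	}
	out, _, err := prg.Eval(map[string]any{"m": map[string]string{}})
	if err != nil {
		return err
	}
	fmt.Println(out) // none
	return nil
}
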
-
-type optionalLib struct {
- version uint32
-}
-
-// OptionalTypesOption is a functional interface for configuring the optional types library.
-type OptionalTypesOption func(*optionalLib) *optionalLib
-
-// OptionalTypesVersion configures the version of the optional type library.
-//
-// The version limits which functions are available. Only functions introduced
-// at or below the given version are included in the library. If this option
-// is not set, all functions are available.
-//
-// See the library documentation to determine which version a function was introduced.
-// If the documentation does not state which version a function was introduced, it can
-// be assumed to be introduced at version 0, when the library was first created.
-func OptionalTypesVersion(version uint32) OptionalTypesOption {
- return func(lib *optionalLib) *optionalLib {
- lib.version = version
- return lib
- }
-}
-
-// LibraryName implements the SingletonLibrary interface method.
-func (lib *optionalLib) LibraryName() string {
- return "cel.lib.optional"
-}
-
-// CompileOptions implements the Library interface method.
-func (lib *optionalLib) CompileOptions() []EnvOption {
- paramTypeK := TypeParamType("K")
- paramTypeV := TypeParamType("V")
- optionalTypeV := OptionalType(paramTypeV)
- listTypeV := ListType(paramTypeV)
- mapTypeKV := MapType(paramTypeK, paramTypeV)
-
- opts := []EnvOption{
- // Enable the optional syntax in the parser.
- enableOptionalSyntax(),
-
- // Introduce the optional type.
- Types(types.OptionalType),
-
- // Configure the optMap and optFlatMap macros.
- Macros(ReceiverMacro(optMapMacro, 2, optMap)),
-
- // Global and member functions for working with optional values.
- Function(optionalOfFunc,
- Overload("optional_of", []*Type{paramTypeV}, optionalTypeV,
- UnaryBinding(func(value ref.Val) ref.Val {
- return types.OptionalOf(value)
- }))),
- Function(optionalOfNonZeroValueFunc,
- Overload("optional_ofNonZeroValue", []*Type{paramTypeV}, optionalTypeV,
- UnaryBinding(func(value ref.Val) ref.Val {
- v, isZeroer := value.(traits.Zeroer)
- if !isZeroer || !v.IsZeroValue() {
- return types.OptionalOf(value)
- }
- return types.OptionalNone
- }))),
- Function(optionalNoneFunc,
- Overload("optional_none", []*Type{}, optionalTypeV,
- FunctionBinding(func(values ...ref.Val) ref.Val {
- return types.OptionalNone
- }))),
- Function(valueFunc,
- MemberOverload("optional_value", []*Type{optionalTypeV}, paramTypeV,
- UnaryBinding(func(value ref.Val) ref.Val {
- opt := value.(*types.Optional)
- return opt.GetValue()
- }))),
- Function(hasValueFunc,
- MemberOverload("optional_hasValue", []*Type{optionalTypeV}, BoolType,
- UnaryBinding(func(value ref.Val) ref.Val {
- opt := value.(*types.Optional)
- return types.Bool(opt.HasValue())
- }))),
-
- // Implementation of 'or' and 'orValue' are special-cased to support short-circuiting in the
- // evaluation chain.
- Function("or",
- MemberOverload("optional_or_optional", []*Type{optionalTypeV, optionalTypeV}, optionalTypeV)),
- Function("orValue",
- MemberOverload("optional_orValue_value", []*Type{optionalTypeV, paramTypeV}, paramTypeV)),
-
- // OptSelect is handled specially by the type-checker, so the receiver's field type is used to determine the
-		// output type.
- Function(operators.OptSelect,
- Overload("select_optional_field", []*Type{DynType, StringType}, optionalTypeV)),
-
- // OptIndex is handled mostly like any other indexing operation on a list or map, so the type-checker can use
- // these signatures to determine type-agreement without any special handling.
- Function(operators.OptIndex,
- Overload("list_optindex_optional_int", []*Type{listTypeV, IntType}, optionalTypeV),
- Overload("optional_list_optindex_optional_int", []*Type{OptionalType(listTypeV), IntType}, optionalTypeV),
- Overload("map_optindex_optional_value", []*Type{mapTypeKV, paramTypeK}, optionalTypeV),
- Overload("optional_map_optindex_optional_value", []*Type{OptionalType(mapTypeKV), paramTypeK}, optionalTypeV)),
-
- // Index overloads to accommodate using an optional value as the operand.
- Function(operators.Index,
- Overload("optional_list_index_int", []*Type{OptionalType(listTypeV), IntType}, optionalTypeV),
- Overload("optional_map_index_value", []*Type{OptionalType(mapTypeKV), paramTypeK}, optionalTypeV)),
- }
- if lib.version >= 1 {
- opts = append(opts, Macros(ReceiverMacro(optFlatMapMacro, 2, optFlatMap)))
- }
- return opts
-}
-
-// ProgramOptions implements the Library interface method.
-func (lib *optionalLib) ProgramOptions() []ProgramOption {
- return []ProgramOption{
- CustomDecorator(decorateOptionalOr),
- }
-}
-
-func optMap(meh MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *Error) {
- varIdent := args[0]
- varName := ""
- switch varIdent.Kind() {
- case ast.IdentKind:
- varName = varIdent.AsIdent()
- default:
- return nil, meh.NewError(varIdent.ID(), "optMap() variable name must be a simple identifier")
- }
- mapExpr := args[1]
- return meh.NewCall(
- operators.Conditional,
- meh.NewMemberCall(hasValueFunc, target),
- meh.NewCall(optionalOfFunc,
- meh.NewComprehension(
- meh.NewList(),
- unusedIterVar,
- varName,
- meh.NewMemberCall(valueFunc, target),
- meh.NewLiteral(types.False),
- meh.NewIdent(varName),
- mapExpr,
- ),
- ),
- meh.NewCall(optionalNoneFunc),
- ), nil
-}
-
-func optFlatMap(meh MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *Error) {
- varIdent := args[0]
- varName := ""
- switch varIdent.Kind() {
- case ast.IdentKind:
- varName = varIdent.AsIdent()
- default:
- return nil, meh.NewError(varIdent.ID(), "optFlatMap() variable name must be a simple identifier")
- }
- mapExpr := args[1]
- return meh.NewCall(
- operators.Conditional,
- meh.NewMemberCall(hasValueFunc, target),
- meh.NewComprehension(
- meh.NewList(),
- unusedIterVar,
- varName,
- meh.NewMemberCall(valueFunc, target),
- meh.NewLiteral(types.False),
- meh.NewIdent(varName),
- mapExpr,
- ),
- meh.NewCall(optionalNoneFunc),
- ), nil
-}
-
-func enableOptionalSyntax() EnvOption {
- return func(e *Env) (*Env, error) {
- e.prsrOpts = append(e.prsrOpts, parser.EnableOptionalSyntax(true))
- return e, nil
- }
-}
-
-func decorateOptionalOr(i interpreter.Interpretable) (interpreter.Interpretable, error) {
- call, ok := i.(interpreter.InterpretableCall)
- if !ok {
- return i, nil
- }
- args := call.Args()
- if len(args) != 2 {
- return i, nil
- }
- switch call.Function() {
- case "or":
- if call.OverloadID() != "" && call.OverloadID() != "optional_or_optional" {
- return i, nil
- }
- return &evalOptionalOr{
- id: call.ID(),
- lhs: args[0],
- rhs: args[1],
- }, nil
- case "orValue":
- if call.OverloadID() != "" && call.OverloadID() != "optional_orValue_value" {
- return i, nil
- }
- return &evalOptionalOrValue{
- id: call.ID(),
- lhs: args[0],
- rhs: args[1],
- }, nil
- default:
- return i, nil
- }
-}
-
-// evalOptionalOr selects between two optional values: the first if it has a value, otherwise
-// the second optional expression is evaluated and its result returned.
-type evalOptionalOr struct {
- id int64
- lhs interpreter.Interpretable
- rhs interpreter.Interpretable
-}
-
-// ID implements the Interpretable interface method.
-func (opt *evalOptionalOr) ID() int64 {
- return opt.id
-}
-
-// Eval evaluates the left-hand side optional to determine whether it contains a value, else
-// proceeds with the right-hand side evaluation.
-func (opt *evalOptionalOr) Eval(ctx interpreter.Activation) ref.Val {
- // short-circuit lhs.
- optLHS := opt.lhs.Eval(ctx)
- optVal, ok := optLHS.(*types.Optional)
- if !ok {
- return optLHS
- }
- if optVal.HasValue() {
- return optVal
- }
- return opt.rhs.Eval(ctx)
-}
-
-// evalOptionalOrValue selects between an optional or a concrete value. If the optional has a value,
-// its value is returned, otherwise the alternative value expression is evaluated and returned.
-type evalOptionalOrValue struct {
- id int64
- lhs interpreter.Interpretable
- rhs interpreter.Interpretable
-}
-
-// ID implements the Interpretable interface method.
-func (opt *evalOptionalOrValue) ID() int64 {
- return opt.id
-}
-
-// Eval evaluates the left-hand side optional to determine whether it contains a value, else
-// proceeds with the right-hand side evaluation.
-func (opt *evalOptionalOrValue) Eval(ctx interpreter.Activation) ref.Val {
- // short-circuit lhs.
- optLHS := opt.lhs.Eval(ctx)
- optVal, ok := optLHS.(*types.Optional)
- if !ok {
- return optLHS
- }
- if optVal.HasValue() {
- return optVal.GetValue()
- }
- return opt.rhs.Eval(ctx)
-}
-
-type timeUTCLibrary struct{}
-
-func (timeUTCLibrary) CompileOptions() []EnvOption {
- return timeOverloadDeclarations
-}
-
-func (timeUTCLibrary) ProgramOptions() []ProgramOption {
- return []ProgramOption{}
-}
-
-// Declarations and functions which enable using UTC on time.Time inputs when the timezone is unspecified
-// in the CEL expression.
-var (
- utcTZ = types.String("UTC")
-
- timeOverloadDeclarations = []EnvOption{
- Function(overloads.TimeGetHours,
- MemberOverload(overloads.DurationToHours, []*Type{DurationType}, IntType,
- UnaryBinding(types.DurationGetHours))),
- Function(overloads.TimeGetMinutes,
- MemberOverload(overloads.DurationToMinutes, []*Type{DurationType}, IntType,
- UnaryBinding(types.DurationGetMinutes))),
- Function(overloads.TimeGetSeconds,
- MemberOverload(overloads.DurationToSeconds, []*Type{DurationType}, IntType,
- UnaryBinding(types.DurationGetSeconds))),
- Function(overloads.TimeGetMilliseconds,
- MemberOverload(overloads.DurationToMilliseconds, []*Type{DurationType}, IntType,
- UnaryBinding(types.DurationGetMilliseconds))),
- Function(overloads.TimeGetFullYear,
- MemberOverload(overloads.TimestampToYear, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetFullYear(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToYearWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(timestampGetFullYear),
- ),
- ),
- Function(overloads.TimeGetMonth,
- MemberOverload(overloads.TimestampToMonth, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetMonth(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToMonthWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(timestampGetMonth),
- ),
- ),
- Function(overloads.TimeGetDayOfYear,
- MemberOverload(overloads.TimestampToDayOfYear, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetDayOfYear(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToDayOfYearWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(func(ts, tz ref.Val) ref.Val {
- return timestampGetDayOfYear(ts, tz)
- }),
- ),
- ),
- Function(overloads.TimeGetDayOfMonth,
- MemberOverload(overloads.TimestampToDayOfMonthZeroBased, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetDayOfMonthZeroBased(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToDayOfMonthZeroBasedWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(timestampGetDayOfMonthZeroBased),
- ),
- ),
- Function(overloads.TimeGetDate,
- MemberOverload(overloads.TimestampToDayOfMonthOneBased, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetDayOfMonthOneBased(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToDayOfMonthOneBasedWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(timestampGetDayOfMonthOneBased),
- ),
- ),
- Function(overloads.TimeGetDayOfWeek,
- MemberOverload(overloads.TimestampToDayOfWeek, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetDayOfWeek(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToDayOfWeekWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(timestampGetDayOfWeek),
- ),
- ),
- Function(overloads.TimeGetHours,
- MemberOverload(overloads.TimestampToHours, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetHours(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToHoursWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(timestampGetHours),
- ),
- ),
- Function(overloads.TimeGetMinutes,
- MemberOverload(overloads.TimestampToMinutes, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetMinutes(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToMinutesWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(timestampGetMinutes),
- ),
- ),
- Function(overloads.TimeGetSeconds,
- MemberOverload(overloads.TimestampToSeconds, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetSeconds(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToSecondsWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(timestampGetSeconds),
- ),
- ),
- Function(overloads.TimeGetMilliseconds,
- MemberOverload(overloads.TimestampToMilliseconds, []*Type{TimestampType}, IntType,
- UnaryBinding(func(ts ref.Val) ref.Val {
- return timestampGetMilliseconds(ts, utcTZ)
- }),
- ),
- MemberOverload(overloads.TimestampToMillisecondsWithTz, []*Type{TimestampType, StringType}, IntType,
- BinaryBinding(timestampGetMilliseconds),
- ),
- ),
- }
-)
-
-func timestampGetFullYear(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- return types.Int(t.Year())
-}
-
-func timestampGetMonth(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- // CEL spec indicates that the month should be 0-based, but the Time value
- // for Month() is 1-based.
- return types.Int(t.Month() - 1)
-}
-
-func timestampGetDayOfYear(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- return types.Int(t.YearDay() - 1)
-}
-
-func timestampGetDayOfMonthZeroBased(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- return types.Int(t.Day() - 1)
-}
-
-func timestampGetDayOfMonthOneBased(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- return types.Int(t.Day())
-}
-
-func timestampGetDayOfWeek(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- return types.Int(t.Weekday())
-}
-
-func timestampGetHours(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- return types.Int(t.Hour())
-}
-
-func timestampGetMinutes(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- return types.Int(t.Minute())
-}
-
-func timestampGetSeconds(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- return types.Int(t.Second())
-}
-
-func timestampGetMilliseconds(ts, tz ref.Val) ref.Val {
- t, err := inTimeZone(ts, tz)
- if err != nil {
- return types.NewErr(err.Error())
- }
- return types.Int(t.Nanosecond() / 1000000)
-}
-
-func inTimeZone(ts, tz ref.Val) (time.Time, error) {
- t := ts.(types.Timestamp)
- val := string(tz.(types.String))
- ind := strings.Index(val, ":")
- if ind == -1 {
- loc, err := time.LoadLocation(val)
- if err != nil {
- return time.Time{}, err
- }
- return t.In(loc), nil
- }
-
- // If the input is not the name of a timezone (for example, 'US/Central'), it should be a numerical offset from UTC
- // in the format ^(+|-)(0[0-9]|1[0-4]):[0-5][0-9]$. The numerical input is parsed in terms of hours and minutes.
- hr, err := strconv.Atoi(string(val[0:ind]))
- if err != nil {
- return time.Time{}, err
- }
- min, err := strconv.Atoi(string(val[ind+1:]))
- if err != nil {
- return time.Time{}, err
- }
- var offset int
- if string(val[0]) == "-" {
- offset = hr*60 - min
- } else {
- offset = hr*60 + min
- }
- secondsEastOfUTC := int((time.Duration(offset) * time.Minute).Seconds())
- timezone := time.FixedZone("", secondsEastOfUTC)
- return t.In(timezone), nil
-}
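
So the timezone argument is either an IANA zone name or a fixed (+|-)HH:MM offset. A brief sketch of both forms through the public API; the timestamp literal and expected values are illustrative, and the same offset format is accepted by the standard timestamp accessors:

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func timezoneExample() error {
	env, err := cel.NewEnv()
	if err != nil {
		return err
	}
	// getHours with an IANA zone name and with a numeric UTC offset.
	compiled, iss := env.Compile(
		`[timestamp("2024-01-01T12:00:00Z").getHours("America/New_York"),
		  timestamp("2024-01-01T12:00:00Z").getHours("-08:00")]`)
	if iss.Err() != nil {
		return iss.Err()
	}
	prg, err := env.Program(compiled)
	if err != nil {
		return err
	}
	out, _, err := prg.Eval(cel.NoVars())
	if err != nil {
		return err
	}
	fmt.Println(out) // [7, 4]
	return nil
}
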
diff --git a/vendor/github.com/google/cel-go/cel/macro.go b/vendor/github.com/google/cel-go/cel/macro.go
deleted file mode 100644
index 4db1fd57a..000000000
--- a/vendor/github.com/google/cel-go/cel/macro.go
+++ /dev/null
@@ -1,576 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "fmt"
-
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/parser"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-// Macro describes a function signature to match and the MacroExpander to apply.
-//
-// Note: when a Macro should apply to multiple overloads (based on arg count) of a given function,
-// a Macro should be created per arg-count or as a var arg macro.
-type Macro = parser.Macro
-
-// MacroFactory defines an expansion function which converts a call and its arguments to a cel.Expr value.
-type MacroFactory = parser.MacroExpander
-
-// MacroExprFactory assists with the creation of Expr values in a manner which is consistent
-// with the internal semantics and id generation behaviors of the parser and checker libraries.
-type MacroExprFactory = parser.ExprHelper
-
-// MacroExpander converts a call and its associated arguments into a protobuf Expr representation.
-//
-// If the MacroExpander determines within the implementation that an expansion is not needed it may return
-// a nil Expr value to indicate a non-match. However, if an expansion is to be performed, but the arguments
-// are not well-formed, the result of the expansion will be an error.
-//
-// The MacroExpander accepts as arguments a MacroExprHelper as well as the arguments used in the function call
-// and produces as output an Expr ast node.
-//
-// Note: when the Macro.IsReceiverStyle() method returns false, the target argument will be nil.
-type MacroExpander func(eh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error)
-
-// MacroExprHelper exposes helper methods for creating new expressions within a CEL abstract syntax tree.
-// ExprHelper assists with the manipulation of proto-based Expr values in a manner which is
-// consistent with the source position and expression id generation code leveraged by both
-// the parser and type-checker.
-type MacroExprHelper interface {
- // Copy the input expression with a brand new set of identifiers.
- Copy(*exprpb.Expr) *exprpb.Expr
-
- // LiteralBool creates an Expr value for a bool literal.
- LiteralBool(value bool) *exprpb.Expr
-
- // LiteralBytes creates an Expr value for a byte literal.
- LiteralBytes(value []byte) *exprpb.Expr
-
- // LiteralDouble creates an Expr value for double literal.
- LiteralDouble(value float64) *exprpb.Expr
-
- // LiteralInt creates an Expr value for an int literal.
- LiteralInt(value int64) *exprpb.Expr
-
-	// LiteralString creates an Expr value for a string literal.
- LiteralString(value string) *exprpb.Expr
-
- // LiteralUint creates an Expr value for a uint literal.
- LiteralUint(value uint64) *exprpb.Expr
-
- // NewList creates a CreateList instruction where the list is comprised of the optional set
- // of elements provided as arguments.
- NewList(elems ...*exprpb.Expr) *exprpb.Expr
-
- // NewMap creates a CreateStruct instruction for a map where the map is comprised of the
- // optional set of key, value entries.
- NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr
-
- // NewMapEntry creates a Map Entry for the key, value pair.
- NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry
-
- // NewObject creates a CreateStruct instruction for an object with a given type name and
- // optional set of field initializers.
- NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr
-
- // NewObjectFieldInit creates a new Object field initializer from the field name and value.
- NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry
-
- // Fold creates a fold comprehension instruction.
- //
- // - iterVar is the iteration variable name.
- // - iterRange represents the expression that resolves to a list or map where the elements or
- // keys (respectively) will be iterated over.
- // - accuVar is the accumulation variable name, typically parser.AccumulatorName.
- // - accuInit is the initial expression whose value will be set for the accuVar prior to
- // folding.
- // - condition is the expression to test to determine whether to continue folding.
-	// - step is the expression to evaluate at the conclusion of a single fold iteration.
- // - result is the computation to evaluate at the conclusion of the fold.
- //
- // The accuVar should not shadow variable names that you would like to reference within the
- // environment in the step and condition expressions. Presently, the name __result__ is commonly
- // used by built-in macros but this may change in the future.
- Fold(iterVar string,
- iterRange *exprpb.Expr,
- accuVar string,
- accuInit *exprpb.Expr,
- condition *exprpb.Expr,
- step *exprpb.Expr,
- result *exprpb.Expr) *exprpb.Expr
-
- // Ident creates an identifier Expr value.
- Ident(name string) *exprpb.Expr
-
- // AccuIdent returns an accumulator identifier for use with comprehension results.
- AccuIdent() *exprpb.Expr
-
- // GlobalCall creates a function call Expr value for a global (free) function.
- GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr
-
- // ReceiverCall creates a function call Expr value for a receiver-style function.
- ReceiverCall(function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr
-
- // PresenceTest creates a Select TestOnly Expr value for modelling has() semantics.
- PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr
-
-	// Select creates a field traversal Expr value.
- Select(operand *exprpb.Expr, field string) *exprpb.Expr
-
- // OffsetLocation returns the Location of the expression identifier.
- OffsetLocation(exprID int64) common.Location
-
- // NewError associates an error message with a given expression id.
- NewError(exprID int64, message string) *Error
-}
-
-// GlobalMacro creates a Macro for a global function with the specified arg count.
-func GlobalMacro(function string, argCount int, factory MacroFactory) Macro {
- return parser.NewGlobalMacro(function, argCount, factory)
-}
-
-// ReceiverMacro creates a Macro for a receiver function matching the specified arg count.
-func ReceiverMacro(function string, argCount int, factory MacroFactory) Macro {
- return parser.NewReceiverMacro(function, argCount, factory)
-}
-
-// GlobalVarArgMacro creates a Macro for a global function with a variable arg count.
-func GlobalVarArgMacro(function string, factory MacroFactory) Macro {
- return parser.NewGlobalVarArgMacro(function, factory)
-}
-
-// ReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg count.
-func ReceiverVarArgMacro(function string, factory MacroFactory) Macro {
- return parser.NewReceiverVarArgMacro(function, factory)
-}
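
As a sketch of the factory-based helpers above, the following hypothetical receiver macro expands `<target>.twice()` into `<target> * 2` at parse time, mirroring the expander signature used by optMap earlier in this patch; the macro name and its expansion are invented for the example:

import (
	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/operators"
	"github.com/google/cel-go/common/types"
)

// twiceMacro rewrites `<target>.twice()` into `<target> * 2` during parsing.
var twiceMacro = cel.ReceiverMacro("twice", 0,
	func(fac cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) {
		return fac.NewCall(operators.Multiply, target, fac.NewLiteral(types.Int(2))), nil
	})

// Usage: env, err := cel.NewEnv(cel.Macros(twiceMacro))
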
-
-// NewGlobalMacro creates a Macro for a global function with the specified arg count.
-//
-// Deprecated: use GlobalMacro
-func NewGlobalMacro(function string, argCount int, expander MacroExpander) Macro {
- expand := adaptingExpander{expander}
- return parser.NewGlobalMacro(function, argCount, expand.Expander)
-}
-
-// NewReceiverMacro creates a Macro for a receiver function matching the specified arg count.
-//
-// Deprecated: use ReceiverMacro
-func NewReceiverMacro(function string, argCount int, expander MacroExpander) Macro {
- expand := adaptingExpander{expander}
- return parser.NewReceiverMacro(function, argCount, expand.Expander)
-}
-
-// NewGlobalVarArgMacro creates a Macro for a global function with a variable arg count.
-//
-// Deprecated: use GlobalVarArgMacro
-func NewGlobalVarArgMacro(function string, expander MacroExpander) Macro {
- expand := adaptingExpander{expander}
- return parser.NewGlobalVarArgMacro(function, expand.Expander)
-}
-
-// NewReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg count.
-//
-// Deprecated: use ReceiverVarArgMacro
-func NewReceiverVarArgMacro(function string, expander MacroExpander) Macro {
- expand := adaptingExpander{expander}
- return parser.NewReceiverVarArgMacro(function, expand.Expander)
-}
-
-// HasMacroExpander expands the input call arguments into a presence test, e.g. has(<operand>.field)
-func HasMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) {
- ph, err := toParserHelper(meh)
- if err != nil {
- return nil, err
- }
- arg, err := adaptToExpr(args[0])
- if err != nil {
- return nil, err
- }
- if arg.Kind() == ast.SelectKind {
- s := arg.AsSelect()
- return adaptToProto(ph.NewPresenceTest(s.Operand(), s.FieldName()))
- }
- return nil, ph.NewError(arg.ID(), "invalid argument to has() macro")
-}
-
-// ExistsMacroExpander expands the input call arguments into a comprehension that returns true if any of the
-// elements in the range match the predicate expressions:
-// <iterRange>.exists(<iterVar>, <predicate>)
-func ExistsMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) {
- ph, err := toParserHelper(meh)
- if err != nil {
- return nil, err
- }
- out, err := parser.MakeExists(ph, mustAdaptToExpr(target), mustAdaptToExprs(args))
- if err != nil {
- return nil, err
- }
- return adaptToProto(out)
-}
-
-// ExistsOneMacroExpander expands the input call arguments into a comprehension that returns true if exactly
-// one of the elements in the range match the predicate expressions:
-// <iterRange>.exists_one(<iterVar>, <predicate>)
-func ExistsOneMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) {
- ph, err := toParserHelper(meh)
- if err != nil {
- return nil, err
- }
- out, err := parser.MakeExistsOne(ph, mustAdaptToExpr(target), mustAdaptToExprs(args))
- if err != nil {
- return nil, err
- }
- return adaptToProto(out)
-}
-
-// MapMacroExpander expands the input call arguments into a comprehension that transforms each element in the
-// input to produce an output list.
-//
-// There are two call patterns supported by map:
-//
-// <iterRange>.map(<iterVar>, <transform>)
-// <iterRange>.map(<iterVar>, <predicate>, <transform>)
-//
-// In the second form only iterVar values which return true when provided to the predicate expression
-// are transformed.
-func MapMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) {
- ph, err := toParserHelper(meh)
- if err != nil {
- return nil, err
- }
- out, err := parser.MakeMap(ph, mustAdaptToExpr(target), mustAdaptToExprs(args))
- if err != nil {
- return nil, err
- }
- return adaptToProto(out)
-}
-
-// FilterMacroExpander expands the input call arguments into a comprehension which produces a list which contains
-// only elements which match the provided predicate expression:
-// <iterRange>.filter(<iterVar>, <predicate>)
-func FilterMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) {
- ph, err := toParserHelper(meh)
- if err != nil {
- return nil, err
- }
- out, err := parser.MakeFilter(ph, mustAdaptToExpr(target), mustAdaptToExprs(args))
- if err != nil {
- return nil, err
- }
- return adaptToProto(out)
-}
-
-var (
- // Aliases to each macro in the CEL standard environment.
- // Note: reassigning these macro variables may result in undefined behavior.
-
- // HasMacro expands "has(m.f)" which tests the presence of a field, avoiding the need to
- // specify the field as a string.
- HasMacro = parser.HasMacro
-
- // AllMacro expands "range.all(var, predicate)" into a comprehension which ensures that all
- // elements in the range satisfy the predicate.
- AllMacro = parser.AllMacro
-
- // ExistsMacro expands "range.exists(var, predicate)" into a comprehension which ensures that
- // some element in the range satisfies the predicate.
- ExistsMacro = parser.ExistsMacro
-
- // ExistsOneMacro expands "range.exists_one(var, predicate)", which is true if for exactly one
- // element in range the predicate holds.
- ExistsOneMacro = parser.ExistsOneMacro
-
- // MapMacro expands "range.map(var, function)" into a comprehension which applies the function
- // to each element in the range to produce a new list.
- MapMacro = parser.MapMacro
-
- // MapFilterMacro expands "range.map(var, predicate, function)" into a comprehension which
- // first filters the elements in the range by the predicate, then applies the transform function
- // to produce a new list.
- MapFilterMacro = parser.MapFilterMacro
-
- // FilterMacro expands "range.filter(var, predicate)" into a comprehension which filters
- // elements in the range, producing a new list from the elements that satisfy the predicate.
- FilterMacro = parser.FilterMacro
-
- // StandardMacros provides an alias to all the CEL macros defined in the standard environment.
- StandardMacros = []Macro{
- HasMacro, AllMacro, ExistsMacro, ExistsOneMacro, MapMacro, MapFilterMacro, FilterMacro,
- }
-
- // NoMacros provides an alias to an empty list of macros
- NoMacros = []Macro{}
-)
-
-type adaptingExpander struct {
- legacyExpander MacroExpander
-}
-
-func (adapt *adaptingExpander) Expander(eh parser.ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
- var legacyTarget *exprpb.Expr = nil
- var err *Error = nil
- if target != nil {
- legacyTarget, err = adaptToProto(target)
- if err != nil {
- return nil, err
- }
- }
- legacyArgs := make([]*exprpb.Expr, len(args))
- for i, arg := range args {
- legacyArgs[i], err = adaptToProto(arg)
- if err != nil {
- return nil, err
- }
- }
- ah := &adaptingHelper{modernHelper: eh}
- legacyExpr, err := adapt.legacyExpander(ah, legacyTarget, legacyArgs)
- if err != nil {
- return nil, err
- }
- ex, err := adaptToExpr(legacyExpr)
- if err != nil {
- return nil, err
- }
- return ex, nil
-}
-
-func wrapErr(id int64, message string, err error) *common.Error {
- return &common.Error{
- Location: common.NoLocation,
- Message: fmt.Sprintf("%s: %v", message, err),
- ExprID: id,
- }
-}
-
-type adaptingHelper struct {
- modernHelper parser.ExprHelper
-}
-
-// Copy the input expression with a brand new set of identifiers.
-func (ah *adaptingHelper) Copy(e *exprpb.Expr) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.Copy(mustAdaptToExpr(e)))
-}
-
-// LiteralBool creates an Expr value for a bool literal.
-func (ah *adaptingHelper) LiteralBool(value bool) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Bool(value)))
-}
-
-// LiteralBytes creates an Expr value for a byte literal.
-func (ah *adaptingHelper) LiteralBytes(value []byte) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Bytes(value)))
-}
-
-// LiteralDouble creates an Expr value for double literal.
-func (ah *adaptingHelper) LiteralDouble(value float64) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Double(value)))
-}
-
-// LiteralInt creates an Expr value for an int literal.
-func (ah *adaptingHelper) LiteralInt(value int64) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Int(value)))
-}
-
-// LiteralString creates an Expr value for a string literal.
-func (ah *adaptingHelper) LiteralString(value string) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewLiteral(types.String(value)))
-}
-
-// LiteralUint creates an Expr value for a uint literal.
-func (ah *adaptingHelper) LiteralUint(value uint64) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Uint(value)))
-}
-
-// NewList creates a CreateList instruction where the list is comprised of the optional set
-// of elements provided as arguments.
-func (ah *adaptingHelper) NewList(elems ...*exprpb.Expr) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewList(mustAdaptToExprs(elems)...))
-}
-
-// NewMap creates a CreateStruct instruction for a map where the map is comprised of the
-// optional set of key, value entries.
-func (ah *adaptingHelper) NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr {
- adaptedEntries := make([]ast.EntryExpr, len(entries))
- for i, e := range entries {
- adaptedEntries[i] = mustAdaptToEntryExpr(e)
- }
- return mustAdaptToProto(ah.modernHelper.NewMap(adaptedEntries...))
-}
-
-// NewMapEntry creates a Map Entry for the key, value pair.
-func (ah *adaptingHelper) NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry {
- return mustAdaptToProtoEntry(
- ah.modernHelper.NewMapEntry(mustAdaptToExpr(key), mustAdaptToExpr(val), optional))
-}
-
-// NewObject creates a CreateStruct instruction for an object with a given type name and
-// optional set of field initializers.
-func (ah *adaptingHelper) NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr {
- adaptedEntries := make([]ast.EntryExpr, len(fieldInits))
- for i, e := range fieldInits {
- adaptedEntries[i] = mustAdaptToEntryExpr(e)
- }
- return mustAdaptToProto(ah.modernHelper.NewStruct(typeName, adaptedEntries...))
-}
-
-// NewObjectFieldInit creates a new Object field initializer from the field name and value.
-func (ah *adaptingHelper) NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry {
- return mustAdaptToProtoEntry(
- ah.modernHelper.NewStructField(field, mustAdaptToExpr(init), optional))
-}
-
-// Fold creates a fold comprehension instruction.
-//
-// - iterVar is the iteration variable name.
-// - iterRange represents the expression that resolves to a list or map where the elements or
-// keys (respectively) will be iterated over.
-// - accuVar is the accumulation variable name, typically parser.AccumulatorName.
-// - accuInit is the initial expression whose value will be set for the accuVar prior to
-// folding.
-// - condition is the expression to test to determine whether to continue folding.
-// - step is the expression to evaluate at the conclusion of a single fold iteration.
-// - result is the computation to evaluate at the conclusion of the fold.
-//
-// The accuVar should not shadow variable names that you would like to reference within the
-// environment in the step and condition expressions. Presently, the name __result__ is commonly
-// used by built-in macros but this may change in the future.
-func (ah *adaptingHelper) Fold(iterVar string,
- iterRange *exprpb.Expr,
- accuVar string,
- accuInit *exprpb.Expr,
- condition *exprpb.Expr,
- step *exprpb.Expr,
- result *exprpb.Expr) *exprpb.Expr {
- return mustAdaptToProto(
- ah.modernHelper.NewComprehension(
- mustAdaptToExpr(iterRange),
- iterVar,
- accuVar,
- mustAdaptToExpr(accuInit),
- mustAdaptToExpr(condition),
- mustAdaptToExpr(step),
- mustAdaptToExpr(result),
- ),
- )
-}
-
-// Ident creates an identifier Expr value.
-func (ah *adaptingHelper) Ident(name string) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewIdent(name))
-}
-
-// AccuIdent returns an accumulator identifier for use with comprehension results.
-func (ah *adaptingHelper) AccuIdent() *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewAccuIdent())
-}
-
-// GlobalCall creates a function call Expr value for a global (free) function.
-func (ah *adaptingHelper) GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr {
- return mustAdaptToProto(ah.modernHelper.NewCall(function, mustAdaptToExprs(args)...))
-}
-
-// ReceiverCall creates a function call Expr value for a receiver-style function.
-func (ah *adaptingHelper) ReceiverCall(function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr {
- return mustAdaptToProto(
- ah.modernHelper.NewMemberCall(function, mustAdaptToExpr(target), mustAdaptToExprs(args)...))
-}
-
-// PresenceTest creates a Select TestOnly Expr value for modelling has() semantics.
-func (ah *adaptingHelper) PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr {
- op := mustAdaptToExpr(operand)
- return mustAdaptToProto(ah.modernHelper.NewPresenceTest(op, field))
-}
-
-// Select creates a field traversal Expr value.
-func (ah *adaptingHelper) Select(operand *exprpb.Expr, field string) *exprpb.Expr {
- op := mustAdaptToExpr(operand)
- return mustAdaptToProto(ah.modernHelper.NewSelect(op, field))
-}
-
-// OffsetLocation returns the Location of the expression identifier.
-func (ah *adaptingHelper) OffsetLocation(exprID int64) common.Location {
- return ah.modernHelper.OffsetLocation(exprID)
-}
-
-// NewError associates an error message with a given expression id.
-func (ah *adaptingHelper) NewError(exprID int64, message string) *Error {
- return ah.modernHelper.NewError(exprID, message)
-}
-
-func mustAdaptToExprs(exprs []*exprpb.Expr) []ast.Expr {
- adapted := make([]ast.Expr, len(exprs))
- for i, e := range exprs {
- adapted[i] = mustAdaptToExpr(e)
- }
- return adapted
-}
-
-func mustAdaptToExpr(e *exprpb.Expr) ast.Expr {
- out, _ := adaptToExpr(e)
- return out
-}
-
-func adaptToExpr(e *exprpb.Expr) (ast.Expr, *Error) {
- if e == nil {
- return nil, nil
- }
- out, err := ast.ProtoToExpr(e)
- if err != nil {
- return nil, wrapErr(e.GetId(), "proto conversion failure", err)
- }
- return out, nil
-}
-
-func mustAdaptToEntryExpr(e *exprpb.Expr_CreateStruct_Entry) ast.EntryExpr {
- out, _ := ast.ProtoToEntryExpr(e)
- return out
-}
-
-func mustAdaptToProto(e ast.Expr) *exprpb.Expr {
- out, _ := adaptToProto(e)
- return out
-}
-
-func adaptToProto(e ast.Expr) (*exprpb.Expr, *Error) {
- if e == nil {
- return nil, nil
- }
- out, err := ast.ExprToProto(e)
- if err != nil {
- return nil, wrapErr(e.ID(), "expr conversion failure", err)
- }
- return out, nil
-}
-
-func mustAdaptToProtoEntry(e ast.EntryExpr) *exprpb.Expr_CreateStruct_Entry {
- out, _ := ast.EntryExprToProto(e)
- return out
-}
-
-func toParserHelper(meh MacroExprHelper) (parser.ExprHelper, *Error) {
- ah, ok := meh.(*adaptingHelper)
- if !ok {
- return nil, common.NewError(0,
- fmt.Sprintf("unsupported macro helper: %v (%T)", meh, meh),
- common.NoLocation)
- }
- return ah.modernHelper, nil
-}
diff --git a/vendor/github.com/google/cel-go/cel/optimizer.go b/vendor/github.com/google/cel-go/cel/optimizer.go
deleted file mode 100644
index 99aeeb815..000000000
--- a/vendor/github.com/google/cel-go/cel/optimizer.go
+++ /dev/null
@@ -1,482 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-// StaticOptimizer contains a sequence of ASTOptimizer instances which will be applied in order.
-//
-// The static optimizer normalizes expression ids, and type-checking is run between optimization
-// passes to ensure that the final optimized output is a valid expression with metadata consistent
-// with what would have been generated from a parsed and checked expression.
-//
-// Note: source position information is best-effort and likely wrong, but optimized expressions
-// should be suitable for calls to parser.Unparse.
-type StaticOptimizer struct {
- optimizers []ASTOptimizer
-}
-
-// NewStaticOptimizer creates a StaticOptimizer with a sequence of ASTOptimizer's to be applied
-// to a checked expression.
-func NewStaticOptimizer(optimizers ...ASTOptimizer) *StaticOptimizer {
- return &StaticOptimizer{
- optimizers: optimizers,
- }
-}
-
-// Optimize applies a sequence of optimizations to an Ast within a given environment.
-//
-// If issues are encountered, the Issues.Err() return value will be non-nil.
-func (opt *StaticOptimizer) Optimize(env *Env, a *Ast) (*Ast, *Issues) {
- // Make a copy of the AST to be optimized.
- optimized := ast.Copy(a.impl)
- ids := newIDGenerator(ast.MaxID(a.impl))
-
- // Create the optimizer context, could be pooled in the future.
- issues := NewIssues(common.NewErrors(a.Source()))
- baseFac := ast.NewExprFactory()
- exprFac := &optimizerExprFactory{
- idGenerator: ids,
- fac: baseFac,
- sourceInfo: optimized.SourceInfo(),
- }
- ctx := &OptimizerContext{
- optimizerExprFactory: exprFac,
- Env: env,
- Issues: issues,
- }
-
- // Apply the optimizations sequentially.
- for _, o := range opt.optimizers {
- optimized = o.Optimize(ctx, optimized)
- if issues.Err() != nil {
- return nil, issues
- }
- // Normalize expression id metadata including coordination with macro call metadata.
- freshIDGen := newIDGenerator(0)
- info := optimized.SourceInfo()
- expr := optimized.Expr()
- normalizeIDs(freshIDGen.renumberStable, expr, info)
- cleanupMacroRefs(expr, info)
-
- // Recheck the updated expression for any possible type-agreement or validation errors.
- parsed := &Ast{
- source: a.Source(),
- impl: ast.NewAST(expr, info)}
- checked, iss := ctx.Check(parsed)
- if iss.Err() != nil {
- return nil, iss
- }
- optimized = checked.impl
- }
-
- // Return the optimized result.
- return &Ast{
- source: a.Source(),
- impl: optimized,
- }, nil
-}
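For reference, a minimal sketch of driving the StaticOptimizer from caller code; it assumes the constant-folding ASTOptimizer provided elsewhere in this package and is illustrative only:

    env, _ := cel.NewEnv(cel.Variable("x", cel.IntType))
    checked, iss := env.Compile(`x + 1 + 2`)
    if iss.Err() != nil {
        panic(iss.Err())
    }
    folder, _ := cel.NewConstantFoldingOptimizer() // assumed available from the folding optimizer
    optimized, iss := cel.NewStaticOptimizer(folder).Optimize(env, checked)
    if iss.Err() != nil {
        panic(iss.Err())
    }
    _ = optimized // IDs renumbered and re-checked; suitable for parser.Unparse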
-
-// normalizeIDs ensures that the metadata present with an AST is reset in a manner such
-// that the ids within the expression correspond to the ids within macros.
-func normalizeIDs(idGen ast.IDGenerator, optimized ast.Expr, info *ast.SourceInfo) {
- optimized.RenumberIDs(idGen)
-
- if len(info.MacroCalls()) == 0 {
- return
- }
-
- // First, update the macro call ids themselves.
- callIDMap := map[int64]int64{}
- for id := range info.MacroCalls() {
- callIDMap[id] = idGen(id)
- }
- // Then update the macro call definitions which refer to these ids, but
- // ensure that the updates don't collide and remove macro entries which haven't
- // been visited / updated yet.
- type macroUpdate struct {
- id int64
- call ast.Expr
- }
- macroUpdates := []macroUpdate{}
- for oldID, newID := range callIDMap {
- call, found := info.GetMacroCall(oldID)
- if !found {
- continue
- }
- call.RenumberIDs(idGen)
- macroUpdates = append(macroUpdates, macroUpdate{id: newID, call: call})
- info.ClearMacroCall(oldID)
- }
- for _, u := range macroUpdates {
- info.SetMacroCall(u.id, u.call)
- }
-}
-
-func cleanupMacroRefs(expr ast.Expr, info *ast.SourceInfo) {
- if len(info.MacroCalls()) == 0 {
- return
- }
- // Sanitize the macro call references once the optimized expression has been computed
- // and the ids normalized between the expression and the macros.
- exprRefMap := make(map[int64]struct{})
- ast.PostOrderVisit(expr, ast.NewExprVisitor(func(e ast.Expr) {
- if e.ID() == 0 {
- return
- }
- exprRefMap[e.ID()] = struct{}{}
- }))
- // Update the macro call id references to ensure that macro pointers are
- // updated consistently across macros.
- for _, call := range info.MacroCalls() {
- ast.PostOrderVisit(call, ast.NewExprVisitor(func(e ast.Expr) {
- if e.ID() == 0 {
- return
- }
- exprRefMap[e.ID()] = struct{}{}
- }))
- }
- for id := range info.MacroCalls() {
- if _, found := exprRefMap[id]; !found {
- info.ClearMacroCall(id)
- }
- }
-}
-
-// newIDGenerator ensures that new ids are only created the first time they are encountered.
-func newIDGenerator(seed int64) *idGenerator {
- return &idGenerator{
- idMap: make(map[int64]int64),
- seed: seed,
- }
-}
-
-type idGenerator struct {
- idMap map[int64]int64
- seed int64
-}
-
-func (gen *idGenerator) nextID() int64 {
- gen.seed++
- return gen.seed
-}
-
-func (gen *idGenerator) renumberStable(id int64) int64 {
- if id == 0 {
- return 0
- }
- if newID, found := gen.idMap[id]; found {
- return newID
- }
- nextID := gen.nextID()
- gen.idMap[id] = nextID
- return nextID
-}
-
-// OptimizerContext embeds Env and Issues instances to make it easy to type-check and evaluate
-// subexpressions and report any errors encountered along the way. The context also embeds the
-// optimizerExprFactory which can be used to generate new sub-expressions with expression ids
-// consistent with the expectations of a parsed expression.
-type OptimizerContext struct {
- *Env
- *optimizerExprFactory
- *Issues
-}
-
-// ASTOptimizer applies an optimization over an AST and returns the optimized result.
-type ASTOptimizer interface {
- // Optimize optimizes a type-checked AST within an Environment and accumulates any issues.
- Optimize(*OptimizerContext, *ast.AST) *ast.AST
-}
-
-type optimizerExprFactory struct {
- *idGenerator
- fac ast.ExprFactory
- sourceInfo *ast.SourceInfo
-}
-
-// CopyAST creates a renumbered copy of `Expr` and `SourceInfo` values of the input AST, where the
-// renumbering uses the same scheme as the core optimizer logic ensuring there are no collisions
-// between copies.
-//
-// Use this method before attempting to merge the expression from AST into another.
-func (opt *optimizerExprFactory) CopyAST(a *ast.AST) (ast.Expr, *ast.SourceInfo) {
- idGen := newIDGenerator(opt.nextID())
- defer func() { opt.seed = idGen.nextID() }()
- copyExpr := opt.fac.CopyExpr(a.Expr())
- copyInfo := ast.CopySourceInfo(a.SourceInfo())
- normalizeIDs(idGen.renumberStable, copyExpr, copyInfo)
- return copyExpr, copyInfo
-}
-
-// NewBindMacro creates an AST expression representing the expanded bind() macro, and a macro expression
-// representing the unexpanded call signature to be inserted into the source info macro call metadata.
-func (opt *optimizerExprFactory) NewBindMacro(macroID int64, varName string, varInit, remaining ast.Expr) (astExpr, macroExpr ast.Expr) {
- varID := opt.nextID()
- remainingID := opt.nextID()
- remaining = opt.fac.CopyExpr(remaining)
- remaining.RenumberIDs(func(id int64) int64 {
- if id == macroID {
- return remainingID
- }
- return id
- })
- if call, exists := opt.sourceInfo.GetMacroCall(macroID); exists {
- opt.sourceInfo.SetMacroCall(remainingID, opt.fac.CopyExpr(call))
- }
-
- astExpr = opt.fac.NewComprehension(macroID,
- opt.fac.NewList(opt.nextID(), []ast.Expr{}, []int32{}),
- "#unused",
- varName,
- opt.fac.CopyExpr(varInit),
- opt.fac.NewLiteral(opt.nextID(), types.False),
- opt.fac.NewIdent(varID, varName),
- remaining)
-
- macroExpr = opt.fac.NewMemberCall(0, "bind",
- opt.fac.NewIdent(opt.nextID(), "cel"),
- opt.fac.NewIdent(varID, varName),
- opt.fac.CopyExpr(varInit),
- opt.fac.CopyExpr(remaining))
- opt.sanitizeMacro(macroID, macroExpr)
- return
-}
-
-// NewCall creates a global function call invocation expression.
-//
-// Example:
-//
-// countByField(list, fieldName)
-// - function: countByField
-// - args: [list, fieldName]
-func (opt *optimizerExprFactory) NewCall(function string, args ...ast.Expr) ast.Expr {
- return opt.fac.NewCall(opt.nextID(), function, args...)
-}
-
-// NewMemberCall creates a member function call invocation expression where 'target' is the receiver of the call.
-//
-// Example:
-//
-// list.countByField(fieldName)
-// - function: countByField
-// - target: list
-// - args: [fieldName]
-func (opt *optimizerExprFactory) NewMemberCall(function string, target ast.Expr, args ...ast.Expr) ast.Expr {
- return opt.fac.NewMemberCall(opt.nextID(), function, target, args...)
-}
-
-// NewIdent creates a new identifier expression.
-//
-// Examples:
-//
-// - simple_var_name
-// - qualified.subpackage.var_name
-func (opt *optimizerExprFactory) NewIdent(name string) ast.Expr {
- return opt.fac.NewIdent(opt.nextID(), name)
-}
-
-// NewLiteral creates a new literal expression value.
-//
-// The range of valid values for a literal generated during optimization is different than for expressions
-// generated via parsing / type-checking, as the ref.Val may be _any_ CEL value so long as the value can
-// be converted back to a literal-like form.
-func (opt *optimizerExprFactory) NewLiteral(value ref.Val) ast.Expr {
- return opt.fac.NewLiteral(opt.nextID(), value)
-}
-
-// NewList creates a list expression with a set of optional indices.
-//
-// Examples:
-//
-// [a, b]
-// - elems: [a, b]
-// - optIndices: []
-//
-// [a, ?b, ?c]
-// - elems: [a, b, c]
-// - optIndices: [1, 2]
-func (opt *optimizerExprFactory) NewList(elems []ast.Expr, optIndices []int32) ast.Expr {
- return opt.fac.NewList(opt.nextID(), elems, optIndices)
-}
-
-// NewMap creates a map from a set of entry expressions which contain a key and value expression.
-func (opt *optimizerExprFactory) NewMap(entries []ast.EntryExpr) ast.Expr {
- return opt.fac.NewMap(opt.nextID(), entries)
-}
-
-// NewMapEntry creates a map entry with a key and value expression and a flag to indicate whether the
-// entry is optional.
-//
-// Examples:
-//
-// {a: b}
-// - key: a
-// - value: b
-// - optional: false
-//
-// {?a: ?b}
-// - key: a
-// - value: b
-// - optional: true
-func (opt *optimizerExprFactory) NewMapEntry(key, value ast.Expr, isOptional bool) ast.EntryExpr {
- return opt.fac.NewMapEntry(opt.nextID(), key, value, isOptional)
-}
-
-// NewHasMacro generates a test-only select expression to be included within an AST and an unexpanded
-// has() macro call signature to be inserted into the source info macro call metadata.
-func (opt *optimizerExprFactory) NewHasMacro(macroID int64, s ast.Expr) (astExpr, macroExpr ast.Expr) {
- sel := s.AsSelect()
- astExpr = opt.fac.NewPresenceTest(macroID, sel.Operand(), sel.FieldName())
- macroExpr = opt.fac.NewCall(0, "has",
- opt.NewSelect(opt.fac.CopyExpr(sel.Operand()), sel.FieldName()))
- opt.sanitizeMacro(macroID, macroExpr)
- return
-}
-
-// NewSelect creates a select expression where a field value is selected from an operand.
-//
-// Example:
-//
-// msg.field_name
-// - operand: msg
-// - field: field_name
-func (opt *optimizerExprFactory) NewSelect(operand ast.Expr, field string) ast.Expr {
- return opt.fac.NewSelect(opt.nextID(), operand, field)
-}
-
-// NewStruct creates a new typed struct value with a set of field initializations.
-//
-// Example:
-//
-// pkg.TypeName{field: value}
-// - typeName: pkg.TypeName
-// - fields: [{field: value}]
-func (opt *optimizerExprFactory) NewStruct(typeName string, fields []ast.EntryExpr) ast.Expr {
- return opt.fac.NewStruct(opt.nextID(), typeName, fields)
-}
-
-// NewStructField creates a struct field initialization.
-//
-// Examples:
-//
-// {count: 3u}
-// - field: count
-// - value: 3u
-// - optional: false
-//
-// {?count: x}
-// - field: count
-// - value: x
-// - optional: true
-func (opt *optimizerExprFactory) NewStructField(field string, value ast.Expr, isOptional bool) ast.EntryExpr {
- return opt.fac.NewStructField(opt.nextID(), field, value, isOptional)
-}
-
-// UpdateExpr updates the target expression with the updated content while preserving macro metadata.
-//
-// There are four scenarios during the update to consider:
-// 1. target is not macro, updated is not macro
-// 2. target is macro, updated is not macro
-// 3. target is macro, updated is macro
-// 4. target is not macro, updated is macro
-//
-// When the target is already a macro, it may either be updated to a new macro function
-// body if the update is also a macro, or it may be removed altogether if the update is
-// not a macro.
-//
-// When the update is a macro, then the target references within other macros must be
-// updated to point to the new updated macro. Otherwise, other macros which pointed to
-// the target body must be replaced with copies of the updated expression body.
-func (opt *optimizerExprFactory) UpdateExpr(target, updated ast.Expr) {
- // Update the expression
- target.SetKindCase(updated)
-
-	// Early return if there are no macros present, as the source info reflects the
-	// macro set from the target and updated expressions.
- if len(opt.sourceInfo.MacroCalls()) == 0 {
- return
- }
- // Determine whether the target expression was a macro.
- _, targetIsMacro := opt.sourceInfo.GetMacroCall(target.ID())
-
- // Determine whether the updated expression was a macro.
- updatedMacro, updatedIsMacro := opt.sourceInfo.GetMacroCall(updated.ID())
-
- if updatedIsMacro {
- // If the updated call was a macro, then updated id maps to target id,
- // and the updated macro moves into the target id slot.
- opt.sourceInfo.ClearMacroCall(updated.ID())
- opt.sourceInfo.SetMacroCall(target.ID(), updatedMacro)
- } else if targetIsMacro {
- // Otherwise if the target expr was a macro, but is no longer, clear
- // the macro reference.
- opt.sourceInfo.ClearMacroCall(target.ID())
- }
-
-	// Punch holes in the updated value where macro references exist.
- macroExpr := opt.fac.CopyExpr(target)
- macroRefVisitor := ast.NewExprVisitor(func(e ast.Expr) {
- if _, exists := opt.sourceInfo.GetMacroCall(e.ID()); exists {
- e.SetKindCase(nil)
- }
- })
- ast.PostOrderVisit(macroExpr, macroRefVisitor)
-
- // Update any references to the expression within a macro
- macroVisitor := ast.NewExprVisitor(func(call ast.Expr) {
- // Update the target expression to point to the macro expression which
- // will be empty if the updated expression was a macro.
- if call.ID() == target.ID() {
- call.SetKindCase(opt.fac.CopyExpr(macroExpr))
- }
- // Update the macro call expression if it refers to the updated expression
- // id which has since been remapped to the target id.
- if call.ID() == updated.ID() {
-			// Either ensure the expression is a macro reference or is populated with
-			// the relevant sub-expression if the updated expr was not a macro.
- if updatedIsMacro {
- call.SetKindCase(nil)
- } else {
- call.SetKindCase(opt.fac.CopyExpr(macroExpr))
- }
- // Since SetKindCase does not renumber the id, ensure the references to
- // the old 'updated' id are mapped to the target id.
- call.RenumberIDs(func(id int64) int64 {
- if id == updated.ID() {
- return target.ID()
- }
- return id
- })
- }
- })
- for _, call := range opt.sourceInfo.MacroCalls() {
- ast.PostOrderVisit(call, macroVisitor)
- }
-}
-
-func (opt *optimizerExprFactory) sanitizeMacro(macroID int64, macroExpr ast.Expr) {
- macroRefVisitor := ast.NewExprVisitor(func(e ast.Expr) {
- if _, exists := opt.sourceInfo.GetMacroCall(e.ID()); exists && e.ID() != macroID {
- e.SetKindCase(nil)
- }
- })
- ast.PostOrderVisit(macroExpr, macroRefVisitor)
-}
diff --git a/vendor/github.com/google/cel-go/cel/options.go b/vendor/github.com/google/cel-go/cel/options.go
deleted file mode 100644
index 3c53e21af..000000000
--- a/vendor/github.com/google/cel-go/cel/options.go
+++ /dev/null
@@ -1,661 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "fmt"
-
- "google.golang.org/protobuf/proto"
- "google.golang.org/protobuf/reflect/protodesc"
- "google.golang.org/protobuf/reflect/protoreflect"
- "google.golang.org/protobuf/reflect/protoregistry"
- "google.golang.org/protobuf/types/dynamicpb"
-
- "github.com/google/cel-go/checker"
- "github.com/google/cel-go/common/containers"
- "github.com/google/cel-go/common/functions"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/pb"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/interpreter"
- "github.com/google/cel-go/parser"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
- descpb "google.golang.org/protobuf/types/descriptorpb"
-)
-
-// These constants beginning with "Feature" enable optional behavior in
-// the library. See the documentation for each constant to see its
-// effects, compatibility restrictions, and standard conformance.
-const (
- _ = iota
-
- // Enable the tracking of function call expressions replaced by macros.
- featureEnableMacroCallTracking
-
- // Enable the use of cross-type numeric comparisons at the type-checker.
- featureCrossTypeNumericComparisons
-
- // Enable eager validation of declarations to ensure that Env values created
- // with `Extend` inherit a validated list of declarations from the parent Env.
- featureEagerlyValidateDeclarations
-
- // Enable the use of the default UTC timezone when a timezone is not specified
- // on a CEL timestamp operation. This fixes the scenario where the input time
- // is not already in UTC.
- featureDefaultUTCTimeZone
-
- // Enable the serialization of logical operator ASTs as variadic calls, thus
- // compressing the logic graph to a single call when multiple like-operator
- // expressions occur: e.g. a && b && c && d -> call(_&&_, [a, b, c, d])
- featureVariadicLogicalASTs
-)
-
-// EnvOption is a functional interface for configuring the environment.
-type EnvOption func(e *Env) (*Env, error)
-
-// ClearMacros option clears all parser macros.
-//
-// Clearing macros will ensure CEL expressions can only contain linear evaluation paths, as
-// comprehensions such as `all` and `exists` are enabled only via macros.
-func ClearMacros() EnvOption {
- return func(e *Env) (*Env, error) {
- e.macros = NoMacros
- return e, nil
- }
-}
-
-// CustomTypeAdapter swaps the default types.Adapter implementation with a custom one.
-//
-// Note: This option must be specified before the Types and TypeDescs options when used together.
-func CustomTypeAdapter(adapter types.Adapter) EnvOption {
- return func(e *Env) (*Env, error) {
- e.adapter = adapter
- return e, nil
- }
-}
-
-// CustomTypeProvider replaces the types.Provider implementation with a custom one.
-//
-// The `provider` variable type may either be types.Provider or ref.TypeProvider (deprecated)
-//
-// Note: This option must be specified before the Types and TypeDescs options when used together.
-func CustomTypeProvider(provider any) EnvOption {
- return func(e *Env) (*Env, error) {
- var err error
- e.provider, err = maybeInteropProvider(provider)
- return e, err
- }
-}
-
-// Declarations option extends the declaration set configured in the environment.
-//
-// Note: Declarations will by default be appended to the pre-existing declaration set configured
-// for the environment. The NewEnv call builds on top of the standard CEL declarations. For a
-// purely custom set of declarations use NewCustomEnv.
-func Declarations(decls ...*exprpb.Decl) EnvOption {
- declOpts := []EnvOption{}
- var err error
- var opt EnvOption
- // Convert the declarations to `EnvOption` values ahead of time.
- // Surface any errors in conversion when the options are applied.
- for _, d := range decls {
- opt, err = ExprDeclToDeclaration(d)
- if err != nil {
- break
- }
- declOpts = append(declOpts, opt)
- }
- return func(e *Env) (*Env, error) {
- if err != nil {
- return nil, err
- }
- for _, o := range declOpts {
- e, err = o(e)
- if err != nil {
- return nil, err
- }
- }
- return e, nil
- }
-}
-
-// EagerlyValidateDeclarations ensures that any collisions between configured declarations are caught
-// at the time of the `NewEnv` call.
-//
-// Eagerly validating declarations is also useful for bootstrapping a base `cel.Env` value.
-// Calls to base `Env.Extend()` will be significantly faster when declarations are eagerly validated
-// as declarations will be collision-checked at most once and only incrementally by way of `Extend`
-//
-// Disabled by default as not all environments are used for type-checking.
-func EagerlyValidateDeclarations(enabled bool) EnvOption {
- return features(featureEagerlyValidateDeclarations, enabled)
-}
-
-// HomogeneousAggregateLiterals disables mixed type list and map literal values.
-//
-// Note, it is still possible to have heterogeneous aggregates when provided as variables to the
-// expression, as well as via conversion of well-known dynamic types, or with unchecked
-// expressions.
-func HomogeneousAggregateLiterals() EnvOption {
- return ASTValidators(ValidateHomogeneousAggregateLiterals())
-}
-
-// variadicLogicalOperatorASTs flattens like-operator chained logical expressions into a single
-// variadic call with N-terms. This behavior is useful when serializing to a protocol buffer as
-// it will reduce the number of recursive calls needed to deserialize the AST later.
-//
-// For example, given the following expression the call graph will be rendered accordingly:
-//
-// expression: a && b && c && (d || e)
-// ast: call(_&&_, [a, b, c, call(_||_, [d, e])])
-func variadicLogicalOperatorASTs() EnvOption {
- return features(featureVariadicLogicalASTs, true)
-}
-
-// Macros option extends the macro set configured in the environment.
-//
-// Note: This option must be specified after ClearMacros if used together.
-func Macros(macros ...Macro) EnvOption {
- return func(e *Env) (*Env, error) {
- e.macros = append(e.macros, macros...)
- return e, nil
- }
-}
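A small sketch of the ordering noted above, clearing the macro set and then selectively restoring the standard macros:

    env, err := cel.NewEnv(
        cel.ClearMacros(),                 // must come first
        cel.Macros(cel.StandardMacros...), // re-enable the standard macro set
    )
    if err != nil {
        panic(err)
    }
    // Comprehension macros such as `all` parse again once restored.
    _, iss := env.Compile(`[1, 2].all(i, i > 0)`)
    _ = iss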
-
-// Container sets the container for resolving variable names. Defaults to an empty container.
-//
-// If all references within an expression are relative to a protocol buffer package, then
-// specifying a container of `google.type` would make it possible to write expressions such as
-// `Expr{expression: 'a < b'}` instead of having to write `google.type.Expr{...}`.
-func Container(name string) EnvOption {
- return func(e *Env) (*Env, error) {
- cont, err := e.Container.Extend(containers.Name(name))
- if err != nil {
- return nil, err
- }
- e.Container = cont
- return e, nil
- }
-}
-
-// Abbrevs configures a set of simple names as abbreviations for fully-qualified names.
-//
-// An abbreviation (abbrev for short) is a simple name that expands to a fully-qualified name.
-// Abbreviations can be useful when working with variables, functions, and especially types from
-// multiple namespaces:
-//
-// // CEL object construction
-// qual.pkg.version.ObjTypeName{
-// field: alt.container.ver.FieldTypeName{value: ...}
-// }
-//
-// Only one of the qualified names above may be used as the CEL container, so at least one of these
-// references must be a long qualified name within an otherwise short CEL program. Using the
-// following abbreviations, the program becomes much simpler:
-//
-// // CEL Go option
-// Abbrevs("qual.pkg.version.ObjTypeName", "alt.container.ver.FieldTypeName")
-// // Simplified Object construction
-// ObjTypeName{field: FieldTypeName{value: ...}}
-//
-// There are a few rules for the qualified names and the simple abbreviations generated from them:
-// - Qualified names must be dot-delimited, e.g. `package.subpkg.name`.
-// - The last element in the qualified name is the abbreviation.
-// - Abbreviations must not collide with each other.
-// - The abbreviation must not collide with unqualified names in use.
-//
-// Abbreviations are distinct from container-based references in the following important ways:
-// - Abbreviations must expand to a fully-qualified name.
-// - Expanded abbreviations do not participate in namespace resolution.
-// - Abbreviation expansion is done instead of the container search for a matching identifier.
-// - Containers follow C++ namespace resolution rules with searches from the most qualified name
-//   to the least qualified name.
-// - Container references within the CEL program may be relative, and are resolved to fully
-//   qualified names at either type-check time or program plan time, whichever comes first.
-//
-// If there is ever a case where an identifier could be in both the container and as an
-// abbreviation, the abbreviation wins as this will ensure that the meaning of a program is
-// preserved between compilations even as the container evolves.
-func Abbrevs(qualifiedNames ...string) EnvOption {
- return func(e *Env) (*Env, error) {
- cont, err := e.Container.Extend(containers.Abbrevs(qualifiedNames...))
- if err != nil {
- return nil, err
- }
- e.Container = cont
- return e, nil
- }
-}
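A brief sketch combining Container and Abbrevs with the hypothetical package names from the comment above; the referenced types would still need to be registered via Types for compilation to succeed:

    env, err := cel.NewEnv(
        cel.Container("qual.pkg.version"),
        cel.Abbrevs("alt.container.ver.FieldTypeName"),
    )
    if err != nil {
        panic(err)
    }
    // ObjTypeName{...} resolves through the container, FieldTypeName{...} through the abbreviation.
    _ = env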
-
-// Types adds one or more type declarations to the environment, allowing for construction of
-// type-literals whose definitions are included in the common expression built-in set.
-//
-// The input types may either be instances of `proto.Message` or `ref.Type`. Any other type
-// provided to this option will result in an error.
-//
-// Well-known protobuf types within the `google.protobuf.*` package are included in the standard
-// environment by default.
-//
-// Note: This option must be specified after the CustomTypeProvider option when used together.
-func Types(addTypes ...any) EnvOption {
- return func(e *Env) (*Env, error) {
- var reg ref.TypeRegistry
- var isReg bool
- reg, isReg = e.provider.(*types.Registry)
- if !isReg {
- reg, isReg = e.provider.(ref.TypeRegistry)
- }
- if !isReg {
- return nil, fmt.Errorf("custom types not supported by provider: %T", e.provider)
- }
- for _, t := range addTypes {
- switch v := t.(type) {
- case proto.Message:
- fdMap := pb.CollectFileDescriptorSet(v)
- for _, fd := range fdMap {
- err := reg.RegisterDescriptor(fd)
- if err != nil {
- return nil, err
- }
- }
- case ref.Type:
- err := reg.RegisterType(v)
- if err != nil {
- return nil, err
- }
- default:
- return nil, fmt.Errorf("unsupported type: %T", t)
- }
- }
- return e, nil
- }
-}
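A short sketch of type registration, assuming a hypothetical generated protobuf message mypb.MyMessage whose full name is my.pkg.MyMessage:

    env, err := cel.NewEnv(
        cel.Types(&mypb.MyMessage{}), // register the message and its file descriptors
        cel.Variable("msg", cel.ObjectType("my.pkg.MyMessage")),
    )
    if err != nil {
        panic(err)
    }
    _, iss := env.Compile(`msg.field_name == 'value'`)
    _ = iss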
-
-// TypeDescs adds type declarations from any protoreflect.FileDescriptor, protoregistry.Files,
-// google.protobuf.FileDescriptorProto or google.protobuf.FileDescriptorSet provided.
-//
-// Note that messages instantiated from these descriptors will be *dynamicpb.Message values
-// rather than the concrete message type.
-//
-// TypeDescs are hermetic to a single Env object, but may be copied to other Env values via
-// extension or by re-using the same EnvOption with another NewEnv() call.
-func TypeDescs(descs ...any) EnvOption {
- return func(e *Env) (*Env, error) {
- reg, isReg := e.provider.(ref.TypeRegistry)
- if !isReg {
- return nil, fmt.Errorf("custom types not supported by provider: %T", e.provider)
- }
- // Scan the input descriptors for FileDescriptorProto messages and accumulate them into a
- // synthetic FileDescriptorSet as the FileDescriptorProto messages may refer to each other
- // and will not resolve properly unless they are part of the same set.
- var fds *descpb.FileDescriptorSet
- for _, d := range descs {
- switch f := d.(type) {
- case *descpb.FileDescriptorProto:
- if fds == nil {
- fds = &descpb.FileDescriptorSet{
- File: []*descpb.FileDescriptorProto{},
- }
- }
- fds.File = append(fds.File, f)
- }
- }
- if fds != nil {
- if err := registerFileSet(reg, fds); err != nil {
- return nil, err
- }
- }
- for _, d := range descs {
- switch f := d.(type) {
- case *protoregistry.Files:
- if err := registerFiles(reg, f); err != nil {
- return nil, err
- }
- case protoreflect.FileDescriptor:
- if err := reg.RegisterDescriptor(f); err != nil {
- return nil, err
- }
- case *descpb.FileDescriptorSet:
- if err := registerFileSet(reg, f); err != nil {
- return nil, err
- }
- case *descpb.FileDescriptorProto:
- // skip, handled as a synthetic file descriptor set.
- default:
- return nil, fmt.Errorf("unsupported type descriptor: %T", d)
- }
- }
- return e, nil
- }
-}
-
-func registerFileSet(reg ref.TypeRegistry, fileSet *descpb.FileDescriptorSet) error {
- files, err := protodesc.NewFiles(fileSet)
- if err != nil {
- return fmt.Errorf("protodesc.NewFiles(%v) failed: %v", fileSet, err)
- }
- return registerFiles(reg, files)
-}
-
-func registerFiles(reg ref.TypeRegistry, files *protoregistry.Files) error {
- var err error
- files.RangeFiles(func(fd protoreflect.FileDescriptor) bool {
- err = reg.RegisterDescriptor(fd)
- return err == nil
- })
- return err
-}
-
-// ProgramOption is a functional interface for configuring evaluation bindings and behaviors.
-type ProgramOption func(p *prog) (*prog, error)
-
-// CustomDecorator appends an InterpretableDecorator to the program.
-//
-// InterpretableDecorators can be used to inspect, alter, or replace the Program plan.
-func CustomDecorator(dec interpreter.InterpretableDecorator) ProgramOption {
- return func(p *prog) (*prog, error) {
- p.decorators = append(p.decorators, dec)
- return p, nil
- }
-}
-
-// Functions adds function overloads that extend or override the set of CEL built-ins.
-//
-// Deprecated: use Function() instead to declare the function, its overload signatures,
-// and the overload implementations.
-func Functions(funcs ...*functions.Overload) ProgramOption {
- return func(p *prog) (*prog, error) {
- if err := p.dispatcher.Add(funcs...); err != nil {
- return nil, err
- }
- return p, nil
- }
-}
-
-// Globals sets the global variable values for a given program. These values may be shadowed by
-// variables with the same name provided to the Eval() call. If Globals is used in a Library with
-// a Lib EnvOption, vars may shadow variables provided by previously added libraries.
-//
-// The vars value may either be an `interpreter.Activation` instance or a `map[string]any`.
-func Globals(vars any) ProgramOption {
- return func(p *prog) (*prog, error) {
- defaultVars, err := interpreter.NewActivation(vars)
- if err != nil {
- return nil, err
- }
- if p.defaultVars != nil {
- defaultVars = interpreter.NewHierarchicalActivation(p.defaultVars, defaultVars)
- }
- p.defaultVars = defaultVars
- return p, nil
- }
-}
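A minimal sketch of the shadowing behavior described above, where an Eval input overrides the program-level global of the same name (error handling elided):

    env, _ := cel.NewEnv(cel.Variable("greeting", cel.StringType))
    checked, _ := env.Compile(`greeting + '!'`)
    prg, _ := env.Program(checked, cel.Globals(map[string]any{"greeting": "hello"}))
    out, _, _ := prg.Eval(cel.NoVars())                        // "hello!"
    out, _, _ = prg.Eval(map[string]any{"greeting": "howdy"})  // "howdy!", shadows the global
    _ = out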
-
-// OptimizeRegex provides a way to replace the InterpretableCall for regex functions. This can be used
-// to compile regex string constants at program creation time and report any errors and then use the
-// compiled regex for all regex function invocations.
-func OptimizeRegex(regexOptimizations ...*interpreter.RegexOptimization) ProgramOption {
- return func(p *prog) (*prog, error) {
- p.regexOptimizations = append(p.regexOptimizations, regexOptimizations...)
- return p, nil
- }
-}
-
-// EvalOption indicates an evaluation option that may affect the evaluation behavior or information
-// in the output result.
-type EvalOption int
-
-const (
- // OptTrackState will cause the runtime to return an immutable EvalState value in the Result.
- OptTrackState EvalOption = 1 << iota
-
- // OptExhaustiveEval causes the runtime to disable short-circuits and track state.
-	OptExhaustiveEval EvalOption = 1<<iota | OptTrackState
-	if p.interruptCheckFrequency > 0 {
- decorators = append(decorators, interpreter.InterruptableEval())
- }
- // Enable constant folding first.
- if p.evalOpts&OptOptimize == OptOptimize {
- decorators = append(decorators, interpreter.Optimize())
- p.regexOptimizations = append(p.regexOptimizations, interpreter.MatchesRegexOptimization)
- }
- // Enable regex compilation of constants immediately after folding constants.
- if len(p.regexOptimizations) > 0 {
- decorators = append(decorators, interpreter.CompileRegexConstants(p.regexOptimizations...))
- }
-
- // Enable exhaustive eval, state tracking and cost tracking last since they require a factory.
- if p.evalOpts&(OptExhaustiveEval|OptTrackState|OptTrackCost) != 0 {
- factory := func(state interpreter.EvalState, costTracker *interpreter.CostTracker) (Program, error) {
- costTracker.Estimator = p.callCostEstimator
- costTracker.Limit = p.costLimit
- for _, costOpt := range p.costOptions {
- err := costOpt(costTracker)
- if err != nil {
- return nil, err
- }
- }
- // Limit capacity to guarantee a reallocation when calling 'append(decs, ...)' below. This
- // prevents the underlying memory from being shared between factory function calls causing
- // undesired mutations.
- decs := decorators[:len(decorators):len(decorators)]
- var observers []interpreter.EvalObserver
-
- if p.evalOpts&(OptExhaustiveEval|OptTrackState) != 0 {
- // EvalStateObserver is required for OptExhaustiveEval.
- observers = append(observers, interpreter.EvalStateObserver(state))
- }
- if p.evalOpts&OptTrackCost == OptTrackCost {
- observers = append(observers, interpreter.CostObserver(costTracker))
- }
-
- // Enable exhaustive eval over a basic observer since it offers a superset of features.
- if p.evalOpts&OptExhaustiveEval == OptExhaustiveEval {
- decs = append(decs, interpreter.ExhaustiveEval(), interpreter.Observe(observers...))
- } else if len(observers) > 0 {
- decs = append(decs, interpreter.Observe(observers...))
- }
-
- return p.clone().initInterpretable(a, decs)
- }
- return newProgGen(factory)
- }
- return p.initInterpretable(a, decorators)
-}
-
-func (p *prog) initInterpretable(a *Ast, decs []interpreter.InterpretableDecorator) (*prog, error) {
-	// When the AST has been checked it contains metadata that can be used to speed up program execution.
- interpretable, err := p.interpreter.NewInterpretable(a.impl, decs...)
- if err != nil {
- return nil, err
- }
- p.interpretable = interpretable
- return p, nil
-}
-
-// Eval implements the Program interface method.
-func (p *prog) Eval(input any) (v ref.Val, det *EvalDetails, err error) {
- // Configure error recovery for unexpected panics during evaluation. Note, the use of named
- // return values makes it possible to modify the error response during the recovery
- // function.
- defer func() {
- if r := recover(); r != nil {
- switch t := r.(type) {
- case interpreter.EvalCancelledError:
- err = t
- default:
- err = fmt.Errorf("internal error: %v", r)
- }
- }
- }()
- // Build a hierarchical activation if there are default vars set.
- var vars interpreter.Activation
- switch v := input.(type) {
- case interpreter.Activation:
- vars = v
- case map[string]any:
- vars = activationPool.Setup(v)
- defer activationPool.Put(vars)
- default:
- return nil, nil, fmt.Errorf("invalid input, wanted Activation or map[string]any, got: (%T)%v", input, input)
- }
- if p.defaultVars != nil {
- vars = interpreter.NewHierarchicalActivation(p.defaultVars, vars)
- }
- v = p.interpretable.Eval(vars)
- // The output of an internal Eval may have a value (`v`) that is a types.Err. This step
- // translates the CEL value to a Go error response. This interface does not quite match the
- // RPC signature which allows for multiple errors to be returned, but should be sufficient.
- if types.IsError(v) {
- err = v.(*types.Err)
- }
- return
-}
-
-// ContextEval implements the Program interface.
-func (p *prog) ContextEval(ctx context.Context, input any) (ref.Val, *EvalDetails, error) {
- if ctx == nil {
- return nil, nil, fmt.Errorf("context can not be nil")
- }
- // Configure the input, making sure to wrap Activation inputs in the special ctxActivation which
- // exposes the #interrupted variable and manages rate-limited checks of the ctx.Done() state.
- var vars interpreter.Activation
- switch v := input.(type) {
- case interpreter.Activation:
- vars = ctxActivationPool.Setup(v, ctx.Done(), p.interruptCheckFrequency)
- defer ctxActivationPool.Put(vars)
- case map[string]any:
- rawVars := activationPool.Setup(v)
- defer activationPool.Put(rawVars)
- vars = ctxActivationPool.Setup(rawVars, ctx.Done(), p.interruptCheckFrequency)
- defer ctxActivationPool.Put(vars)
- default:
- return nil, nil, fmt.Errorf("invalid input, wanted Activation or map[string]any, got: (%T)%v", input, input)
- }
- return p.Eval(vars)
-}
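For reference, a sketch of ContextEval with a deadline, assuming the standard context and time imports; cel.InterruptCheckFrequency is assumed to be the ProgramOption that tunes how often the #interrupted variable is consulted:

    env, _ := cel.NewEnv(cel.Variable("items", cel.ListType(cel.IntType)))
    checked, _ := env.Compile(`items.all(i, i >= 0)`)
    prg, _ := env.Program(checked, cel.InterruptCheckFrequency(100))
    ctx, cancel := context.WithTimeout(context.Background(), 50*time.Millisecond)
    defer cancel()
    // If the deadline expires mid-evaluation, an interpreter.EvalCancelledError is returned.
    _, _, err := prg.ContextEval(ctx, map[string]any{"items": []int{1, 2, 3}})
    _ = err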
-
-// progFactory is a helper alias for marking a program creation factory function.
-type progFactory func(interpreter.EvalState, *interpreter.CostTracker) (Program, error)
-
-// progGen holds a reference to a progFactory instance and implements the Program interface.
-type progGen struct {
- factory progFactory
-}
-
-// newProgGen tests the factory object by calling it once and returns a factory-based Program if
-// the test is successful.
-func newProgGen(factory progFactory) (Program, error) {
-	// Test the factory to make sure that configuration errors are spotted at configuration time.
- tracker, err := interpreter.NewCostTracker(nil)
- if err != nil {
- return nil, err
- }
- _, err = factory(interpreter.NewEvalState(), tracker)
- if err != nil {
- return nil, err
- }
- return &progGen{factory: factory}, nil
-}
-
-// Eval implements the Program interface method.
-func (gen *progGen) Eval(input any) (ref.Val, *EvalDetails, error) {
- // The factory based Eval() differs from the standard evaluation model in that it generates a
- // new EvalState instance for each call to ensure that unique evaluations yield unique stateful
- // results.
- state := interpreter.NewEvalState()
- costTracker, err := interpreter.NewCostTracker(nil)
- if err != nil {
- return nil, nil, err
- }
- det := &EvalDetails{state: state, costTracker: costTracker}
-
- // Generate a new instance of the interpretable using the factory configured during the call to
- // newProgram(). It is incredibly unlikely that the factory call will generate an error given
- // the factory test performed within the Program() call.
- p, err := gen.factory(state, costTracker)
- if err != nil {
- return nil, det, err
- }
-
- // Evaluate the input, returning the result and the 'state' within EvalDetails.
- v, _, err := p.Eval(input)
- if err != nil {
- return v, det, err
- }
- return v, det, nil
-}
-
-// ContextEval implements the Program interface method.
-func (gen *progGen) ContextEval(ctx context.Context, input any) (ref.Val, *EvalDetails, error) {
- if ctx == nil {
- return nil, nil, fmt.Errorf("context can not be nil")
- }
- // The factory based Eval() differs from the standard evaluation model in that it generates a
- // new EvalState instance for each call to ensure that unique evaluations yield unique stateful
- // results.
- state := interpreter.NewEvalState()
- costTracker, err := interpreter.NewCostTracker(nil)
- if err != nil {
- return nil, nil, err
- }
- det := &EvalDetails{state: state, costTracker: costTracker}
-
- // Generate a new instance of the interpretable using the factory configured during the call to
- // newProgram(). It is incredibly unlikely that the factory call will generate an error given
- // the factory test performed within the Program() call.
- p, err := gen.factory(state, costTracker)
- if err != nil {
- return nil, det, err
- }
-
- // Evaluate the input, returning the result and the 'state' within EvalDetails.
- v, _, err := p.ContextEval(ctx, input)
- if err != nil {
- return v, det, err
- }
- return v, det, nil
-}
-
-type ctxEvalActivation struct {
- parent interpreter.Activation
- interrupt <-chan struct{}
- interruptCheckCount uint
- interruptCheckFrequency uint
-}
-
-// ResolveName implements the Activation interface method, but adds a special #interrupted variable
-// which is capable of testing whether a 'done' signal is provided from a context.Context channel.
-func (a *ctxEvalActivation) ResolveName(name string) (any, bool) {
- if name == "#interrupted" {
- a.interruptCheckCount++
- if a.interruptCheckCount%a.interruptCheckFrequency == 0 {
- select {
- case <-a.interrupt:
- return true, true
- default:
- return nil, false
- }
- }
- return nil, false
- }
- return a.parent.ResolveName(name)
-}
-
-func (a *ctxEvalActivation) Parent() interpreter.Activation {
- return a.parent
-}
-
-func newCtxEvalActivationPool() *ctxEvalActivationPool {
- return &ctxEvalActivationPool{
- Pool: sync.Pool{
- New: func() any {
- return &ctxEvalActivation{}
- },
- },
- }
-}
-
-type ctxEvalActivationPool struct {
- sync.Pool
-}
-
-// Setup initializes a pooled Activation with the ability to check for context.Context cancellation.
-func (p *ctxEvalActivationPool) Setup(vars interpreter.Activation, done <-chan struct{}, interruptCheckRate uint) *ctxEvalActivation {
- a := p.Pool.Get().(*ctxEvalActivation)
- a.parent = vars
- a.interrupt = done
- a.interruptCheckCount = 0
- a.interruptCheckFrequency = interruptCheckRate
- return a
-}
-
-type evalActivation struct {
- vars map[string]any
- lazyVars map[string]any
-}
-
-// ResolveName looks up the value of the input variable name, if found.
-//
-// Lazy bindings may be supplied within the map-based input in either of the following forms:
-// - func() any
-// - func() ref.Val
-//
-// The lazy binding will only be invoked once per evaluation.
-//
-// Values which are not represented as ref.Val types on input may be adapted to a ref.Val using
-// the types.Adapter configured in the environment.
-func (a *evalActivation) ResolveName(name string) (any, bool) {
- v, found := a.vars[name]
- if !found {
- return nil, false
- }
- switch obj := v.(type) {
- case func() ref.Val:
- if resolved, found := a.lazyVars[name]; found {
- return resolved, true
- }
- lazy := obj()
- a.lazyVars[name] = lazy
- return lazy, true
- case func() any:
- if resolved, found := a.lazyVars[name]; found {
- return resolved, true
- }
- lazy := obj()
- a.lazyVars[name] = lazy
- return lazy, true
- default:
- return obj, true
- }
-}
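A sketch of the lazy binding forms listed above; loadConfig is a hypothetical helper and is invoked at most once per Eval call:

    env, _ := cel.NewEnv(cel.Variable("config", cel.MapType(cel.StringType, cel.StringType)))
    checked, _ := env.Compile(`config['mode'] == 'debug'`)
    prg, _ := env.Program(checked)
    out, _, err := prg.Eval(map[string]any{
        "config": func() any { // lazy binding, resolved on first reference
            return loadConfig() // hypothetical helper returning map[string]string
        },
    })
    _, _ = out, err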
-
-// Parent implements the interpreter.Activation interface
-func (a *evalActivation) Parent() interpreter.Activation {
- return nil
-}
-
-func newEvalActivationPool() *evalActivationPool {
- return &evalActivationPool{
- Pool: sync.Pool{
- New: func() any {
- return &evalActivation{lazyVars: make(map[string]any)}
- },
- },
- }
-}
-
-type evalActivationPool struct {
- sync.Pool
-}
-
-// Setup initializes a pooled Activation object with the map input.
-func (p *evalActivationPool) Setup(vars map[string]any) *evalActivation {
- a := p.Pool.Get().(*evalActivation)
- a.vars = vars
- return a
-}
-
-func (p *evalActivationPool) Put(value any) {
- a := value.(*evalActivation)
- for k := range a.lazyVars {
- delete(a.lazyVars, k)
- }
- p.Pool.Put(a)
-}
-
-var (
- // activationPool is an internally managed pool of Activation values that wrap map[string]any inputs
- activationPool = newEvalActivationPool()
-
- // ctxActivationPool is an internally managed pool of Activation values that expose a special #interrupted variable
- ctxActivationPool = newCtxEvalActivationPool()
-)
diff --git a/vendor/github.com/google/cel-go/cel/validator.go b/vendor/github.com/google/cel-go/cel/validator.go
deleted file mode 100644
index b50c67452..000000000
--- a/vendor/github.com/google/cel-go/cel/validator.go
+++ /dev/null
@@ -1,375 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package cel
-
-import (
- "fmt"
- "reflect"
- "regexp"
-
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/overloads"
-)
-
-const (
- homogeneousValidatorName = "cel.lib.std.validate.types.homogeneous"
-
- // HomogeneousAggregateLiteralExemptFunctions is the ValidatorConfig key used to configure
- // the set of function names which are exempt from homogeneous type checks. The expected type
- // is a string list of function names.
- //
- // As an example, the `.format([args])` call expects the input arguments list to be
- // comprised of a variety of types which correspond to the types expected by the format control
- // clauses; however, all other uses of a mixed element type list, would be unexpected.
- HomogeneousAggregateLiteralExemptFunctions = homogeneousValidatorName + ".exempt"
-)
-
-// ASTValidators configures a set of ASTValidator instances into the target environment.
-//
-// Validators are applied in the order in which they are specified and are treated as singletons.
-// The same ASTValidator with a given name will not be applied more than once.
-func ASTValidators(validators ...ASTValidator) EnvOption {
- return func(e *Env) (*Env, error) {
- for _, v := range validators {
- if !e.HasValidator(v.Name()) {
- e.validators = append(e.validators, v)
- }
- }
- return e, nil
- }
-}
-
-// ASTValidator defines a singleton interface for validating a type-checked Ast against an environment.
-//
-// Note: the Issues argument is mutable in the sense that it is intended to collect errors which will be
-// reported to the caller.
-type ASTValidator interface {
- // Name returns the name of the validator. Names must be unique.
- Name() string
-
- // Validate validates a given Ast within an Environment and collects a set of potential issues.
- //
- // The ValidatorConfig is generated from the set of ASTValidatorConfigurer instances prior to
- // the invocation of the Validate call. The expectation is that the validator configuration
- // is created in sequence and immutable once provided to the Validate call.
- //
- // See individual validators for more information on their configuration keys and configuration
- // properties.
- Validate(*Env, ValidatorConfig, *ast.AST, *Issues)
-}
-
-// ValidatorConfig provides an accessor method for querying validator configuration state.
-type ValidatorConfig interface {
- GetOrDefault(name string, value any) any
-}
-
-// MutableValidatorConfig provides mutation methods for querying and updating validator configuration
-// settings.
-type MutableValidatorConfig interface {
- ValidatorConfig
- Set(name string, value any) error
-}
-
-// ASTValidatorConfigurer indicates that this object, currently expected to be an ASTValidator,
-// participates in validator configuration settings.
-//
-// This interface may be split from the expectation of being an ASTValidator instance in the future.
-type ASTValidatorConfigurer interface {
- Configure(MutableValidatorConfig) error
-}
-
-// validatorConfig implements the ValidatorConfig and MutableValidatorConfig interfaces.
-type validatorConfig struct {
- data map[string]any
-}
-
-// newValidatorConfig initializes the validator config with default values for core CEL validators.
-func newValidatorConfig() *validatorConfig {
- return &validatorConfig{
- data: map[string]any{
- HomogeneousAggregateLiteralExemptFunctions: []string{},
- },
- }
-}
-
-// GetOrDefault returns the configured value for the name, if present, else the input default value.
-//
-// Note, the type-agreement between the input default and configured value is not checked on read.
-func (config *validatorConfig) GetOrDefault(name string, value any) any {
- v, found := config.data[name]
- if !found {
- return value
- }
- return v
-}
-
-// Set configures a validator option with the given name and value.
-//
-// If the value had previously been set, the new value must have the same reflection type as the old one,
-// or the call will error.
-func (config *validatorConfig) Set(name string, value any) error {
- v, found := config.data[name]
- if found && reflect.TypeOf(v) != reflect.TypeOf(value) {
- return fmt.Errorf("incompatible configuration type for %s, got %T, wanted %T", name, value, v)
- }
- config.data[name] = value
- return nil
-}
-
-// ExtendedValidations collects a set of common AST validations which reduce the likelihood of runtime errors.
-//
-// - Validate duration and timestamp literals
-// - Ensure regex strings are valid
-// - Disable mixed type list and map literals
-func ExtendedValidations() EnvOption {
- return ASTValidators(
- ValidateDurationLiterals(),
- ValidateTimestampLiterals(),
- ValidateRegexLiterals(),
- ValidateHomogeneousAggregateLiterals(),
- )
-}
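A short sketch of the bundled validators in action; the malformed duration literal below is reported at compile time instead of failing at evaluation:

    env, _ := cel.NewEnv(cel.ExtendedValidations())
    _, iss := env.Compile(`duration('1dd') < duration('2h')`)
    // iss.Err() reports: invalid duration argument
    _ = iss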
-
-// ValidateDurationLiterals ensures that duration literal arguments are valid immediately after type-check.
-func ValidateDurationLiterals() ASTValidator {
- return newFormatValidator(overloads.TypeConvertDuration, 0, evalCall)
-}
-
-// ValidateTimestampLiterals ensures that timestamp literal arguments are valid immediately after type-check.
-func ValidateTimestampLiterals() ASTValidator {
- return newFormatValidator(overloads.TypeConvertTimestamp, 0, evalCall)
-}
-
-// ValidateRegexLiterals ensures that regex patterns are validated after type-check.
-func ValidateRegexLiterals() ASTValidator {
- return newFormatValidator(overloads.Matches, 0, compileRegex)
-}
-
-// ValidateHomogeneousAggregateLiterals checks that all list and map literals entries have the same types, i.e.
-// no mixed list element types or mixed map key or map value types.
-//
-// Note: the string format call relies on a mixed element type list for ease of use, so this check skips all
-// literals which occur within string format calls.
-func ValidateHomogeneousAggregateLiterals() ASTValidator {
- return homogeneousAggregateLiteralValidator{}
-}
-
-// ValidateComprehensionNestingLimit ensures that comprehension nesting does not exceed the specified limit.
-//
-// This validator can be useful for preventing arbitrarily nested comprehensions which can take high polynomial
-// time to complete.
-//
-// Note, this limit does not apply to comprehensions with an empty iteration range, as these comprehensions have
-// no actual looping cost. The cel.bind() macro utilizes the comprehension structure to perform local variable
-// assignments and supplies an empty iteration range, so such bindings won't count against the nesting limit either.
-func ValidateComprehensionNestingLimit(limit int) ASTValidator {
- return nestingLimitValidator{limit: limit}
-}
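A sketch of the nesting limit; with a limit of 1 the doubly nested comprehension below is rejected, while cel.bind() and empty iteration ranges remain exempt:

    env, _ := cel.NewEnv(
        cel.Variable("rows", cel.ListType(cel.ListType(cel.IntType))),
        cel.ASTValidators(cel.ValidateComprehensionNestingLimit(1)),
    )
    _, iss := env.Compile(`rows.all(r, r.all(c, c >= 0))`)
    // iss.Err() reports: comprehension exceeds nesting limit
    _ = iss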
-
-type argChecker func(env *Env, call, arg ast.Expr) error
-
-func newFormatValidator(funcName string, argNum int, check argChecker) formatValidator {
- return formatValidator{
- funcName: funcName,
- check: check,
- argNum: argNum,
- }
-}
-
-type formatValidator struct {
- funcName string
- argNum int
- check argChecker
-}
-
-// Name returns the unique name of this function format validator.
-func (v formatValidator) Name() string {
- return fmt.Sprintf("cel.lib.std.validate.functions.%s", v.funcName)
-}
-
-// Validate searches the AST for uses of a given function name with a constant argument and performs a check
-// on whether the argument is a valid literal value.
-func (v formatValidator) Validate(e *Env, _ ValidatorConfig, a *ast.AST, iss *Issues) {
- root := ast.NavigateAST(a)
- funcCalls := ast.MatchDescendants(root, ast.FunctionMatcher(v.funcName))
- for _, call := range funcCalls {
- callArgs := call.AsCall().Args()
- if len(callArgs) <= v.argNum {
- continue
- }
- litArg := callArgs[v.argNum]
- if litArg.Kind() != ast.LiteralKind {
- continue
- }
- if err := v.check(e, call, litArg); err != nil {
- iss.ReportErrorAtID(litArg.ID(), "invalid %s argument", v.funcName)
- }
- }
-}
-
-func evalCall(env *Env, call, arg ast.Expr) error {
- ast := &Ast{impl: ast.NewAST(call, ast.NewSourceInfo(nil))}
- prg, err := env.Program(ast)
- if err != nil {
- return err
- }
- _, _, err = prg.Eval(NoVars())
- return err
-}
-
-func compileRegex(_ *Env, _, arg ast.Expr) error {
- pattern := arg.AsLiteral().Value().(string)
- _, err := regexp.Compile(pattern)
- return err
-}
-
-type homogeneousAggregateLiteralValidator struct{}
-
-// Name returns the unique name of the homogeneous type validator.
-func (homogeneousAggregateLiteralValidator) Name() string {
- return homogeneousValidatorName
-}
-
-// Validate validates that all lists and map literals have homogeneous types, i.e. don't contain dyn types.
-//
-// This validator makes an exception for list and map literals which occur at any level of nesting within
-// string format calls.
-func (v homogeneousAggregateLiteralValidator) Validate(_ *Env, c ValidatorConfig, a *ast.AST, iss *Issues) {
- var exemptedFunctions []string
- exemptedFunctions = c.GetOrDefault(HomogeneousAggregateLiteralExemptFunctions, exemptedFunctions).([]string)
- root := ast.NavigateAST(a)
- listExprs := ast.MatchDescendants(root, ast.KindMatcher(ast.ListKind))
- for _, listExpr := range listExprs {
- if inExemptFunction(listExpr, exemptedFunctions) {
- continue
- }
- l := listExpr.AsList()
- elements := l.Elements()
- optIndices := l.OptionalIndices()
- var elemType *Type
- for i, e := range elements {
- et := a.GetType(e.ID())
- if isOptionalIndex(i, optIndices) {
- et = et.Parameters()[0]
- }
- if elemType == nil {
- elemType = et
- continue
- }
- if !elemType.IsEquivalentType(et) {
- v.typeMismatch(iss, e.ID(), elemType, et)
- break
- }
- }
- }
- mapExprs := ast.MatchDescendants(root, ast.KindMatcher(ast.MapKind))
- for _, mapExpr := range mapExprs {
- if inExemptFunction(mapExpr, exemptedFunctions) {
- continue
- }
- m := mapExpr.AsMap()
- entries := m.Entries()
- var keyType, valType *Type
- for _, e := range entries {
- mapEntry := e.AsMapEntry()
- key, val := mapEntry.Key(), mapEntry.Value()
- kt, vt := a.GetType(key.ID()), a.GetType(val.ID())
- if mapEntry.IsOptional() {
- vt = vt.Parameters()[0]
- }
- if keyType == nil && valType == nil {
- keyType, valType = kt, vt
- continue
- }
- if !keyType.IsEquivalentType(kt) {
- v.typeMismatch(iss, key.ID(), keyType, kt)
- }
- if !valType.IsEquivalentType(vt) {
- v.typeMismatch(iss, val.ID(), valType, vt)
- }
- }
- }
-}
-
-func inExemptFunction(e ast.NavigableExpr, exemptFunctions []string) bool {
- parent, found := e.Parent()
- for found {
- if parent.Kind() == ast.CallKind {
- fnName := parent.AsCall().FunctionName()
- for _, exempt := range exemptFunctions {
- if exempt == fnName {
- return true
- }
- }
- }
- parent, found = parent.Parent()
- }
- return false
-}
-
-func isOptionalIndex(i int, optIndices []int32) bool {
- for _, optInd := range optIndices {
- if i == int(optInd) {
- return true
- }
- }
- return false
-}
-
-func (homogeneousAggregateLiteralValidator) typeMismatch(iss *Issues, id int64, expected, actual *Type) {
- iss.ReportErrorAtID(id, "expected type '%s' but found '%s'", FormatCELType(expected), FormatCELType(actual))
-}
-
-type nestingLimitValidator struct {
- limit int
-}
-
-func (v nestingLimitValidator) Name() string {
- return "cel.lib.std.validate.comprehension_nesting_limit"
-}
-
-func (v nestingLimitValidator) Validate(e *Env, _ ValidatorConfig, a *ast.AST, iss *Issues) {
- root := ast.NavigateAST(a)
- comprehensions := ast.MatchDescendants(root, ast.KindMatcher(ast.ComprehensionKind))
- if len(comprehensions) <= v.limit {
- return
- }
- for _, comp := range comprehensions {
- count := 0
- e := comp
- hasParent := true
- for hasParent {
- // When the expression is not a comprehension, continue to the next ancestor.
- if e.Kind() != ast.ComprehensionKind {
- e, hasParent = e.Parent()
- continue
- }
- // When the comprehension has an empty range, continue to the next ancestor
- // as this comprehension does not have any associated cost.
- iterRange := e.AsComprehension().IterRange()
- if iterRange.Kind() == ast.ListKind && iterRange.AsList().Size() == 0 {
- e, hasParent = e.Parent()
- continue
- }
- // Otherwise check the nesting limit.
- count++
- if count > v.limit {
- iss.ReportErrorAtID(comp.ID(), "comprehension exceeds nesting limit")
- break
- }
- e, hasParent = e.Parent()
- }
- }
-}
diff --git a/vendor/github.com/google/cel-go/checker/BUILD.bazel b/vendor/github.com/google/cel-go/checker/BUILD.bazel
deleted file mode 100644
index 997fa91d1..000000000
--- a/vendor/github.com/google/cel-go/checker/BUILD.bazel
+++ /dev/null
@@ -1,65 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "checker.go",
- "cost.go",
- "env.go",
- "errors.go",
- "format.go",
- "mapping.go",
- "options.go",
- "printer.go",
- "scopes.go",
- "standard.go",
- "types.go",
- ],
- importpath = "github.com/google/cel-go/checker",
- visibility = ["//visibility:public"],
- deps = [
- "//checker/decls:go_default_library",
- "//common:go_default_library",
- "//common/ast:go_default_library",
- "//common/containers:go_default_library",
- "//common/debug:go_default_library",
- "//common/decls:go_default_library",
- "//common/operators:go_default_library",
- "//common/overloads:go_default_library",
- "//common/stdlib:go_default_library",
- "//common/types:go_default_library",
- "//common/types/pb:go_default_library",
- "//common/types/ref:go_default_library",
- "//parser:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//types/known/emptypb:go_default_library",
- "@org_golang_google_protobuf//types/known/structpb:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- size = "small",
- srcs = [
- "checker_test.go",
- "cost_test.go",
- "env_test.go",
- "format_test.go",
- ],
- embed = [
- ":go_default_library",
- ],
- deps = [
- "//common/types:go_default_library",
- "//parser:go_default_library",
- "//test:go_default_library",
- "//test/proto2pb:go_default_library",
- "//test/proto3pb:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/checker/checker.go b/vendor/github.com/google/cel-go/checker/checker.go
deleted file mode 100644
index 57fb3ce5e..000000000
--- a/vendor/github.com/google/cel-go/checker/checker.go
+++ /dev/null
@@ -1,696 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package checker defines functions to type-check a parsed expression
-// against a set of identifier and function declarations.
-package checker
-
-import (
- "fmt"
- "reflect"
-
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/containers"
- "github.com/google/cel-go/common/decls"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-type checker struct {
- *ast.AST
- ast.ExprFactory
- env *Env
- errors *typeErrors
- mappings *mapping
- freeTypeVarCounter int
-}
-
-// Check performs type checking, giving a typed AST.
-//
-// The input is a parsed AST and an env which encapsulates type binding of variables,
-// declarations of built-in functions, descriptions of protocol buffers, and a registry for
-// errors.
-//
-// Returns a type-checked AST, which might not be usable if there are errors in the error
-// registry.
-func Check(parsed *ast.AST, source common.Source, env *Env) (*ast.AST, *common.Errors) {
- errs := common.NewErrors(source)
- typeMap := make(map[int64]*types.Type)
- refMap := make(map[int64]*ast.ReferenceInfo)
- c := checker{
- AST: ast.NewCheckedAST(parsed, typeMap, refMap),
- ExprFactory: ast.NewExprFactory(),
- env: env,
- errors: &typeErrors{errs: errs},
- mappings: newMapping(),
- freeTypeVarCounter: 0,
- }
- c.check(c.Expr())
-
- // Walk over the final type map substituting any type parameters either by their bound value
- // or by DYN.
- for id, t := range c.TypeMap() {
- c.SetType(id, substitute(c.mappings, t, true))
- }
- return c.AST, errs
-}
-
-func (c *checker) check(e ast.Expr) {
- if e == nil {
- return
- }
- switch e.Kind() {
- case ast.LiteralKind:
- literal := ref.Val(e.AsLiteral())
- switch literal.Type() {
- case types.BoolType, types.BytesType, types.DoubleType, types.IntType,
- types.NullType, types.StringType, types.UintType:
- c.setType(e, literal.Type().(*types.Type))
- default:
- c.errors.unexpectedASTType(e.ID(), c.location(e), "literal", literal.Type().TypeName())
- }
- case ast.IdentKind:
- c.checkIdent(e)
- case ast.SelectKind:
- c.checkSelect(e)
- case ast.CallKind:
- c.checkCall(e)
- case ast.ListKind:
- c.checkCreateList(e)
- case ast.MapKind:
- c.checkCreateMap(e)
- case ast.StructKind:
- c.checkCreateStruct(e)
- case ast.ComprehensionKind:
- c.checkComprehension(e)
- default:
- c.errors.unexpectedASTType(e.ID(), c.location(e), "unspecified", reflect.TypeOf(e).Name())
- }
-}
-
-func (c *checker) checkIdent(e ast.Expr) {
- identName := e.AsIdent()
- // Check to see if the identifier is declared.
- if ident := c.env.LookupIdent(identName); ident != nil {
- c.setType(e, ident.Type())
- c.setReference(e, ast.NewIdentReference(ident.Name(), ident.Value()))
- // Overwrite the identifier with its fully qualified name.
- e.SetKindCase(c.NewIdent(e.ID(), ident.Name()))
- return
- }
-
- c.setType(e, types.ErrorType)
- c.errors.undeclaredReference(e.ID(), c.location(e), c.env.container.Name(), identName)
-}
-
-func (c *checker) checkSelect(e ast.Expr) {
- sel := e.AsSelect()
- // Before traversing down the tree, try to interpret as qualified name.
- qname, found := containers.ToQualifiedName(e)
- if found {
- ident := c.env.LookupIdent(qname)
- if ident != nil {
- // We don't check for a TestOnly expression here since the `found` result is
- // always going to be false for TestOnly expressions.
-
- // Rewrite the node to be a variable reference to the resolved fully-qualified
- // variable name.
- c.setType(e, ident.Type())
- c.setReference(e, ast.NewIdentReference(ident.Name(), ident.Value()))
- e.SetKindCase(c.NewIdent(e.ID(), ident.Name()))
- return
- }
- }
-
- resultType := c.checkSelectField(e, sel.Operand(), sel.FieldName(), false)
- if sel.IsTestOnly() {
- resultType = types.BoolType
- }
- c.setType(e, substitute(c.mappings, resultType, false))
-}
-
-func (c *checker) checkOptSelect(e ast.Expr) {
- // Collect metadata related to the opt select call packaged by the parser.
- call := e.AsCall()
- operand := call.Args()[0]
- field := call.Args()[1]
- fieldName, isString := maybeUnwrapString(field)
- if !isString {
- c.errors.notAnOptionalFieldSelection(field.ID(), c.location(field), field)
- return
- }
-
- // Perform type-checking using the field selection logic.
- resultType := c.checkSelectField(e, operand, fieldName, true)
- c.setType(e, substitute(c.mappings, resultType, false))
- c.setReference(e, ast.NewFunctionReference("select_optional_field"))
-}
-
-func (c *checker) checkSelectField(e, operand ast.Expr, field string, optional bool) *types.Type {
- // Interpret as field selection, first traversing down the operand.
- c.check(operand)
- operandType := substitute(c.mappings, c.getType(operand), false)
-
- // If the target type is 'optional', unwrap it for the sake of this check.
- targetType, isOpt := maybeUnwrapOptional(operandType)
-
- // Assume error type by default as most types do not support field selection.
- resultType := types.ErrorType
- switch targetType.Kind() {
- case types.MapKind:
- // Maps yield their value type as the selection result type.
- resultType = targetType.Parameters()[1]
- case types.StructKind:
- // Objects yield their field type declaration as the selection result type, but only if
- // the field is defined.
- messageType := targetType
- if fieldType, found := c.lookupFieldType(e.ID(), messageType.TypeName(), field); found {
- resultType = fieldType
- }
- case types.TypeParamKind:
- // Set the operand type to DYN to prevent assignment to a potentially incorrect type
- // at a later point in type-checking. The isAssignable call will update the type
- // substitutions for the type param under the covers.
- c.isAssignable(types.DynType, targetType)
- // Also, set the result type to DYN.
- resultType = types.DynType
- default:
- // Dynamic / error values are treated as DYN type. Errors are handled this way as well
- // in order to allow forward progress on the check.
- if !isDynOrError(targetType) {
- c.errors.typeDoesNotSupportFieldSelection(e.ID(), c.location(e), targetType)
- }
- resultType = types.DynType
- }
-
- // If the target type was optional coming in, then the result must be optional going out.
- if isOpt || optional {
- return types.NewOptionalType(resultType)
- }
- return resultType
-}
-
-func (c *checker) checkCall(e ast.Expr) {
- // Note: similar logic exists within the `interpreter/planner.go`. If making changes here
- // please consider the impact on planner.go and consolidate implementations or mirror code
- // as appropriate.
- call := e.AsCall()
- fnName := call.FunctionName()
- if fnName == operators.OptSelect {
- c.checkOptSelect(e)
- return
- }
-
- args := call.Args()
- // Traverse arguments.
- for _, arg := range args {
- c.check(arg)
- }
-
- // Regular static call with simple name.
- if !call.IsMemberFunction() {
- // Check for the existence of the function.
- fn := c.env.LookupFunction(fnName)
- if fn == nil {
- c.errors.undeclaredReference(e.ID(), c.location(e), c.env.container.Name(), fnName)
- c.setType(e, types.ErrorType)
- return
- }
- // Overwrite the function name with its fully qualified resolved name.
- e.SetKindCase(c.NewCall(e.ID(), fn.Name(), args...))
- // Check to see whether the overload resolves.
- c.resolveOverloadOrError(e, fn, nil, args)
- return
- }
-
- // If a receiver 'target' is present, it may either be a receiver function, or a namespaced
- // function, but not both. Given a.b.c() either a.b.c is a function or c is a function with
- // target a.b.
- //
- // Check whether the target is a namespaced function name.
- target := call.Target()
- qualifiedPrefix, maybeQualified := containers.ToQualifiedName(target)
- if maybeQualified {
- maybeQualifiedName := qualifiedPrefix + "." + fnName
- fn := c.env.LookupFunction(maybeQualifiedName)
- if fn != nil {
- // The function name is namespaced and so preserving the target operand would
- // be an inaccurate representation of the desired evaluation behavior.
- // Overwrite with fully-qualified resolved function name sans receiver target.
- e.SetKindCase(c.NewCall(e.ID(), fn.Name(), args...))
- c.resolveOverloadOrError(e, fn, nil, args)
- return
- }
- }
-
- // Regular instance call.
- c.check(target)
- fn := c.env.LookupFunction(fnName)
- // Function found, attempt overload resolution.
- if fn != nil {
- c.resolveOverloadOrError(e, fn, target, args)
- return
- }
- // Function name not declared, record error.
- c.setType(e, types.ErrorType)
- c.errors.undeclaredReference(e.ID(), c.location(e), c.env.container.Name(), fnName)
-}
-
-func (c *checker) resolveOverloadOrError(
- e ast.Expr, fn *decls.FunctionDecl, target ast.Expr, args []ast.Expr) {
- // Attempt to resolve the overload.
- resolution := c.resolveOverload(e, fn, target, args)
- // No such overload, error noted in the resolveOverload call, type recorded here.
- if resolution == nil {
- c.setType(e, types.ErrorType)
- return
- }
- // Overload found.
- c.setType(e, resolution.Type)
- c.setReference(e, resolution.Reference)
-}
-
-func (c *checker) resolveOverload(
- call ast.Expr, fn *decls.FunctionDecl, target ast.Expr, args []ast.Expr) *overloadResolution {
-
- var argTypes []*types.Type
- if target != nil {
- argTypes = append(argTypes, c.getType(target))
- }
- for _, arg := range args {
- argTypes = append(argTypes, c.getType(arg))
- }
-
- var resultType *types.Type
- var checkedRef *ast.ReferenceInfo
- for _, overload := range fn.OverloadDecls() {
- // Determine whether the overload is currently considered.
- if c.env.isOverloadDisabled(overload.ID()) {
- continue
- }
-
- // Ensure the call style for the overload matches.
- if (target == nil && overload.IsMemberFunction()) ||
- (target != nil && !overload.IsMemberFunction()) {
- // not a compatible call style.
- continue
- }
-
- // Alternative type-checking behavior when the logical operators are compacted into
- // variadic AST representations.
- if fn.Name() == operators.LogicalAnd || fn.Name() == operators.LogicalOr {
- checkedRef = ast.NewFunctionReference(overload.ID())
- for i, argType := range argTypes {
- if !c.isAssignable(argType, types.BoolType) {
- c.errors.typeMismatch(
- args[i].ID(),
- c.locationByID(args[i].ID()),
- types.BoolType,
- argType)
- resultType = types.ErrorType
- }
- }
- if isError(resultType) {
- return nil
- }
- return newResolution(checkedRef, types.BoolType)
- }
-
- overloadType := newFunctionType(overload.ResultType(), overload.ArgTypes()...)
- typeParams := overload.TypeParams()
- if len(typeParams) != 0 {
- // Instantiate overload's type with fresh type variables.
- substitutions := newMapping()
- for _, typePar := range typeParams {
- substitutions.add(types.NewTypeParamType(typePar), c.newTypeVar())
- }
- overloadType = substitute(substitutions, overloadType, false)
- }
-
- candidateArgTypes := overloadType.Parameters()[1:]
- if c.isAssignableList(argTypes, candidateArgTypes) {
- if checkedRef == nil {
- checkedRef = ast.NewFunctionReference(overload.ID())
- } else {
- checkedRef.AddOverload(overload.ID())
- }
-
- // First matching overload, determines result type.
- fnResultType := substitute(c.mappings, overloadType.Parameters()[0], false)
- if resultType == nil {
- resultType = fnResultType
- } else if !isDyn(resultType) && !fnResultType.IsExactType(resultType) {
- resultType = types.DynType
- }
- }
- }
-
- if resultType == nil {
- for i, argType := range argTypes {
- argTypes[i] = substitute(c.mappings, argType, true)
- }
- c.errors.noMatchingOverload(call.ID(), c.location(call), fn.Name(), argTypes, target != nil)
- return nil
- }
-
- return newResolution(checkedRef, resultType)
-}
-
-func (c *checker) checkCreateList(e ast.Expr) {
- create := e.AsList()
- var elemsType *types.Type
- optionalIndices := create.OptionalIndices()
- optionals := make(map[int32]bool, len(optionalIndices))
- for _, optInd := range optionalIndices {
- optionals[optInd] = true
- }
- for i, e := range create.Elements() {
- c.check(e)
- elemType := c.getType(e)
- if optionals[int32(i)] {
- var isOptional bool
- elemType, isOptional = maybeUnwrapOptional(elemType)
- if !isOptional && !isDyn(elemType) {
- c.errors.typeMismatch(e.ID(), c.location(e), types.NewOptionalType(elemType), elemType)
- }
- }
- elemsType = c.joinTypes(e, elemsType, elemType)
- }
- if elemsType == nil {
- // If the list is empty, assign free type var to elem type.
- elemsType = c.newTypeVar()
- }
- c.setType(e, types.NewListType(elemsType))
-}
-
-func (c *checker) checkCreateMap(e ast.Expr) {
- mapVal := e.AsMap()
- var mapKeyType *types.Type
- var mapValueType *types.Type
- for _, e := range mapVal.Entries() {
- entry := e.AsMapEntry()
- key := entry.Key()
- c.check(key)
- mapKeyType = c.joinTypes(key, mapKeyType, c.getType(key))
-
- val := entry.Value()
- c.check(val)
- valType := c.getType(val)
- if entry.IsOptional() {
- var isOptional bool
- valType, isOptional = maybeUnwrapOptional(valType)
- if !isOptional && !isDyn(valType) {
- c.errors.typeMismatch(val.ID(), c.location(val), types.NewOptionalType(valType), valType)
- }
- }
- mapValueType = c.joinTypes(val, mapValueType, valType)
- }
- if mapKeyType == nil {
- // If the map is empty, assign free type variables to the key and value types.
- mapKeyType = c.newTypeVar()
- mapValueType = c.newTypeVar()
- }
- c.setType(e, types.NewMapType(mapKeyType, mapValueType))
-}
-
-func (c *checker) checkCreateStruct(e ast.Expr) {
- msgVal := e.AsStruct()
- // Determine the type of the message.
- resultType := types.ErrorType
- ident := c.env.LookupIdent(msgVal.TypeName())
- if ident == nil {
- c.errors.undeclaredReference(
- e.ID(), c.location(e), c.env.container.Name(), msgVal.TypeName())
- c.setType(e, types.ErrorType)
- return
- }
- // Ensure the type name is fully qualified in the AST.
- typeName := ident.Name()
- if msgVal.TypeName() != typeName {
- e.SetKindCase(c.NewStruct(e.ID(), typeName, msgVal.Fields()))
- msgVal = e.AsStruct()
- }
- c.setReference(e, ast.NewIdentReference(typeName, nil))
- identKind := ident.Type().Kind()
- if identKind != types.ErrorKind {
- if identKind != types.TypeKind {
- c.errors.notAType(e.ID(), c.location(e), ident.Type().DeclaredTypeName())
- } else {
- resultType = ident.Type().Parameters()[0]
- // Backwards compatibility test between well-known types and message types
- // In this context, the type is being instantiated by its protobuf name which
- // is not ideal or recommended, but some users expect this to work.
- if isWellKnownType(resultType) {
- typeName = getWellKnownTypeName(resultType)
- } else if resultType.Kind() == types.StructKind {
- typeName = resultType.DeclaredTypeName()
- } else {
- c.errors.notAMessageType(e.ID(), c.location(e), resultType.DeclaredTypeName())
- resultType = types.ErrorType
- }
- }
- }
- c.setType(e, resultType)
-
- // Check the field initializers.
- for _, f := range msgVal.Fields() {
- field := f.AsStructField()
- fieldName := field.Name()
- value := field.Value()
- c.check(value)
-
- fieldType := types.ErrorType
- ft, found := c.lookupFieldType(f.ID(), typeName, fieldName)
- if found {
- fieldType = ft
- }
-
- valType := c.getType(value)
- if field.IsOptional() {
- var isOptional bool
- valType, isOptional = maybeUnwrapOptional(valType)
- if !isOptional && !isDyn(valType) {
- c.errors.typeMismatch(value.ID(), c.location(value), types.NewOptionalType(valType), valType)
- }
- }
- if !c.isAssignable(fieldType, valType) {
- c.errors.fieldTypeMismatch(f.ID(), c.locationByID(f.ID()), fieldName, fieldType, valType)
- }
- }
-}
-
-func (c *checker) checkComprehension(e ast.Expr) {
- comp := e.AsComprehension()
- c.check(comp.IterRange())
- c.check(comp.AccuInit())
- accuType := c.getType(comp.AccuInit())
- rangeType := substitute(c.mappings, c.getType(comp.IterRange()), false)
- var varType *types.Type
-
- switch rangeType.Kind() {
- case types.ListKind:
- varType = rangeType.Parameters()[0]
- case types.MapKind:
- // Ranges over the keys.
- varType = rangeType.Parameters()[0]
- case types.DynKind, types.ErrorKind, types.TypeParamKind:
- // Set the range type to DYN to prevent assignment to a potentially incorrect type
- // at a later point in type-checking. The isAssignable call will update the type
- // substitutions for the type param under the covers.
- c.isAssignable(types.DynType, rangeType)
- // Set the range iteration variable to type DYN as well.
- varType = types.DynType
- default:
- c.errors.notAComprehensionRange(comp.IterRange().ID(), c.location(comp.IterRange()), rangeType)
- varType = types.ErrorType
- }
-
- // Create a scope for the comprehension since it has a local accumulation variable.
- // This scope will contain the accumulation variable used to compute the result.
- c.env = c.env.enterScope()
- c.env.AddIdents(decls.NewVariable(comp.AccuVar(), accuType))
- // Create a block scope for the loop.
- c.env = c.env.enterScope()
- c.env.AddIdents(decls.NewVariable(comp.IterVar(), varType))
- // Check the variable references in the condition and step.
- c.check(comp.LoopCondition())
- c.assertType(comp.LoopCondition(), types.BoolType)
- c.check(comp.LoopStep())
- c.assertType(comp.LoopStep(), accuType)
- // Exit the loop's block scope before checking the result.
- c.env = c.env.exitScope()
- c.check(comp.Result())
- // Exit the comprehension scope.
- c.env = c.env.exitScope()
- c.setType(e, substitute(c.mappings, c.getType(comp.Result()), false))
-}
-
-// Checks compatibility of joined types, and returns the most general common type.
-func (c *checker) joinTypes(e ast.Expr, previous, current *types.Type) *types.Type {
- if previous == nil {
- return current
- }
- if c.isAssignable(previous, current) {
- return mostGeneral(previous, current)
- }
- if c.dynAggregateLiteralElementTypesEnabled() {
- return types.DynType
- }
- c.errors.typeMismatch(e.ID(), c.location(e), previous, current)
- return types.ErrorType
-}
-
-func (c *checker) dynAggregateLiteralElementTypesEnabled() bool {
- return c.env.aggLitElemType == dynElementType
-}
-
-func (c *checker) newTypeVar() *types.Type {
- id := c.freeTypeVarCounter
- c.freeTypeVarCounter++
- return types.NewTypeParamType(fmt.Sprintf("_var%d", id))
-}
-
-func (c *checker) isAssignable(t1, t2 *types.Type) bool {
- subs := isAssignable(c.mappings, t1, t2)
- if subs != nil {
- c.mappings = subs
- return true
- }
-
- return false
-}
-
-func (c *checker) isAssignableList(l1, l2 []*types.Type) bool {
- subs := isAssignableList(c.mappings, l1, l2)
- if subs != nil {
- c.mappings = subs
- return true
- }
-
- return false
-}
-
-func maybeUnwrapString(e ast.Expr) (string, bool) {
- switch e.Kind() {
- case ast.LiteralKind:
- literal := e.AsLiteral()
- switch v := literal.(type) {
- case types.String:
- return string(v), true
- }
- }
- return "", false
-}
-
-func (c *checker) setType(e ast.Expr, t *types.Type) {
- if old, found := c.TypeMap()[e.ID()]; found && !old.IsExactType(t) {
- c.errors.incompatibleType(e.ID(), c.location(e), e, old, t)
- return
- }
- c.SetType(e.ID(), t)
-}
-
-func (c *checker) getType(e ast.Expr) *types.Type {
- return c.TypeMap()[e.ID()]
-}
-
-func (c *checker) setReference(e ast.Expr, r *ast.ReferenceInfo) {
- if old, found := c.ReferenceMap()[e.ID()]; found && !old.Equals(r) {
- c.errors.referenceRedefinition(e.ID(), c.location(e), e, old, r)
- return
- }
- c.SetReference(e.ID(), r)
-}
-
-func (c *checker) assertType(e ast.Expr, t *types.Type) {
- if !c.isAssignable(t, c.getType(e)) {
- c.errors.typeMismatch(e.ID(), c.location(e), t, c.getType(e))
- }
-}
-
-type overloadResolution struct {
- Type *types.Type
- Reference *ast.ReferenceInfo
-}
-
-func newResolution(r *ast.ReferenceInfo, t *types.Type) *overloadResolution {
- return &overloadResolution{
- Reference: r,
- Type: t,
- }
-}
-
-func (c *checker) location(e ast.Expr) common.Location {
- return c.locationByID(e.ID())
-}
-
-func (c *checker) locationByID(id int64) common.Location {
- return c.SourceInfo().GetStartLocation(id)
-}
-
-func (c *checker) lookupFieldType(exprID int64, structType, fieldName string) (*types.Type, bool) {
- if _, found := c.env.provider.FindStructType(structType); !found {
- // This should not happen; report an error anyway.
- c.errors.unexpectedFailedResolution(exprID, c.locationByID(exprID), structType)
- return nil, false
- }
-
- if ft, found := c.env.provider.FindStructFieldType(structType, fieldName); found {
- return ft.Type, found
- }
-
- c.errors.undefinedField(exprID, c.locationByID(exprID), fieldName)
- return nil, false
-}
-
-func isWellKnownType(t *types.Type) bool {
- switch t.Kind() {
- case types.AnyKind, types.TimestampKind, types.DurationKind, types.DynKind, types.NullTypeKind:
- return true
- case types.BoolKind, types.BytesKind, types.DoubleKind, types.IntKind, types.StringKind, types.UintKind:
- return t.IsAssignableType(types.NullType)
- case types.ListKind:
- return t.Parameters()[0] == types.DynType
- case types.MapKind:
- return t.Parameters()[0] == types.StringType && t.Parameters()[1] == types.DynType
- }
- return false
-}
-
-func getWellKnownTypeName(t *types.Type) string {
- if name, found := wellKnownTypes[t.Kind()]; found {
- return name
- }
- return ""
-}
-
-var (
- wellKnownTypes = map[types.Kind]string{
- types.AnyKind: "google.protobuf.Any",
- types.BoolKind: "google.protobuf.BoolValue",
- types.BytesKind: "google.protobuf.BytesValue",
- types.DoubleKind: "google.protobuf.DoubleValue",
- types.DurationKind: "google.protobuf.Duration",
- types.DynKind: "google.protobuf.Value",
- types.IntKind: "google.protobuf.Int64Value",
- types.ListKind: "google.protobuf.ListValue",
- types.NullTypeKind: "google.protobuf.NullValue",
- types.MapKind: "google.protobuf.Struct",
- types.StringKind: "google.protobuf.StringValue",
- types.TimestampKind: "google.protobuf.Timestamp",
- types.UintKind: "google.protobuf.UInt64Value",
- }
-)
diff --git a/vendor/github.com/google/cel-go/checker/cost.go b/vendor/github.com/google/cel-go/checker/cost.go
deleted file mode 100644
index 3470d0a3f..000000000
--- a/vendor/github.com/google/cel-go/checker/cost.go
+++ /dev/null
@@ -1,702 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-import (
- "math"
-
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/parser"
-)
-
-// WARNING: Any changes to cost calculations in this file require a corresponding change in interpreter/runtimecost.go
-
-// CostEstimator estimates the sizes of variable length input data and the costs of functions.
-type CostEstimator interface {
- // EstimateSize returns a SizeEstimate for the given AstNode, or nil if
- // the estimator has no estimate to provide. The size is equivalent to the result of the CEL `size()` function:
- // length of strings and bytes, number of map entries or number of list items.
- // EstimateSize is only called for AstNodes where
- // CEL does not know the size; EstimateSize is not called for values defined inline in CEL where the size
- // is already obvious to CEL.
- EstimateSize(element AstNode) *SizeEstimate
- // EstimateCallCost returns the estimated cost of an invocation, or nil if
- // the estimator has no estimate to provide.
- EstimateCallCost(function, overloadID string, target *AstNode, args []AstNode) *CallEstimate
-}
-
-// CallEstimate includes a CostEstimate for the call, and an optional estimate of the result object size.
-// The ResultSize should only be provided if the call results in a map, list, string or bytes.
-type CallEstimate struct {
- CostEstimate
- ResultSize *SizeEstimate
-}
-
-// AstNode represents an AST node for the purpose of cost estimations.
-type AstNode interface {
- // Path returns a field path through the provided type declarations to the type of the AstNode, or nil if the AstNode does not
- // represent a type directly reachable from the provided type declarations.
- // The first path element is a variable. All subsequent path elements are one of: field name, '@items', '@keys', '@values'.
- Path() []string
- // Type returns the deduced type of the AstNode.
- Type() *types.Type
- // Expr returns the expression of the AstNode.
- Expr() ast.Expr
- // ComputedSize returns a size estimate of the AstNode derived from information available in the CEL expression.
- // For constants and inline list and map declarations, the exact size is returned. For concatenated list, strings
- // and bytes, the size is derived from the size estimates of the operands. nil is returned if there is no
- // computed size available.
- ComputedSize() *SizeEstimate
-}
-
-type astNode struct {
- path []string
- t *types.Type
- expr ast.Expr
- derivedSize *SizeEstimate
-}
-
-func (e astNode) Path() []string {
- return e.path
-}
-
-func (e astNode) Type() *types.Type {
- return e.t
-}
-
-func (e astNode) Expr() ast.Expr {
- return e.expr
-}
-
-func (e astNode) ComputedSize() *SizeEstimate {
- if e.derivedSize != nil {
- return e.derivedSize
- }
- var v uint64
- switch e.expr.Kind() {
- case ast.LiteralKind:
- switch ck := e.expr.AsLiteral().(type) {
- case types.String:
- // converting to runes here is an O(n) operation, but
- // this is consistent with how size is computed at runtime,
- // and how the language definition defines string size
- v = uint64(len([]rune(ck)))
- case types.Bytes:
- v = uint64(len(ck))
- case types.Bool, types.Double, types.Duration,
- types.Int, types.Timestamp, types.Uint,
- types.Null:
- v = uint64(1)
- default:
- return nil
- }
- case ast.ListKind:
- v = uint64(e.expr.AsList().Size())
- case ast.MapKind:
- v = uint64(e.expr.AsMap().Size())
- default:
- return nil
- }
-
- return &SizeEstimate{Min: v, Max: v}
-}
-
-// SizeEstimate represents an estimated size of a variable length string, bytes, map or list.
-type SizeEstimate struct {
- Min, Max uint64
-}
-
-// Add adds to another SizeEstimate and returns the sum.
-// If add would result in a uint64 overflow, the result is math.MaxUint64.
-func (se SizeEstimate) Add(sizeEstimate SizeEstimate) SizeEstimate {
- return SizeEstimate{
- addUint64NoOverflow(se.Min, sizeEstimate.Min),
- addUint64NoOverflow(se.Max, sizeEstimate.Max),
- }
-}
-
-// Multiply multiplies by another SizeEstimate and returns the product.
-// If multiply would result in a uint64 overflow, the result is math.MaxUint64.
-func (se SizeEstimate) Multiply(sizeEstimate SizeEstimate) SizeEstimate {
- return SizeEstimate{
- multiplyUint64NoOverflow(se.Min, sizeEstimate.Min),
- multiplyUint64NoOverflow(se.Max, sizeEstimate.Max),
- }
-}
-
-// MultiplyByCostFactor multiplies a SizeEstimate by a cost factor and returns the resulting
-// CostEstimate, rounded up to the nearest integer.
-func (se SizeEstimate) MultiplyByCostFactor(costPerUnit float64) CostEstimate {
- return CostEstimate{
- multiplyByCostFactor(se.Min, costPerUnit),
- multiplyByCostFactor(se.Max, costPerUnit),
- }
-}
-
-// MultiplyByCost multiplies by the cost and returns the product.
-// If multiply would result in a uint64 overflow, the result is math.MaxUint64.
-func (se SizeEstimate) MultiplyByCost(cost CostEstimate) CostEstimate {
- return CostEstimate{
- multiplyUint64NoOverflow(se.Min, cost.Min),
- multiplyUint64NoOverflow(se.Max, cost.Max),
- }
-}
-
-// Union returns a SizeEstimate that encompasses both the receiver and the input SizeEstimate.
-func (se SizeEstimate) Union(size SizeEstimate) SizeEstimate {
- result := se
- if size.Min < result.Min {
- result.Min = size.Min
- }
- if size.Max > result.Max {
- result.Max = size.Max
- }
- return result
-}
-
-// CostEstimate represents an estimated cost range and provides add and multiply operations
-// that do not overflow.
-type CostEstimate struct {
- Min, Max uint64
-}
-
-// Add adds the costs and returns the sum.
-// If add would result in a uint64 overflow for the min or max, the value is set to math.MaxUint64.
-func (ce CostEstimate) Add(cost CostEstimate) CostEstimate {
- return CostEstimate{
- addUint64NoOverflow(ce.Min, cost.Min),
- addUint64NoOverflow(ce.Max, cost.Max),
- }
-}
-
-// Multiply multiplies by the cost and returns the product.
-// If multiply would result in a uint64 overflow, the result is math.MaxUint64.
-func (ce CostEstimate) Multiply(cost CostEstimate) CostEstimate {
- return CostEstimate{
- multiplyUint64NoOverflow(ce.Min, cost.Min),
- multiplyUint64NoOverflow(ce.Max, cost.Max),
- }
-}
-
-// MultiplyByCostFactor multiplies a CostEstimate by a cost factor and returns the resulting
-// CostEstimate, rounded up to the nearest integer.
-func (ce CostEstimate) MultiplyByCostFactor(costPerUnit float64) CostEstimate {
- return CostEstimate{
- multiplyByCostFactor(ce.Min, costPerUnit),
- multiplyByCostFactor(ce.Max, costPerUnit),
- }
-}
-
-// Union returns a CostEstimate that encompasses both the receiver and the input CostEstimate.
-func (ce CostEstimate) Union(size CostEstimate) CostEstimate {
- result := ce
- if size.Min < result.Min {
- result.Min = size.Min
- }
- if size.Max > result.Max {
- result.Max = size.Max
- }
- return result
-}
-
-// addUint64NoOverflow adds non-negative ints. If the result exceeds math.MaxUint64, math.MaxUint64
-// is returned.
-func addUint64NoOverflow(x, y uint64) uint64 {
- if y > 0 && x > math.MaxUint64-y {
- return math.MaxUint64
- }
- return x + y
-}
-
-// multiplyUint64NoOverflow multiplies non-negative ints. If the result exceeds math.MaxUint64, math.MaxUint64
-// is returned.
-func multiplyUint64NoOverflow(x, y uint64) uint64 {
- if y != 0 && x > math.MaxUint64/y {
- return math.MaxUint64
- }
- return x * y
-}
-
-// multiplyByCostFactor multiplies an integer by a float cost factor and returns the nearest integer value, rounded up.
-func multiplyByCostFactor(x uint64, y float64) uint64 {
- xFloat := float64(x)
- if xFloat > 0 && y > 0 && xFloat > math.MaxUint64/y {
- return math.MaxUint64
- }
- ceil := math.Ceil(xFloat * y)
- if ceil >= doubleTwoTo64 {
- return math.MaxUint64
- }
- return uint64(ceil)
-}
-
-var (
- selectAndIdentCost = CostEstimate{Min: common.SelectAndIdentCost, Max: common.SelectAndIdentCost}
- constCost = CostEstimate{Min: common.ConstCost, Max: common.ConstCost}
-
- createListBaseCost = CostEstimate{Min: common.ListCreateBaseCost, Max: common.ListCreateBaseCost}
- createMapBaseCost = CostEstimate{Min: common.MapCreateBaseCost, Max: common.MapCreateBaseCost}
- createMessageBaseCost = CostEstimate{Min: common.StructCreateBaseCost, Max: common.StructCreateBaseCost}
-)
-
-type coster struct {
- // exprPath maps from Expr Id to field path.
- exprPath map[int64][]string
- // iterRanges tracks the iterRange of each iterVar.
- iterRanges iterRangeScopes
- // computedSizes tracks the computed sizes of call results.
- computedSizes map[int64]SizeEstimate
- checkedAST *ast.AST
- estimator CostEstimator
- overloadEstimators map[string]FunctionEstimator
- // presenceTestCost will either be a zero or one based on whether has() macros count against cost computations.
- presenceTestCost CostEstimate
-}
-
-// Use a stack of iterVar -> iterRange Expr Ids to handle shadowed variable names.
-type iterRangeScopes map[string][]int64
-
-func (vs iterRangeScopes) push(varName string, expr ast.Expr) {
- vs[varName] = append(vs[varName], expr.ID())
-}
-
-func (vs iterRangeScopes) pop(varName string) {
- varStack := vs[varName]
- vs[varName] = varStack[:len(varStack)-1]
-}
-
-func (vs iterRangeScopes) peek(varName string) (int64, bool) {
- varStack := vs[varName]
- if len(varStack) > 0 {
- return varStack[len(varStack)-1], true
- }
- return 0, false
-}
-
-// CostOption configures flags which affect cost computations.
-type CostOption func(*coster) error
-
-// PresenceTestHasCost determines whether presence testing has a cost of one or zero.
-//
-// Defaults to a presence test cost of one.
-func PresenceTestHasCost(hasCost bool) CostOption {
- return func(c *coster) error {
- if hasCost {
- c.presenceTestCost = selectAndIdentCost
- return nil
- }
- c.presenceTestCost = CostEstimate{Min: 0, Max: 0}
- return nil
- }
-}
-
-// FunctionEstimator provides a CallEstimate given the target and arguments for a specific function, overload pair.
-type FunctionEstimator func(estimator CostEstimator, target *AstNode, args []AstNode) *CallEstimate
-
-// OverloadCostEstimate binds a FunctionEstimator to a specific function overload ID.
-//
-// When an OverloadCostEstimate is provided, it will override the cost calculation of the CostEstimator provided to
-// the Cost() call.
-func OverloadCostEstimate(overloadID string, functionCoster FunctionEstimator) CostOption {
- return func(c *coster) error {
- c.overloadEstimators[overloadID] = functionCoster
- return nil
- }
-}
-
-// Cost estimates the cost of the parsed and type checked CEL expression.
-func Cost(checked *ast.AST, estimator CostEstimator, opts ...CostOption) (CostEstimate, error) {
- c := &coster{
- checkedAST: checked,
- estimator: estimator,
- overloadEstimators: map[string]FunctionEstimator{},
- exprPath: map[int64][]string{},
- iterRanges: map[string][]int64{},
- computedSizes: map[int64]SizeEstimate{},
- presenceTestCost: CostEstimate{Min: 1, Max: 1},
- }
- for _, opt := range opts {
- err := opt(c)
- if err != nil {
- return CostEstimate{}, err
- }
- }
- return c.cost(checked.Expr()), nil
-}
-
-func (c *coster) cost(e ast.Expr) CostEstimate {
- if e == nil {
- return CostEstimate{}
- }
- var cost CostEstimate
- switch e.Kind() {
- case ast.LiteralKind:
- cost = constCost
- case ast.IdentKind:
- cost = c.costIdent(e)
- case ast.SelectKind:
- cost = c.costSelect(e)
- case ast.CallKind:
- cost = c.costCall(e)
- case ast.ListKind:
- cost = c.costCreateList(e)
- case ast.MapKind:
- cost = c.costCreateMap(e)
- case ast.StructKind:
- cost = c.costCreateStruct(e)
- case ast.ComprehensionKind:
- cost = c.costComprehension(e)
- default:
- return CostEstimate{}
- }
- return cost
-}
-
-func (c *coster) costIdent(e ast.Expr) CostEstimate {
- identName := e.AsIdent()
- // build and track the field path
- if iterRange, ok := c.iterRanges.peek(identName); ok {
- switch c.checkedAST.GetType(iterRange).Kind() {
- case types.ListKind:
- c.addPath(e, append(c.exprPath[iterRange], "@items"))
- case types.MapKind:
- c.addPath(e, append(c.exprPath[iterRange], "@keys"))
- }
- } else {
- c.addPath(e, []string{identName})
- }
-
- return selectAndIdentCost
-}
-
-func (c *coster) costSelect(e ast.Expr) CostEstimate {
- sel := e.AsSelect()
- var sum CostEstimate
- if sel.IsTestOnly() {
- // recurse, but do not add any cost
- // this is equivalent to how evalTestOnly increments the runtime cost counter
- // but does not add any additional cost for the qualifier, except here we do
- // the reverse (ident adds cost)
- sum = sum.Add(c.presenceTestCost)
- sum = sum.Add(c.cost(sel.Operand()))
- return sum
- }
- sum = sum.Add(c.cost(sel.Operand()))
- targetType := c.getType(sel.Operand())
- switch targetType.Kind() {
- case types.MapKind, types.StructKind, types.TypeParamKind:
- sum = sum.Add(selectAndIdentCost)
- }
-
- // build and track the field path
- c.addPath(e, append(c.getPath(sel.Operand()), sel.FieldName()))
-
- return sum
-}
-
-func (c *coster) costCall(e ast.Expr) CostEstimate {
- call := e.AsCall()
- args := call.Args()
-
- var sum CostEstimate
-
- argTypes := make([]AstNode, len(args))
- argCosts := make([]CostEstimate, len(args))
- for i, arg := range args {
- argCosts[i] = c.cost(arg)
- argTypes[i] = c.newAstNode(arg)
- }
-
- overloadIDs := c.checkedAST.GetOverloadIDs(e.ID())
- if len(overloadIDs) == 0 {
- return CostEstimate{}
- }
- var targetType AstNode
- if call.IsMemberFunction() {
- sum = sum.Add(c.cost(call.Target()))
- targetType = c.newAstNode(call.Target())
- }
- // Pick a cost estimate range that covers all the overload cost estimation ranges
- fnCost := CostEstimate{Min: uint64(math.MaxUint64), Max: 0}
- var resultSize *SizeEstimate
- for _, overload := range overloadIDs {
- overloadCost := c.functionCost(call.FunctionName(), overload, &targetType, argTypes, argCosts)
- fnCost = fnCost.Union(overloadCost.CostEstimate)
- if overloadCost.ResultSize != nil {
- if resultSize == nil {
- resultSize = overloadCost.ResultSize
- } else {
- size := resultSize.Union(*overloadCost.ResultSize)
- resultSize = &size
- }
- }
- // build and track the field path for index operations
- switch overload {
- case overloads.IndexList:
- if len(args) > 0 {
- c.addPath(e, append(c.getPath(args[0]), "@items"))
- }
- case overloads.IndexMap:
- if len(args) > 0 {
- c.addPath(e, append(c.getPath(args[0]), "@values"))
- }
- }
- }
- if resultSize != nil {
- c.computedSizes[e.ID()] = *resultSize
- }
- return sum.Add(fnCost)
-}
-
-func (c *coster) costCreateList(e ast.Expr) CostEstimate {
- create := e.AsList()
- var sum CostEstimate
- for _, e := range create.Elements() {
- sum = sum.Add(c.cost(e))
- }
- return sum.Add(createListBaseCost)
-}
-
-func (c *coster) costCreateMap(e ast.Expr) CostEstimate {
- mapVal := e.AsMap()
- var sum CostEstimate
- for _, ent := range mapVal.Entries() {
- entry := ent.AsMapEntry()
- sum = sum.Add(c.cost(entry.Key()))
- sum = sum.Add(c.cost(entry.Value()))
- }
- return sum.Add(createMapBaseCost)
-}
-
-func (c *coster) costCreateStruct(e ast.Expr) CostEstimate {
- msgVal := e.AsStruct()
- var sum CostEstimate
- for _, ent := range msgVal.Fields() {
- field := ent.AsStructField()
- sum = sum.Add(c.cost(field.Value()))
- }
- return sum.Add(createMessageBaseCost)
-}
-
-func (c *coster) costComprehension(e ast.Expr) CostEstimate {
- comp := e.AsComprehension()
- var sum CostEstimate
- sum = sum.Add(c.cost(comp.IterRange()))
- sum = sum.Add(c.cost(comp.AccuInit()))
-
- // Track the iterRange of each IterVar for field path construction
- c.iterRanges.push(comp.IterVar(), comp.IterRange())
- loopCost := c.cost(comp.LoopCondition())
- stepCost := c.cost(comp.LoopStep())
- c.iterRanges.pop(comp.IterVar())
- sum = sum.Add(c.cost(comp.Result()))
- rangeCnt := c.sizeEstimate(c.newAstNode(comp.IterRange()))
- rangeCost := rangeCnt.MultiplyByCost(stepCost.Add(loopCost))
- sum = sum.Add(rangeCost)
-
- return sum
-}
-
-func (c *coster) sizeEstimate(t AstNode) SizeEstimate {
- if l := t.ComputedSize(); l != nil {
- return *l
- }
- if l := c.estimator.EstimateSize(t); l != nil {
- return *l
- }
- // return an estimate of 1 for return types with a set (fixed)
- // length, since strings/bytes/more complex objects could be of
- // variable length
- if isScalar(t.Type()) {
- // TODO: since the logic for size estimation is split between
- // ComputedSize and isScalar, changing one will likely require changing
- // the other, so they should be merged in the future if possible
- return SizeEstimate{Min: 1, Max: 1}
- }
- return SizeEstimate{Min: 0, Max: math.MaxUint64}
-}
-
-func (c *coster) functionCost(function, overloadID string, target *AstNode, args []AstNode, argCosts []CostEstimate) CallEstimate {
- argCostSum := func() CostEstimate {
- var sum CostEstimate
- for _, a := range argCosts {
- sum = sum.Add(a)
- }
- return sum
- }
- if len(c.overloadEstimators) != 0 {
- if estimator, found := c.overloadEstimators[overloadID]; found {
- if est := estimator(c.estimator, target, args); est != nil {
- callEst := *est
- return CallEstimate{CostEstimate: callEst.Add(argCostSum()), ResultSize: est.ResultSize}
- }
- }
- }
- if est := c.estimator.EstimateCallCost(function, overloadID, target, args); est != nil {
- callEst := *est
- return CallEstimate{CostEstimate: callEst.Add(argCostSum()), ResultSize: est.ResultSize}
- }
- switch overloadID {
- // O(n) functions
- case overloads.ExtFormatString:
- if target != nil {
- // ResultSize not calculated because we can't bound the max size.
- return CallEstimate{CostEstimate: c.sizeEstimate(*target).MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum())}
- }
- case overloads.StringToBytes:
- if len(args) == 1 {
- sz := c.sizeEstimate(args[0])
- // ResultSize max is when each char converts to 4 bytes.
- return CallEstimate{CostEstimate: sz.MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum()), ResultSize: &SizeEstimate{Min: sz.Min, Max: sz.Max * 4}}
- }
- case overloads.BytesToString:
- if len(args) == 1 {
- sz := c.sizeEstimate(args[0])
- // ResultSize min is when 4 bytes convert to 1 char.
- return CallEstimate{CostEstimate: sz.MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum()), ResultSize: &SizeEstimate{Min: sz.Min / 4, Max: sz.Max}}
- }
- case overloads.ExtQuoteString:
- if len(args) == 1 {
- sz := c.sizeEstimate(args[0])
- // ResultSize max is when each char is escaped. 2 quote chars always added.
- return CallEstimate{CostEstimate: sz.MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum()), ResultSize: &SizeEstimate{Min: sz.Min + 2, Max: sz.Max*2 + 2}}
- }
- case overloads.StartsWithString, overloads.EndsWithString:
- if len(args) == 1 {
- return CallEstimate{CostEstimate: c.sizeEstimate(args[0]).MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum())}
- }
- case overloads.InList:
- // If a list is composed entirely of constant values this is O(1), but we don't account for that here.
- // We just assume all list containment checks are O(n).
- if len(args) == 2 {
- return CallEstimate{CostEstimate: c.sizeEstimate(args[1]).MultiplyByCostFactor(1).Add(argCostSum())}
- }
- // O(nm) functions
- case overloads.MatchesString:
- // https://swtch.com/~rsc/regexp/regexp1.html applies to RE2 implementation supported by CEL
- if target != nil && len(args) == 1 {
- // Add one to the string length for purposes of cost calculation to prevent the product of string and regex
- // from being 0 in the case where the string is empty but the regex is still expensive.
- strCost := c.sizeEstimate(*target).Add(SizeEstimate{Min: 1, Max: 1}).MultiplyByCostFactor(common.StringTraversalCostFactor)
- // We don't know how many expressions are in the regex, just the string length (a huge
- // improvement here would be to somehow get a count of the number of expressions in the regex or
- // how many states are in the regex state machine and use that to measure regex cost).
- // For now, we're making a guess that each expression in a regex is typically at least 4 chars
- // in length.
- regexCost := c.sizeEstimate(args[0]).MultiplyByCostFactor(common.RegexStringLengthCostFactor)
- return CallEstimate{CostEstimate: strCost.Multiply(regexCost).Add(argCostSum())}
- }
- case overloads.ContainsString:
- if target != nil && len(args) == 1 {
- strCost := c.sizeEstimate(*target).MultiplyByCostFactor(common.StringTraversalCostFactor)
- substrCost := c.sizeEstimate(args[0]).MultiplyByCostFactor(common.StringTraversalCostFactor)
- return CallEstimate{CostEstimate: strCost.Multiply(substrCost).Add(argCostSum())}
- }
- case overloads.LogicalOr, overloads.LogicalAnd:
- lhs := argCosts[0]
- rhs := argCosts[1]
- // min cost is min of LHS for short circuited && or ||
- argCost := CostEstimate{Min: lhs.Min, Max: lhs.Add(rhs).Max}
- return CallEstimate{CostEstimate: argCost}
- case overloads.Conditional:
- size := c.sizeEstimate(args[1]).Union(c.sizeEstimate(args[2]))
- conditionalCost := argCosts[0]
- ifTrueCost := argCosts[1]
- ifFalseCost := argCosts[2]
- argCost := conditionalCost.Add(ifTrueCost.Union(ifFalseCost))
- return CallEstimate{CostEstimate: argCost, ResultSize: &size}
- case overloads.AddString, overloads.AddBytes, overloads.AddList:
- if len(args) == 2 {
- lhsSize := c.sizeEstimate(args[0])
- rhsSize := c.sizeEstimate(args[1])
- resultSize := lhsSize.Add(rhsSize)
- switch overloadID {
- case overloads.AddList:
- // list concatenation is O(1), but we handle it here to track size
- return CallEstimate{CostEstimate: CostEstimate{Min: 1, Max: 1}.Add(argCostSum()), ResultSize: &resultSize}
- default:
- return CallEstimate{CostEstimate: resultSize.MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum()), ResultSize: &resultSize}
- }
- }
- case overloads.LessString, overloads.GreaterString, overloads.LessEqualsString, overloads.GreaterEqualsString,
- overloads.LessBytes, overloads.GreaterBytes, overloads.LessEqualsBytes, overloads.GreaterEqualsBytes,
- overloads.Equals, overloads.NotEquals:
- lhsCost := c.sizeEstimate(args[0])
- rhsCost := c.sizeEstimate(args[1])
- min := uint64(0)
- smallestMax := lhsCost.Max
- if rhsCost.Max < smallestMax {
- smallestMax = rhsCost.Max
- }
- if smallestMax > 0 {
- min = 1
- }
- // equality of 2 scalar values results in a cost of 1
- return CallEstimate{CostEstimate: CostEstimate{Min: min, Max: smallestMax}.MultiplyByCostFactor(common.StringTraversalCostFactor).Add(argCostSum())}
- }
- // O(1) functions
- // See CostTracker.costCall for more details about O(1) cost calculations
-
- // Benchmarks suggest that most of the other operations take +/- 50% of a base cost unit
- // which on an Intel Xeon 2.20GHz CPU is 50ns.
- return CallEstimate{CostEstimate: CostEstimate{Min: 1, Max: 1}.Add(argCostSum())}
-}
-
-func (c *coster) getType(e ast.Expr) *types.Type {
- return c.checkedAST.GetType(e.ID())
-}
-
-func (c *coster) getPath(e ast.Expr) []string {
- return c.exprPath[e.ID()]
-}
-
-func (c *coster) addPath(e ast.Expr, path []string) {
- c.exprPath[e.ID()] = path
-}
-
-func (c *coster) newAstNode(e ast.Expr) *astNode {
- path := c.getPath(e)
- if len(path) > 0 && path[0] == parser.AccumulatorName {
- // only provide paths to root vars; omit accumulator vars
- path = nil
- }
- var derivedSize *SizeEstimate
- if size, ok := c.computedSizes[e.ID()]; ok {
- derivedSize = &size
- }
- return &astNode{
- path: path,
- t: c.getType(e),
- expr: e,
- derivedSize: derivedSize}
-}
-
-// isScalar returns true if the given type is known to be of a constant size at
-// compile time. isScalar will return false for strings (they are variable-width)
-// in addition to protobuf.Any and protobuf.Value (their size is not knowable at compile time).
-func isScalar(t *types.Type) bool {
- switch t.Kind() {
- case types.BoolKind, types.DoubleKind, types.DurationKind, types.IntKind, types.TimestampKind, types.UintKind:
- return true
- }
- return false
-}
-
-var (
- doubleTwoTo64 = math.Ldexp(1.0, 64)
-)
diff --git a/vendor/github.com/google/cel-go/checker/decls/BUILD.bazel b/vendor/github.com/google/cel-go/checker/decls/BUILD.bazel
deleted file mode 100644
index a6b0be292..000000000
--- a/vendor/github.com/google/cel-go/checker/decls/BUILD.bazel
+++ /dev/null
@@ -1,19 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "decls.go",
- ],
- importpath = "github.com/google/cel-go/checker/decls",
- deps = [
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//types/known/emptypb:go_default_library",
- "@org_golang_google_protobuf//types/known/structpb:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/checker/decls/decls.go b/vendor/github.com/google/cel-go/checker/decls/decls.go
deleted file mode 100644
index 0d91bef51..000000000
--- a/vendor/github.com/google/cel-go/checker/decls/decls.go
+++ /dev/null
@@ -1,237 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package decls provides helpers for creating variable and function declarations.
-package decls
-
-import (
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
- emptypb "google.golang.org/protobuf/types/known/emptypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
-)
-
-var (
- // Error type used to communicate issues during type-checking.
- Error = &exprpb.Type{
- TypeKind: &exprpb.Type_Error{
- Error: &emptypb.Empty{}}}
-
- // Dyn is a top-type used to represent any value.
- Dyn = &exprpb.Type{
- TypeKind: &exprpb.Type_Dyn{
- Dyn: &emptypb.Empty{}}}
-)
-
-// Commonly used types.
-var (
- Bool = NewPrimitiveType(exprpb.Type_BOOL)
- Bytes = NewPrimitiveType(exprpb.Type_BYTES)
- Double = NewPrimitiveType(exprpb.Type_DOUBLE)
- Int = NewPrimitiveType(exprpb.Type_INT64)
- Null = &exprpb.Type{
- TypeKind: &exprpb.Type_Null{
- Null: structpb.NullValue_NULL_VALUE}}
- String = NewPrimitiveType(exprpb.Type_STRING)
- Uint = NewPrimitiveType(exprpb.Type_UINT64)
-)
-
-// Well-known types.
-// TODO: Replace with an abstract type registry.
-var (
- Any = NewWellKnownType(exprpb.Type_ANY)
- Duration = NewWellKnownType(exprpb.Type_DURATION)
- Timestamp = NewWellKnownType(exprpb.Type_TIMESTAMP)
-)
-
-// NewAbstractType creates an abstract type declaration which references a proto
-// message name and may also include type parameters.
-func NewAbstractType(name string, paramTypes ...*exprpb.Type) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_AbstractType_{
- AbstractType: &exprpb.Type_AbstractType{
- Name: name,
- ParameterTypes: paramTypes}}}
-}
-
-// NewOptionalType constructs an abstract type indicating that the parameterized type
-// may be contained within the object.
-func NewOptionalType(paramType *exprpb.Type) *exprpb.Type {
- return NewAbstractType("optional", paramType)
-}
-
-// NewFunctionType creates a function invocation contract, typically only used
-// by type-checking steps after overload resolution.
-func NewFunctionType(resultType *exprpb.Type,
- argTypes ...*exprpb.Type) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_Function{
- Function: &exprpb.Type_FunctionType{
- ResultType: resultType,
- ArgTypes: argTypes}}}
-}
-
-// NewFunction creates a named function declaration with one or more overloads.
-func NewFunction(name string,
- overloads ...*exprpb.Decl_FunctionDecl_Overload) *exprpb.Decl {
- return &exprpb.Decl{
- Name: name,
- DeclKind: &exprpb.Decl_Function{
- Function: &exprpb.Decl_FunctionDecl{
- Overloads: overloads}}}
-}
-
-// NewIdent creates a named identifier declaration with an optional literal
-// value.
-//
-// Literal values are typically only associated with enum identifiers.
-//
-// Deprecated: Use NewVar or NewConst instead.
-func NewIdent(name string, t *exprpb.Type, v *exprpb.Constant) *exprpb.Decl {
- return &exprpb.Decl{
- Name: name,
- DeclKind: &exprpb.Decl_Ident{
- Ident: &exprpb.Decl_IdentDecl{
- Type: t,
- Value: v}}}
-}
-
-// NewConst creates a constant identifier with a CEL constant literal value.
-func NewConst(name string, t *exprpb.Type, v *exprpb.Constant) *exprpb.Decl {
- return NewIdent(name, t, v)
-}
-
-// NewVar creates a variable identifier.
-func NewVar(name string, t *exprpb.Type) *exprpb.Decl {
- return NewIdent(name, t, nil)
-}
-
-// NewInstanceOverload creates an instance function overload contract.
-// The first element of argTypes is the instance.
-func NewInstanceOverload(id string, argTypes []*exprpb.Type,
- resultType *exprpb.Type) *exprpb.Decl_FunctionDecl_Overload {
- return &exprpb.Decl_FunctionDecl_Overload{
- OverloadId: id,
- ResultType: resultType,
- Params: argTypes,
- IsInstanceFunction: true}
-}
-
-// NewListType generates a new list with elements of a certain type.
-func NewListType(elem *exprpb.Type) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_ListType_{
- ListType: &exprpb.Type_ListType{
- ElemType: elem}}}
-}
-
-// NewMapType generates a new map with typed keys and values.
-func NewMapType(key *exprpb.Type, value *exprpb.Type) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_MapType_{
- MapType: &exprpb.Type_MapType{
- KeyType: key,
- ValueType: value}}}
-}
-
-// NewObjectType creates an object type for a qualified type name.
-func NewObjectType(typeName string) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_MessageType{
- MessageType: typeName}}
-}
-
-// NewOverload creates a function overload declaration which contains a unique
-// overload id as well as the expected argument and result types. Overloads
-// must be aggregated within a Function declaration.
-func NewOverload(id string, argTypes []*exprpb.Type,
- resultType *exprpb.Type) *exprpb.Decl_FunctionDecl_Overload {
- return &exprpb.Decl_FunctionDecl_Overload{
- OverloadId: id,
- ResultType: resultType,
- Params: argTypes,
- IsInstanceFunction: false}
-}
-
-// NewParameterizedInstanceOverload creates a parametric function instance overload type.
-func NewParameterizedInstanceOverload(id string,
- argTypes []*exprpb.Type,
- resultType *exprpb.Type,
- typeParams []string) *exprpb.Decl_FunctionDecl_Overload {
- return &exprpb.Decl_FunctionDecl_Overload{
- OverloadId: id,
- ResultType: resultType,
- Params: argTypes,
- TypeParams: typeParams,
- IsInstanceFunction: true}
-}
-
-// NewParameterizedOverload creates a parametric function overload type.
-func NewParameterizedOverload(id string,
- argTypes []*exprpb.Type,
- resultType *exprpb.Type,
- typeParams []string) *exprpb.Decl_FunctionDecl_Overload {
- return &exprpb.Decl_FunctionDecl_Overload{
- OverloadId: id,
- ResultType: resultType,
- Params: argTypes,
- TypeParams: typeParams,
- IsInstanceFunction: false}
-}
-
-// NewPrimitiveType creates a type for a primitive value. See the var declarations
-// for Int, Uint, etc.
-func NewPrimitiveType(primitive exprpb.Type_PrimitiveType) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_Primitive{
- Primitive: primitive}}
-}
-
-// NewTypeType creates a new type designating a type.
-func NewTypeType(nested *exprpb.Type) *exprpb.Type {
- if nested == nil {
- // must set the nested field for a valid oneof option
- nested = &exprpb.Type{}
- }
- return &exprpb.Type{
- TypeKind: &exprpb.Type_Type{
- Type: nested}}
-}
-
-// NewTypeParamType creates a type corresponding to a named, contextual parameter.
-func NewTypeParamType(name string) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_TypeParam{
- TypeParam: name}}
-}
-
-// NewWellKnownType creates a type corresponding to a protobuf well-known type
-// value.
-func NewWellKnownType(wellKnown exprpb.Type_WellKnownType) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_WellKnown{
- WellKnown: wellKnown}}
-}
-
-// NewWrapperType creates a wrapped primitive type instance. Wrapped types
-// are roughly equivalent to a nullable, or optionally valued type.
-func NewWrapperType(wrapped *exprpb.Type) *exprpb.Type {
- primitive := wrapped.GetPrimitive()
- if primitive == exprpb.Type_PRIMITIVE_TYPE_UNSPECIFIED {
- // TODO: return an error
- panic("Wrapped type must be a primitive")
- }
- return &exprpb.Type{
- TypeKind: &exprpb.Type_Wrapper{
- Wrapper: primitive}}
-}
diff --git a/vendor/github.com/google/cel-go/checker/env.go b/vendor/github.com/google/cel-go/checker/env.go
deleted file mode 100644
index 70682b17c..000000000
--- a/vendor/github.com/google/cel-go/checker/env.go
+++ /dev/null
@@ -1,276 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-import (
- "fmt"
- "strings"
-
- "github.com/google/cel-go/common/containers"
- "github.com/google/cel-go/common/decls"
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/parser"
-)
-
-type aggregateLiteralElementType int
-
-const (
- dynElementType aggregateLiteralElementType = iota
- homogenousElementType aggregateLiteralElementType = 1 << iota
-)
-
-var (
- crossTypeNumericComparisonOverloads = map[string]struct{}{
- // double <-> int | uint
- overloads.LessDoubleInt64: {},
- overloads.LessDoubleUint64: {},
- overloads.LessEqualsDoubleInt64: {},
- overloads.LessEqualsDoubleUint64: {},
- overloads.GreaterDoubleInt64: {},
- overloads.GreaterDoubleUint64: {},
- overloads.GreaterEqualsDoubleInt64: {},
- overloads.GreaterEqualsDoubleUint64: {},
- // int <-> double | uint
- overloads.LessInt64Double: {},
- overloads.LessInt64Uint64: {},
- overloads.LessEqualsInt64Double: {},
- overloads.LessEqualsInt64Uint64: {},
- overloads.GreaterInt64Double: {},
- overloads.GreaterInt64Uint64: {},
- overloads.GreaterEqualsInt64Double: {},
- overloads.GreaterEqualsInt64Uint64: {},
- // uint <-> double | int
- overloads.LessUint64Double: {},
- overloads.LessUint64Int64: {},
- overloads.LessEqualsUint64Double: {},
- overloads.LessEqualsUint64Int64: {},
- overloads.GreaterUint64Double: {},
- overloads.GreaterUint64Int64: {},
- overloads.GreaterEqualsUint64Double: {},
- overloads.GreaterEqualsUint64Int64: {},
- }
-)
-
-// Env is the environment for type checking.
-//
-// The Env is composed of a container, type provider, declarations, and other related objects
-// which can be used to assist with type-checking.
-type Env struct {
- container *containers.Container
- provider types.Provider
- declarations *Scopes
- aggLitElemType aggregateLiteralElementType
- filteredOverloadIDs map[string]struct{}
-}
-
-// NewEnv returns a new *Env with the given parameters.
-func NewEnv(container *containers.Container, provider types.Provider, opts ...Option) (*Env, error) {
- declarations := newScopes()
- declarations.Push()
-
- envOptions := &options{}
- for _, opt := range opts {
- if err := opt(envOptions); err != nil {
- return nil, err
- }
- }
- aggLitElemType := dynElementType
- if envOptions.homogeneousAggregateLiterals {
- aggLitElemType = homogenousElementType
- }
- filteredOverloadIDs := crossTypeNumericComparisonOverloads
- if envOptions.crossTypeNumericComparisons {
- filteredOverloadIDs = make(map[string]struct{})
- }
- if envOptions.validatedDeclarations != nil {
- declarations = envOptions.validatedDeclarations.Copy()
- }
- return &Env{
- container: container,
- provider: provider,
- declarations: declarations,
- aggLitElemType: aggLitElemType,
- filteredOverloadIDs: filteredOverloadIDs,
- }, nil
-}
-
-// AddIdents configures the checker with a list of variable declarations.
-//
-// If there are overlapping declarations, the method will error.
-func (e *Env) AddIdents(declarations ...*decls.VariableDecl) error {
- errMsgs := make([]errorMsg, 0)
- for _, d := range declarations {
- errMsgs = append(errMsgs, e.addIdent(d))
- }
- return formatError(errMsgs)
-}
-
-// AddFunctions configures the checker with a list of function declarations.
-//
-// If there are overlapping declarations, the method will error.
-func (e *Env) AddFunctions(declarations ...*decls.FunctionDecl) error {
- errMsgs := make([]errorMsg, 0)
- for _, d := range declarations {
- errMsgs = append(errMsgs, e.setFunction(d)...)
- }
- return formatError(errMsgs)
-}
-
-// LookupIdent returns the VariableDecl for the given name as an identifier in the Env.
-// Returns nil if no such identifier is found in the Env.
-func (e *Env) LookupIdent(name string) *decls.VariableDecl {
- for _, candidate := range e.container.ResolveCandidateNames(name) {
- if ident := e.declarations.FindIdent(candidate); ident != nil {
- return ident
- }
-
- // Next try to import the name as a reference to a message type. If found,
-   // the declaration is added to the outermost (global) scope of the
-   // environment, so that subsequent lookups resolve it faster.
- if t, found := e.provider.FindStructType(candidate); found {
- decl := decls.NewVariable(candidate, t)
- e.declarations.AddIdent(decl)
- return decl
- }
-
-   // Next try to import the name as an enum value by splitting it into a type prefix
-   // and an enum value name.
- if enumValue := e.provider.EnumValue(candidate); enumValue.Type() != types.ErrType {
- decl := decls.NewConstant(candidate, types.IntType, enumValue)
- e.declarations.AddIdent(decl)
- return decl
- }
- }
- return nil
-}
-
-// LookupFunction returns the FunctionDecl for the given name as a function in the Env.
-// Returns nil if no such function is found in env.
-func (e *Env) LookupFunction(name string) *decls.FunctionDecl {
- for _, candidate := range e.container.ResolveCandidateNames(name) {
- if fn := e.declarations.FindFunction(candidate); fn != nil {
- return fn
- }
- }
- return nil
-}
-
-// setFunction adds the function Decl to the Env.
-// Adds a function decl if one doesn't already exist, then adds all overloads from the Decl.
-// If an overload overlaps with an existing overload or macro, an error is returned instead.
-func (e *Env) setFunction(fn *decls.FunctionDecl) []errorMsg {
- errMsgs := make([]errorMsg, 0)
- current := e.declarations.FindFunction(fn.Name())
- if current != nil {
- var err error
- current, err = current.Merge(fn)
- if err != nil {
- return append(errMsgs, errorMsg(err.Error()))
- }
- } else {
- current = fn
- }
- for _, overload := range current.OverloadDecls() {
- for _, macro := range parser.AllMacros {
- if macro.Function() == current.Name() &&
- macro.IsReceiverStyle() == overload.IsMemberFunction() &&
- macro.ArgCount() == len(overload.ArgTypes()) {
- errMsgs = append(errMsgs, overlappingMacroError(current.Name(), macro.ArgCount()))
- }
- }
- if len(errMsgs) > 0 {
- return errMsgs
- }
- }
- e.declarations.SetFunction(current)
- return errMsgs
-}
-
-// addIdent adds the Decl to the declarations in the Env.
-// Returns a non-empty errorMsg if the identifier is already declared in the scope.
-func (e *Env) addIdent(decl *decls.VariableDecl) errorMsg {
- current := e.declarations.FindIdentInScope(decl.Name())
- if current != nil {
- if current.DeclarationIsEquivalent(decl) {
- return ""
- }
- return overlappingIdentifierError(decl.Name())
- }
- e.declarations.AddIdent(decl)
- return ""
-}
-
-// isOverloadDisabled returns whether the overloadID is disabled in the current environment.
-func (e *Env) isOverloadDisabled(overloadID string) bool {
- _, found := e.filteredOverloadIDs[overloadID]
- return found
-}
-
-// validatedDeclarations returns a reference to the validated variable and function declaration scope stack.
-// must be copied before use.
-func (e *Env) validatedDeclarations() *Scopes {
- return e.declarations
-}
-
-// enterScope creates a new Env instance with a new innermost declaration scope.
-func (e *Env) enterScope() *Env {
- childDecls := e.declarations.Push()
- return &Env{
- declarations: childDecls,
- container: e.container,
- provider: e.provider,
- aggLitElemType: e.aggLitElemType,
- }
-}
-
-// exitScope creates a new Env instance with the nearest outer declaration scope.
-func (e *Env) exitScope() *Env {
- parentDecls := e.declarations.Pop()
- return &Env{
- declarations: parentDecls,
- container: e.container,
- provider: e.provider,
- aggLitElemType: e.aggLitElemType,
- }
-}
-
-// errorMsg is a type alias meant to represent error-based return values which
-// may be accumulated into an error at a later point in execution.
-type errorMsg string
-
-func overlappingIdentifierError(name string) errorMsg {
- return errorMsg(fmt.Sprintf("overlapping identifier for name '%s'", name))
-}
-
-func overlappingMacroError(name string, argCount int) errorMsg {
- return errorMsg(fmt.Sprintf(
- "overlapping macro for name '%s' with %d args", name, argCount))
-}
-
-func formatError(errMsgs []errorMsg) error {
- errStrs := make([]string, 0)
- if len(errMsgs) > 0 {
- for i := 0; i < len(errMsgs); i++ {
- if errMsgs[i] != "" {
- errStrs = append(errStrs, string(errMsgs[i]))
- }
- }
- }
- if len(errStrs) > 0 {
- return fmt.Errorf("%s", strings.Join(errStrs, "\n"))
- }
- return nil
-}
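
As a reference, a minimal sketch of wiring the checker Env defined above with its functional options; the nil container and provider are placeholders (real callers obtain them from the common/containers and common/types packages), and the variable name "x" is illustrative:

    // Sketch: constructing the type-checker Env with functional options.
    package main

    import (
    	"log"

    	"github.com/google/cel-go/checker"
    	"github.com/google/cel-go/common/containers"
    	"github.com/google/cel-go/common/decls"
    	"github.com/google/cel-go/common/types"
    )

    func main() {
    	var container *containers.Container // placeholder; e.g. the default container
    	var provider types.Provider         // placeholder; e.g. a type registry

    	env, err := checker.NewEnv(container, provider,
    		checker.CrossTypeNumericComparisons(true))
    	if err != nil {
    		log.Fatal(err)
    	}
    	// Declare a variable named "x" of type int; overlapping declarations error out.
    	if err := env.AddIdents(decls.NewVariable("x", types.IntType)); err != nil {
    		log.Fatal(err)
    	}
    }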
diff --git a/vendor/github.com/google/cel-go/checker/errors.go b/vendor/github.com/google/cel-go/checker/errors.go
deleted file mode 100644
index 8b3bf0b8b..000000000
--- a/vendor/github.com/google/cel-go/checker/errors.go
+++ /dev/null
@@ -1,88 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-import (
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/types"
-)
-
-// typeErrors is a specialization of Errors.
-type typeErrors struct {
- errs *common.Errors
-}
-
-func (e *typeErrors) fieldTypeMismatch(id int64, l common.Location, name string, field, value *types.Type) {
- e.errs.ReportErrorAtID(id, l, "expected type of field '%s' is '%s' but provided type is '%s'",
- name, FormatCELType(field), FormatCELType(value))
-}
-
-func (e *typeErrors) incompatibleType(id int64, l common.Location, ex ast.Expr, prev, next *types.Type) {
- e.errs.ReportErrorAtID(id, l,
- "incompatible type already exists for expression: %v(%d) old:%v, new:%v", ex, ex.ID(), prev, next)
-}
-
-func (e *typeErrors) noMatchingOverload(id int64, l common.Location, name string, args []*types.Type, isInstance bool) {
- signature := formatFunctionDeclType(nil, args, isInstance)
- e.errs.ReportErrorAtID(id, l, "found no matching overload for '%s' applied to '%s'", name, signature)
-}
-
-func (e *typeErrors) notAComprehensionRange(id int64, l common.Location, t *types.Type) {
- e.errs.ReportErrorAtID(id, l, "expression of type '%s' cannot be range of a comprehension (must be list, map, or dynamic)",
- FormatCELType(t))
-}
-
-func (e *typeErrors) notAnOptionalFieldSelection(id int64, l common.Location, field ast.Expr) {
- e.errs.ReportErrorAtID(id, l, "unsupported optional field selection: %v", field)
-}
-
-func (e *typeErrors) notAType(id int64, l common.Location, typeName string) {
- e.errs.ReportErrorAtID(id, l, "'%s' is not a type", typeName)
-}
-
-func (e *typeErrors) notAMessageType(id int64, l common.Location, typeName string) {
- e.errs.ReportErrorAtID(id, l, "'%s' is not a message type", typeName)
-}
-
-func (e *typeErrors) referenceRedefinition(id int64, l common.Location, ex ast.Expr, prev, next *ast.ReferenceInfo) {
- e.errs.ReportErrorAtID(id, l,
- "reference already exists for expression: %v(%d) old:%v, new:%v", ex, ex.ID(), prev, next)
-}
-
-func (e *typeErrors) typeDoesNotSupportFieldSelection(id int64, l common.Location, t *types.Type) {
- e.errs.ReportErrorAtID(id, l, "type '%s' does not support field selection", FormatCELType(t))
-}
-
-func (e *typeErrors) typeMismatch(id int64, l common.Location, expected, actual *types.Type) {
- e.errs.ReportErrorAtID(id, l, "expected type '%s' but found '%s'",
- FormatCELType(expected), FormatCELType(actual))
-}
-
-func (e *typeErrors) undefinedField(id int64, l common.Location, field string) {
- e.errs.ReportErrorAtID(id, l, "undefined field '%s'", field)
-}
-
-func (e *typeErrors) undeclaredReference(id int64, l common.Location, container string, name string) {
- e.errs.ReportErrorAtID(id, l, "undeclared reference to '%s' (in container '%s')", name, container)
-}
-
-func (e *typeErrors) unexpectedFailedResolution(id int64, l common.Location, typeName string) {
- e.errs.ReportErrorAtID(id, l, "unexpected failed resolution of '%s'", typeName)
-}
-
-func (e *typeErrors) unexpectedASTType(id int64, l common.Location, kind, typeName string) {
- e.errs.ReportErrorAtID(id, l, "unexpected %s type: %v", kind, typeName)
-}
diff --git a/vendor/github.com/google/cel-go/checker/format.go b/vendor/github.com/google/cel-go/checker/format.go
deleted file mode 100644
index 95842905e..000000000
--- a/vendor/github.com/google/cel-go/checker/format.go
+++ /dev/null
@@ -1,216 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-import (
- "fmt"
- "strings"
-
- chkdecls "github.com/google/cel-go/checker/decls"
- "github.com/google/cel-go/common/types"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-const (
- kindUnknown = iota + 1
- kindError
- kindFunction
- kindDyn
- kindPrimitive
- kindWellKnown
- kindWrapper
- kindNull
- kindAbstract
- kindType
- kindList
- kindMap
- kindObject
- kindTypeParam
-)
-
-// FormatCheckedType converts a type message into a string representation.
-func FormatCheckedType(t *exprpb.Type) string {
- switch kindOf(t) {
- case kindDyn:
- return "dyn"
- case kindFunction:
- return formatFunctionExprType(t.GetFunction().GetResultType(),
- t.GetFunction().GetArgTypes(),
- false)
- case kindList:
- return fmt.Sprintf("list(%s)", FormatCheckedType(t.GetListType().GetElemType()))
- case kindObject:
- return t.GetMessageType()
- case kindMap:
- return fmt.Sprintf("map(%s, %s)",
- FormatCheckedType(t.GetMapType().GetKeyType()),
- FormatCheckedType(t.GetMapType().GetValueType()))
- case kindNull:
- return "null"
- case kindPrimitive:
- switch t.GetPrimitive() {
- case exprpb.Type_UINT64:
- return "uint"
- case exprpb.Type_INT64:
- return "int"
- }
- return strings.Trim(strings.ToLower(t.GetPrimitive().String()), " ")
- case kindType:
- if t.GetType() == nil || t.GetType().GetTypeKind() == nil {
- return "type"
- }
- return fmt.Sprintf("type(%s)", FormatCheckedType(t.GetType()))
- case kindWellKnown:
- switch t.GetWellKnown() {
- case exprpb.Type_ANY:
- return "any"
- case exprpb.Type_DURATION:
- return "duration"
- case exprpb.Type_TIMESTAMP:
- return "timestamp"
- }
- case kindWrapper:
- return fmt.Sprintf("wrapper(%s)",
- FormatCheckedType(chkdecls.NewPrimitiveType(t.GetWrapper())))
- case kindError:
- return "!error!"
- case kindTypeParam:
- return t.GetTypeParam()
- case kindAbstract:
- at := t.GetAbstractType()
- params := at.GetParameterTypes()
- paramStrs := make([]string, len(params))
- for i, p := range params {
- paramStrs[i] = FormatCheckedType(p)
- }
- return fmt.Sprintf("%s(%s)", at.GetName(), strings.Join(paramStrs, ", "))
- }
- return t.String()
-}
-
-type formatter func(any) string
-
-// FormatCELType formats a types.Type value to a string representation.
-//
-// The type formatting is identical to FormatCheckedType.
-func FormatCELType(t any) string {
- dt := t.(*types.Type)
- switch dt.Kind() {
- case types.AnyKind:
- return "any"
- case types.DurationKind:
- return "duration"
- case types.ErrorKind:
- return "!error!"
- case types.NullTypeKind:
- return "null"
- case types.TimestampKind:
- return "timestamp"
- case types.TypeParamKind:
- return dt.TypeName()
- case types.OpaqueKind:
- if dt.TypeName() == "function" {
- // There is no explicit function type in the new types representation, so information like
- // whether the function is a member function is absent.
- return formatFunctionDeclType(dt.Parameters()[0], dt.Parameters()[1:], false)
- }
- case types.UnspecifiedKind:
- return ""
- }
- if len(dt.Parameters()) == 0 {
- return dt.DeclaredTypeName()
- }
- paramTypeNames := make([]string, 0, len(dt.Parameters()))
- for _, p := range dt.Parameters() {
- paramTypeNames = append(paramTypeNames, FormatCELType(p))
- }
- return fmt.Sprintf("%s(%s)", dt.TypeName(), strings.Join(paramTypeNames, ", "))
-}
-
-func formatExprType(t any) string {
- if t == nil {
- return ""
- }
- return FormatCheckedType(t.(*exprpb.Type))
-}
-
-func formatFunctionExprType(resultType *exprpb.Type, argTypes []*exprpb.Type, isInstance bool) string {
- return formatFunctionInternal[*exprpb.Type](resultType, argTypes, isInstance, formatExprType)
-}
-
-func formatFunctionDeclType(resultType *types.Type, argTypes []*types.Type, isInstance bool) string {
- return formatFunctionInternal[*types.Type](resultType, argTypes, isInstance, FormatCELType)
-}
-
-func formatFunctionInternal[T any](resultType T, argTypes []T, isInstance bool, format formatter) string {
- result := ""
- if isInstance {
- target := argTypes[0]
- argTypes = argTypes[1:]
- result += format(target)
- result += "."
- }
- result += "("
- for i, arg := range argTypes {
- if i > 0 {
- result += ", "
- }
- result += format(arg)
- }
- result += ")"
- rt := format(resultType)
- if rt != "" {
- result += " -> "
- result += rt
- }
- return result
-}
-
-// kindOf returns the kind of the type as defined in the checked.proto.
-func kindOf(t *exprpb.Type) int {
- if t == nil || t.TypeKind == nil {
- return kindUnknown
- }
- switch t.GetTypeKind().(type) {
- case *exprpb.Type_Error:
- return kindError
- case *exprpb.Type_Function:
- return kindFunction
- case *exprpb.Type_Dyn:
- return kindDyn
- case *exprpb.Type_Primitive:
- return kindPrimitive
- case *exprpb.Type_WellKnown:
- return kindWellKnown
- case *exprpb.Type_Wrapper:
- return kindWrapper
- case *exprpb.Type_Null:
- return kindNull
- case *exprpb.Type_Type:
- return kindType
- case *exprpb.Type_ListType_:
- return kindList
- case *exprpb.Type_MapType_:
- return kindMap
- case *exprpb.Type_MessageType:
- return kindObject
- case *exprpb.Type_TypeParam:
- return kindTypeParam
- case *exprpb.Type_AbstractType_:
- return kindAbstract
- }
- return kindUnknown
-}
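
A minimal sketch of what FormatCheckedType above renders for a composed proto type; the list type is assembled inline from exprpb since the corresponding decls helper is outside this hunk:

    // Sketch: formatting a list(int) proto type with the checker's formatter.
    package main

    import (
    	"fmt"

    	"github.com/google/cel-go/checker"
    	chkdecls "github.com/google/cel-go/checker/decls"
    	exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
    )

    func main() {
    	listOfInt := &exprpb.Type{
    		TypeKind: &exprpb.Type_ListType_{
    			ListType: &exprpb.Type_ListType{
    				ElemType: chkdecls.NewPrimitiveType(exprpb.Type_INT64),
    			},
    		},
    	}
    	fmt.Println(checker.FormatCheckedType(listOfInt)) // prints "list(int)"
    }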
diff --git a/vendor/github.com/google/cel-go/checker/mapping.go b/vendor/github.com/google/cel-go/checker/mapping.go
deleted file mode 100644
index 8163a908a..000000000
--- a/vendor/github.com/google/cel-go/checker/mapping.go
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-import (
- "github.com/google/cel-go/common/types"
-)
-
-type mapping struct {
- mapping map[string]*types.Type
-}
-
-func newMapping() *mapping {
- return &mapping{
- mapping: make(map[string]*types.Type),
- }
-}
-
-func (m *mapping) add(from, to *types.Type) {
- m.mapping[FormatCELType(from)] = to
-}
-
-func (m *mapping) find(from *types.Type) (*types.Type, bool) {
- if r, found := m.mapping[FormatCELType(from)]; found {
- return r, found
- }
- return nil, false
-}
-
-func (m *mapping) copy() *mapping {
- c := newMapping()
-
- for k, v := range m.mapping {
- c.mapping[k] = v
- }
- return c
-}
diff --git a/vendor/github.com/google/cel-go/checker/options.go b/vendor/github.com/google/cel-go/checker/options.go
deleted file mode 100644
index 0560c3813..000000000
--- a/vendor/github.com/google/cel-go/checker/options.go
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-type options struct {
- crossTypeNumericComparisons bool
- homogeneousAggregateLiterals bool
- validatedDeclarations *Scopes
-}
-
-// Option is a functional option for configuring the type-checker
-type Option func(*options) error
-
-// CrossTypeNumericComparisons toggles type-checker support for numeric comparisons across types.
-// See https://github.com/google/cel-spec/wiki/proposal-210 for more details.
-func CrossTypeNumericComparisons(enabled bool) Option {
- return func(opts *options) error {
- opts.crossTypeNumericComparisons = enabled
- return nil
- }
-}
-
-// ValidatedDeclarations provides a reference to validated declarations which will be copied
-// into new checker instances.
-func ValidatedDeclarations(env *Env) Option {
- return func(opts *options) error {
- opts.validatedDeclarations = env.validatedDeclarations()
- return nil
- }
-}
diff --git a/vendor/github.com/google/cel-go/checker/printer.go b/vendor/github.com/google/cel-go/checker/printer.go
deleted file mode 100644
index 7a3984f02..000000000
--- a/vendor/github.com/google/cel-go/checker/printer.go
+++ /dev/null
@@ -1,74 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-import (
- "sort"
-
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/debug"
-)
-
-type semanticAdorner struct {
- checked *ast.AST
-}
-
-var _ debug.Adorner = &semanticAdorner{}
-
-func (a *semanticAdorner) GetMetadata(elem any) string {
- result := ""
- e, isExpr := elem.(ast.Expr)
- if !isExpr {
- return result
- }
- t := a.checked.TypeMap()[e.ID()]
- if t != nil {
- result += "~"
- result += FormatCELType(t)
- }
-
- switch e.Kind() {
- case ast.IdentKind,
- ast.CallKind,
- ast.ListKind,
- ast.StructKind,
- ast.SelectKind:
- if ref, found := a.checked.ReferenceMap()[e.ID()]; found {
- if len(ref.OverloadIDs) == 0 {
- result += "^" + ref.Name
- } else {
- sort.Strings(ref.OverloadIDs)
- for i, overload := range ref.OverloadIDs {
- if i == 0 {
- result += "^"
- } else {
- result += "|"
- }
- result += overload
- }
- }
- }
- }
-
- return result
-}
-
-// Print returns a string representation of the Expr message,
-// annotated with types from the CheckedExpr. The Expr must
-// be a sub-expression embedded in the CheckedExpr.
-func Print(e ast.Expr, checked *ast.AST) string {
- a := &semanticAdorner{checked: checked}
- return debug.ToAdornedDebugString(e, a)
-}
diff --git a/vendor/github.com/google/cel-go/checker/scopes.go b/vendor/github.com/google/cel-go/checker/scopes.go
deleted file mode 100644
index 8bb73ddb6..000000000
--- a/vendor/github.com/google/cel-go/checker/scopes.go
+++ /dev/null
@@ -1,147 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-import (
- "github.com/google/cel-go/common/decls"
-)
-
-// Scopes represents nested Decl sets where the Scopes value contains a Group holding all
-// identifiers in scope and an optional parent representing outer scopes.
-// Each Groups value is a mapping of names to Decls in the ident and function namespaces.
-// Lookups are performed such that bindings in inner scopes shadow those in outer scopes.
-type Scopes struct {
- parent *Scopes
- scopes *Group
-}
-
-// newScopes creates a new, empty Scopes.
-// Some operations can't be safely performed until a Group is added with Push.
-func newScopes() *Scopes {
- return &Scopes{
- scopes: newGroup(),
- }
-}
-
-// Copy creates a copy of the current Scopes values, including a copy of its parent if non-nil.
-func (s *Scopes) Copy() *Scopes {
- cpy := newScopes()
- if s == nil {
- return cpy
- }
- if s.parent != nil {
- cpy.parent = s.parent.Copy()
- }
- cpy.scopes = s.scopes.copy()
- return cpy
-}
-
-// Push creates a new Scopes value which references the current Scope as its parent.
-func (s *Scopes) Push() *Scopes {
- return &Scopes{
- parent: s,
- scopes: newGroup(),
- }
-}
-
-// Pop returns the parent Scopes value for the current scope, or the current scope if the parent
-// is nil.
-func (s *Scopes) Pop() *Scopes {
- if s.parent != nil {
- return s.parent
- }
- // TODO: Consider whether this should be an error / panic.
- return s
-}
-
-// AddIdent adds the ident Decl in the current scope.
-// Note: If the name collides with an existing identifier in the scope, the Decl is overwritten.
-func (s *Scopes) AddIdent(decl *decls.VariableDecl) {
- s.scopes.idents[decl.Name()] = decl
-}
-
-// FindIdent finds the first ident Decl with a matching name in Scopes, or nil if one cannot be
-// found.
-// Note: The search is performed from innermost to outermost.
-func (s *Scopes) FindIdent(name string) *decls.VariableDecl {
- if ident, found := s.scopes.idents[name]; found {
- return ident
- }
- if s.parent != nil {
- return s.parent.FindIdent(name)
- }
- return nil
-}
-
-// FindIdentInScope finds the first ident Decl with a matching name in the current Scopes value, or
-// nil if one does not exist.
-// Note: The search is only performed on the current scope and does not search outer scopes.
-func (s *Scopes) FindIdentInScope(name string) *decls.VariableDecl {
- if ident, found := s.scopes.idents[name]; found {
- return ident
- }
- return nil
-}
-
-// SetFunction adds the function Decl to the current scope.
-// Note: Any previous entry for a function in the current scope with the same name is overwritten.
-func (s *Scopes) SetFunction(fn *decls.FunctionDecl) {
- s.scopes.functions[fn.Name()] = fn
-}
-
-// FindFunction finds the first function Decl with a matching name in Scopes.
-// The search is performed from innermost to outermost.
-// Returns nil if no such function is found in Scopes.
-func (s *Scopes) FindFunction(name string) *decls.FunctionDecl {
- if fn, found := s.scopes.functions[name]; found {
- return fn
- }
- if s.parent != nil {
- return s.parent.FindFunction(name)
- }
- return nil
-}
-
-// Group is a set of Decls that is pushed on or popped off a Scopes as a unit.
-// Contains separate namespaces for identifier and function Decls.
-// (Should be named "Scope" perhaps?)
-type Group struct {
- idents map[string]*decls.VariableDecl
- functions map[string]*decls.FunctionDecl
-}
-
-// copy creates a new Group instance with a shallow copy of the variables and functions.
-// If callers need to mutate the decls.FunctionDecl definitions for a Function, they should copy-on-write.
-func (g *Group) copy() *Group {
- cpy := &Group{
- idents: make(map[string]*decls.VariableDecl, len(g.idents)),
- functions: make(map[string]*decls.FunctionDecl, len(g.functions)),
- }
- for n, id := range g.idents {
- cpy.idents[n] = id
- }
- for n, fn := range g.functions {
- cpy.functions[n] = fn
- }
- return cpy
-}
-
-// newGroup creates a new Group with empty maps for identifiers and functions.
-func newGroup() *Group {
- return &Group{
- idents: make(map[string]*decls.VariableDecl),
- functions: make(map[string]*decls.FunctionDecl),
- }
-}
diff --git a/vendor/github.com/google/cel-go/checker/standard.go b/vendor/github.com/google/cel-go/checker/standard.go
deleted file mode 100644
index 11b35b80e..000000000
--- a/vendor/github.com/google/cel-go/checker/standard.go
+++ /dev/null
@@ -1,35 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-import (
- "github.com/google/cel-go/common/stdlib"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-// StandardFunctions returns the Decls for all functions in the evaluator.
-//
-// Deprecated: prefer stdlib.FunctionExprDecls()
-func StandardFunctions() []*exprpb.Decl {
- return stdlib.FunctionExprDecls()
-}
-
-// StandardTypes returns the set of type identifiers for standard library types.
-//
-// Deprecated: prefer stdlib.TypeExprDecls()
-func StandardTypes() []*exprpb.Decl {
- return stdlib.TypeExprDecls()
-}
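
A small sketch showing that the deprecated wrappers above are thin forwarders for the stdlib package:

    // Sketch: the deprecated checker helpers return the same declarations as stdlib.
    package main

    import (
    	"fmt"

    	"github.com/google/cel-go/checker"
    	"github.com/google/cel-go/common/stdlib"
    )

    func main() {
    	fmt.Println(len(checker.StandardFunctions()) == len(stdlib.FunctionExprDecls())) // true
    	fmt.Println(len(checker.StandardTypes()) == len(stdlib.TypeExprDecls()))         // true
    }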
diff --git a/vendor/github.com/google/cel-go/checker/types.go b/vendor/github.com/google/cel-go/checker/types.go
deleted file mode 100644
index e2373d1b7..000000000
--- a/vendor/github.com/google/cel-go/checker/types.go
+++ /dev/null
@@ -1,309 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package checker
-
-import (
- "github.com/google/cel-go/common/types"
-)
-
-// isDyn returns true if the input t is either type DYN or a well-known ANY message.
-func isDyn(t *types.Type) bool {
- // Note: object type values that are well-known and map to a DYN value in practice
- // are sanitized prior to being added to the environment.
- switch t.Kind() {
- case types.DynKind, types.AnyKind:
- return true
- default:
- return false
- }
-}
-
-// isDynOrError returns true if the input is either an Error, DYN, or well-known ANY message.
-func isDynOrError(t *types.Type) bool {
- return isError(t) || isDyn(t)
-}
-
-func isError(t *types.Type) bool {
- return t.Kind() == types.ErrorKind
-}
-
-func isOptional(t *types.Type) bool {
- if t.Kind() == types.OpaqueKind {
- return t.TypeName() == "optional"
- }
- return false
-}
-
-func maybeUnwrapOptional(t *types.Type) (*types.Type, bool) {
- if isOptional(t) {
- return t.Parameters()[0], true
- }
- return t, false
-}
-
-// isEqualOrLessSpecific checks whether one type is equal or less specific than the other one.
-// A type is less specific if it matches the other type using the DYN type.
-func isEqualOrLessSpecific(t1, t2 *types.Type) bool {
- kind1, kind2 := t1.Kind(), t2.Kind()
- // The first type is less specific.
- if isDyn(t1) || kind1 == types.TypeParamKind {
- return true
- }
- // The first type is not less specific.
- if isDyn(t2) || kind2 == types.TypeParamKind {
- return false
- }
- // Types must be of the same kind to be equal.
- if kind1 != kind2 {
- return false
- }
-
- // With limited exceptions for ANY and JSON values, the types must agree and be equivalent in
- // order to return true.
- switch kind1 {
- case types.OpaqueKind:
- if t1.TypeName() != t2.TypeName() ||
- len(t1.Parameters()) != len(t2.Parameters()) {
- return false
- }
- for i, p1 := range t1.Parameters() {
- if !isEqualOrLessSpecific(p1, t2.Parameters()[i]) {
- return false
- }
- }
- return true
- case types.ListKind:
- return isEqualOrLessSpecific(t1.Parameters()[0], t2.Parameters()[0])
- case types.MapKind:
- return isEqualOrLessSpecific(t1.Parameters()[0], t2.Parameters()[0]) &&
- isEqualOrLessSpecific(t1.Parameters()[1], t2.Parameters()[1])
- case types.TypeKind:
- return true
- default:
- return t1.IsExactType(t2)
- }
-}
-
-// internalIsAssignable returns true if t1 is assignable to t2.
-func internalIsAssignable(m *mapping, t1, t2 *types.Type) bool {
- // Process type parameters.
- kind1, kind2 := t1.Kind(), t2.Kind()
- if kind2 == types.TypeParamKind {
- // If t2 is a valid type substitution for t1, return true.
- valid, t2HasSub := isValidTypeSubstitution(m, t1, t2)
- if valid {
- return true
- }
- // If t2 is not a valid type sub for t1, and already has a known substitution return false
- // since it is not possible for t1 to be a substitution for t2.
- if !valid && t2HasSub {
- return false
- }
- // Otherwise, fall through to check whether t1 is a possible substitution for t2.
- }
- if kind1 == types.TypeParamKind {
- // Return whether t1 is a valid substitution for t2. If not, do no additional checks as the
- // possible type substitutions have been searched in both directions.
- valid, _ := isValidTypeSubstitution(m, t2, t1)
- return valid
- }
-
- // Next check for wildcard types.
- if isDynOrError(t1) || isDynOrError(t2) {
- return true
- }
- // Preserve the nullness checks of the legacy type-checker.
- if kind1 == types.NullTypeKind {
- return internalIsAssignableNull(t2)
- }
- if kind2 == types.NullTypeKind {
- return internalIsAssignableNull(t1)
- }
-
- // Test for when the types do not need to agree, but are more specific than dyn.
- switch kind1 {
- case types.BoolKind, types.BytesKind, types.DoubleKind, types.IntKind, types.StringKind, types.UintKind,
- types.AnyKind, types.DurationKind, types.TimestampKind,
- types.StructKind:
- return t1.IsAssignableType(t2)
- case types.TypeKind:
- return kind2 == types.TypeKind
- case types.OpaqueKind, types.ListKind, types.MapKind:
- return t1.Kind() == t2.Kind() && t1.TypeName() == t2.TypeName() &&
- internalIsAssignableList(m, t1.Parameters(), t2.Parameters())
- default:
- return false
- }
-}
-
-// isValidTypeSubstitution returns whether t2 (or its type substitution) is a valid type
-// substitution for t1, and whether t2 has a type substitution in mapping m.
-//
-// The type t2 is a valid substitution for t1 if any of the following statements is true
-// - t2 has a type substitution (t2sub) equal to t1
-// - t2 has a type substitution (t2sub) assignable to t1
-// - t2 does not occur within t1.
-func isValidTypeSubstitution(m *mapping, t1, t2 *types.Type) (valid, hasSub bool) {
- // Early return if the t1 and t2 are the same instance.
- kind1, kind2 := t1.Kind(), t2.Kind()
- if kind1 == kind2 && t1.IsExactType(t2) {
- return true, true
- }
- if t2Sub, found := m.find(t2); found {
- // Early return if t1 and t2Sub are the same instance as otherwise the mapping
-   // might mark a type as being a substitution for itself.
- if kind1 == t2Sub.Kind() && t1.IsExactType(t2Sub) {
- return true, true
- }
- // If the types are compatible, pick the more general type and return true
- if internalIsAssignable(m, t1, t2Sub) {
- t2New := mostGeneral(t1, t2Sub)
- // only update the type reference map if the target type does not occur within it.
- if notReferencedIn(m, t2, t2New) {
- m.add(t2, t2New)
- }
- // acknowledge the type agreement, and that the substitution is already tracked.
- return true, true
- }
- return false, true
- }
- if notReferencedIn(m, t2, t1) {
- m.add(t2, t1)
- return true, false
- }
- return false, false
-}
-
-// internalIsAssignableList returns true if the element types at each index in the list are
-// assignable from l1[i] to l2[i]. The list lengths must also agree for the lists to be
-// assignable.
-func internalIsAssignableList(m *mapping, l1, l2 []*types.Type) bool {
- if len(l1) != len(l2) {
- return false
- }
- for i, t1 := range l1 {
- if !internalIsAssignable(m, t1, l2[i]) {
- return false
- }
- }
- return true
-}
-
-// internalIsAssignableNull returns true if the type is nullable.
-func internalIsAssignableNull(t *types.Type) bool {
- return isLegacyNullable(t) || t.IsAssignableType(types.NullType)
-}
-
-// isLegacyNullable preserves the null-ness compatibility of the original type-checker implementation.
-func isLegacyNullable(t *types.Type) bool {
- switch t.Kind() {
- case types.OpaqueKind, types.StructKind, types.AnyKind, types.DurationKind, types.TimestampKind:
- return true
- }
- return false
-}
-
-// isAssignable returns an updated type substitution mapping if t1 is assignable to t2.
-func isAssignable(m *mapping, t1, t2 *types.Type) *mapping {
- mCopy := m.copy()
- if internalIsAssignable(mCopy, t1, t2) {
- return mCopy
- }
- return nil
-}
-
-// isAssignableList returns an updated type substitution mapping if l1 is assignable to l2.
-func isAssignableList(m *mapping, l1, l2 []*types.Type) *mapping {
- mCopy := m.copy()
- if internalIsAssignableList(mCopy, l1, l2) {
- return mCopy
- }
- return nil
-}
-
-// mostGeneral returns the more general of two types which are known to unify.
-func mostGeneral(t1, t2 *types.Type) *types.Type {
- if isEqualOrLessSpecific(t1, t2) {
- return t1
- }
- return t2
-}
-
-// notReferencedIn checks whether the type doesn't appear directly or transitively within the other
-// type. This is a standard requirement for type unification, commonly referred to as the "occurs
-// check".
-func notReferencedIn(m *mapping, t, withinType *types.Type) bool {
- if t.IsExactType(withinType) {
- return false
- }
- withinKind := withinType.Kind()
- switch withinKind {
- case types.TypeParamKind:
- wtSub, found := m.find(withinType)
- if !found {
- return true
- }
- return notReferencedIn(m, t, wtSub)
- case types.OpaqueKind, types.ListKind, types.MapKind:
- for _, pt := range withinType.Parameters() {
- if !notReferencedIn(m, t, pt) {
- return false
- }
- }
- return true
- default:
- return true
- }
-}
-
-// substitute replaces all direct and indirect occurrences of bound type parameters. Unbound type
-// parameters are replaced by DYN if typeParamToDyn is true.
-func substitute(m *mapping, t *types.Type, typeParamToDyn bool) *types.Type {
- if tSub, found := m.find(t); found {
- return substitute(m, tSub, typeParamToDyn)
- }
- kind := t.Kind()
- if typeParamToDyn && kind == types.TypeParamKind {
- return types.DynType
- }
- switch kind {
- case types.OpaqueKind:
- return types.NewOpaqueType(t.TypeName(), substituteParams(m, t.Parameters(), typeParamToDyn)...)
- case types.ListKind:
- return types.NewListType(substitute(m, t.Parameters()[0], typeParamToDyn))
- case types.MapKind:
- return types.NewMapType(substitute(m, t.Parameters()[0], typeParamToDyn),
- substitute(m, t.Parameters()[1], typeParamToDyn))
- case types.TypeKind:
- if len(t.Parameters()) > 0 {
- return types.NewTypeTypeWithParam(substitute(m, t.Parameters()[0], typeParamToDyn))
- }
- return t
- default:
- return t
- }
-}
-
-func substituteParams(m *mapping, typeParams []*types.Type, typeParamToDyn bool) []*types.Type {
- subParams := make([]*types.Type, len(typeParams))
- for i, tp := range typeParams {
- subParams[i] = substitute(m, tp, typeParamToDyn)
- }
- return subParams
-}
-
-func newFunctionType(resultType *types.Type, argTypes ...*types.Type) *types.Type {
- return types.NewOpaqueType("function", append([]*types.Type{resultType}, argTypes...)...)
-}
diff --git a/vendor/github.com/google/cel-go/common/BUILD.bazel b/vendor/github.com/google/cel-go/common/BUILD.bazel
deleted file mode 100644
index d6165b13a..000000000
--- a/vendor/github.com/google/cel-go/common/BUILD.bazel
+++ /dev/null
@@ -1,35 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "cost.go",
- "error.go",
- "errors.go",
- "location.go",
- "source.go",
- ],
- importpath = "github.com/google/cel-go/common",
- deps = [
- "//common/runes:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_x_text//width:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- size = "small",
- srcs = [
- "errors_test.go",
- "source_test.go",
- ],
- embed = [
- ":go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/ast/BUILD.bazel b/vendor/github.com/google/cel-go/common/ast/BUILD.bazel
deleted file mode 100644
index c92a0f179..000000000
--- a/vendor/github.com/google/cel-go/common/ast/BUILD.bazel
+++ /dev/null
@@ -1,61 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = [
- "//cel:__subpackages__",
- "//checker:__subpackages__",
- "//common:__subpackages__",
- "//ext:__subpackages__",
- "//interpreter:__subpackages__",
- "//parser:__subpackages__",
- ],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "ast.go",
- "conversion.go",
- "expr.go",
- "factory.go",
- "navigable.go",
- ],
- importpath = "github.com/google/cel-go/common/ast",
- deps = [
- "//common:go_default_library",
- "//common/types:go_default_library",
- "//common/types/ref:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//types/known/structpb:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- srcs = [
- "ast_test.go",
- "conversion_test.go",
- "expr_test.go",
- "navigable_test.go",
- ],
- embed = [
- ":go_default_library",
- ],
- deps = [
- "//checker:go_default_library",
- "//checker/decls:go_default_library",
- "//common:go_default_library",
- "//common/containers:go_default_library",
- "//common/decls:go_default_library",
- "//common/overloads:go_default_library",
- "//common/stdlib:go_default_library",
- "//common/types:go_default_library",
- "//common/types/ref:go_default_library",
- "//parser:go_default_library",
- "//test/proto3pb:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//encoding/prototext:go_default_library",
- ],
-)
\ No newline at end of file
diff --git a/vendor/github.com/google/cel-go/common/ast/ast.go b/vendor/github.com/google/cel-go/common/ast/ast.go
deleted file mode 100644
index 4feddaa3a..000000000
--- a/vendor/github.com/google/cel-go/common/ast/ast.go
+++ /dev/null
@@ -1,450 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package ast declares data structures useful for parsed and checked abstract syntax trees
-package ast
-
-import (
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-// AST contains a protobuf expression and source info along with CEL-native type and reference information.
-type AST struct {
- expr Expr
- sourceInfo *SourceInfo
- typeMap map[int64]*types.Type
- refMap map[int64]*ReferenceInfo
-}
-
-// Expr returns the root ast.Expr value in the AST.
-func (a *AST) Expr() Expr {
- if a == nil {
- return nilExpr
- }
- return a.expr
-}
-
-// SourceInfo returns the source metadata associated with the parse / type-check passes.
-func (a *AST) SourceInfo() *SourceInfo {
- if a == nil {
- return nil
- }
- return a.sourceInfo
-}
-
-// GetType returns the type for the expression at the given id, if one exists, else types.DynType.
-func (a *AST) GetType(id int64) *types.Type {
- if t, found := a.TypeMap()[id]; found {
- return t
- }
- return types.DynType
-}
-
-// SetType sets the type of the expression node at the given id.
-func (a *AST) SetType(id int64, t *types.Type) {
- if a == nil {
- return
- }
- a.typeMap[id] = t
-}
-
-// TypeMap returns the map of expression ids to type-checked types.
-//
-// If the AST is not type-checked, the map will be empty.
-func (a *AST) TypeMap() map[int64]*types.Type {
- if a == nil {
- return map[int64]*types.Type{}
- }
- return a.typeMap
-}
-
-// GetOverloadIDs returns the set of overload function names for a given expression id.
-//
-// If the expression id is not a function call, or the AST is not type-checked, the result will be empty.
-func (a *AST) GetOverloadIDs(id int64) []string {
- if ref, found := a.ReferenceMap()[id]; found {
- return ref.OverloadIDs
- }
- return []string{}
-}
-
-// ReferenceMap returns the map of expression id to identifier, constant, and function references.
-func (a *AST) ReferenceMap() map[int64]*ReferenceInfo {
- if a == nil {
- return map[int64]*ReferenceInfo{}
- }
- return a.refMap
-}
-
-// SetReference adds a reference to the checked AST type map.
-func (a *AST) SetReference(id int64, r *ReferenceInfo) {
- if a == nil {
- return
- }
- a.refMap[id] = r
-}
-
-// IsChecked returns whether the AST is type-checked.
-func (a *AST) IsChecked() bool {
- return a != nil && len(a.TypeMap()) > 0
-}
-
-// NewAST creates a base AST instance with an ast.Expr and ast.SourceInfo value.
-func NewAST(e Expr, sourceInfo *SourceInfo) *AST {
- if e == nil {
- e = nilExpr
- }
- return &AST{
- expr: e,
- sourceInfo: sourceInfo,
- typeMap: make(map[int64]*types.Type),
- refMap: make(map[int64]*ReferenceInfo),
- }
-}
-
-// NewCheckedAST wraps a parsed AST and augments it with type and reference metadata.
-func NewCheckedAST(parsed *AST, typeMap map[int64]*types.Type, refMap map[int64]*ReferenceInfo) *AST {
- return &AST{
- expr: parsed.Expr(),
- sourceInfo: parsed.SourceInfo(),
- typeMap: typeMap,
- refMap: refMap,
- }
-}
-
-// Copy creates a deep copy of the Expr and SourceInfo values in the input AST.
-//
-// Copies of the Expr value are generated using an internal default ExprFactory.
-func Copy(a *AST) *AST {
- if a == nil {
- return nil
- }
- e := defaultFactory.CopyExpr(a.expr)
- if !a.IsChecked() {
- return NewAST(e, CopySourceInfo(a.SourceInfo()))
- }
- typesCopy := make(map[int64]*types.Type, len(a.typeMap))
- for id, t := range a.typeMap {
- typesCopy[id] = t
- }
- refsCopy := make(map[int64]*ReferenceInfo, len(a.refMap))
- for id, r := range a.refMap {
- refsCopy[id] = r
- }
- return NewCheckedAST(NewAST(e, CopySourceInfo(a.SourceInfo())), typesCopy, refsCopy)
-}
-
-// MaxID returns the upper-bound, non-inclusive, of ids present within the AST's Expr value.
-func MaxID(a *AST) int64 {
- visitor := &maxIDVisitor{maxID: 1}
- PostOrderVisit(a.Expr(), visitor)
- return visitor.maxID + 1
-}
-
-// NewSourceInfo creates a simple SourceInfo object from an input common.Source value.
-func NewSourceInfo(src common.Source) *SourceInfo {
- var lineOffsets []int32
- var desc string
- baseLine := int32(0)
- baseCol := int32(0)
- if src != nil {
- desc = src.Description()
- lineOffsets = src.LineOffsets()
- // Determine whether the source metadata should be computed relative
- // to a base line and column value. This can be determined by requesting
- // the location for offset 0 from the source object.
- if loc, found := src.OffsetLocation(0); found {
- baseLine = int32(loc.Line()) - 1
- baseCol = int32(loc.Column())
- }
- }
- return &SourceInfo{
- desc: desc,
- lines: lineOffsets,
- baseLine: baseLine,
- baseCol: baseCol,
- offsetRanges: make(map[int64]OffsetRange),
- macroCalls: make(map[int64]Expr),
- }
-}
-
-// CopySourceInfo creates a deep copy of the MacroCalls within the input SourceInfo.
-//
-// Copies of macro Expr values are generated using an internal default ExprFactory.
-func CopySourceInfo(info *SourceInfo) *SourceInfo {
- if info == nil {
- return nil
- }
- rangesCopy := make(map[int64]OffsetRange, len(info.offsetRanges))
- for id, off := range info.offsetRanges {
- rangesCopy[id] = off
- }
- callsCopy := make(map[int64]Expr, len(info.macroCalls))
- for id, call := range info.macroCalls {
- callsCopy[id] = defaultFactory.CopyExpr(call)
- }
- return &SourceInfo{
- syntax: info.syntax,
- desc: info.desc,
- lines: info.lines,
- baseLine: info.baseLine,
- baseCol: info.baseCol,
- offsetRanges: rangesCopy,
- macroCalls: callsCopy,
- }
-}
-
-// SourceInfo records basic information about the expression as a textual input and
-// as a parsed expression value.
-type SourceInfo struct {
- syntax string
- desc string
- lines []int32
- baseLine int32
- baseCol int32
- offsetRanges map[int64]OffsetRange
- macroCalls map[int64]Expr
-}
-
-// SyntaxVersion returns the syntax version associated with the text expression.
-func (s *SourceInfo) SyntaxVersion() string {
- if s == nil {
- return ""
- }
- return s.syntax
-}
-
-// Description provides information about where the expression came from.
-func (s *SourceInfo) Description() string {
- if s == nil {
- return ""
- }
- return s.desc
-}
-
-// LineOffsets returns a list of the 0-based character offsets in the input text where newlines appear.
-func (s *SourceInfo) LineOffsets() []int32 {
- if s == nil {
- return []int32{}
- }
- return s.lines
-}
-
-// MacroCalls returns a map of expression id to ast.Expr value where the id represents the expression
-// node where the macro was inserted into the AST, and the ast.Expr value represents the original call
-// signature which was replaced.
-func (s *SourceInfo) MacroCalls() map[int64]Expr {
- if s == nil {
- return map[int64]Expr{}
- }
- return s.macroCalls
-}
-
-// GetMacroCall returns the original ast.Expr value for the given expression if it was generated via
-// a macro replacement.
-//
-// Note, parsing options must be enabled to track macro calls before this method will return a value.
-func (s *SourceInfo) GetMacroCall(id int64) (Expr, bool) {
- e, found := s.MacroCalls()[id]
- return e, found
-}
-
-// SetMacroCall records a macro call at a specific location.
-func (s *SourceInfo) SetMacroCall(id int64, e Expr) {
- if s != nil {
- s.macroCalls[id] = e
- }
-}
-
-// ClearMacroCall removes the macro call at the given expression id.
-func (s *SourceInfo) ClearMacroCall(id int64) {
- if s != nil {
- delete(s.macroCalls, id)
- }
-}
-
-// OffsetRanges returns a map of expression id to OffsetRange values where the range indicates either:
-// the start and end position in the input stream where the expression occurs, or the start position
-// only. If the range only captures start position, the stop position of the range will be equal to
-// the start.
-func (s *SourceInfo) OffsetRanges() map[int64]OffsetRange {
- if s == nil {
- return map[int64]OffsetRange{}
- }
- return s.offsetRanges
-}
-
-// GetOffsetRange retrieves an OffsetRange for the given expression id if one exists.
-func (s *SourceInfo) GetOffsetRange(id int64) (OffsetRange, bool) {
- if s == nil {
- return OffsetRange{}, false
- }
- o, found := s.offsetRanges[id]
- return o, found
-}
-
-// SetOffsetRange sets the OffsetRange for the given expression id.
-func (s *SourceInfo) SetOffsetRange(id int64, o OffsetRange) {
- if s == nil {
- return
- }
- s.offsetRanges[id] = o
-}
-
-// GetStartLocation calculates the human-readable 1-based line and 0-based column of the first character
-// of the expression node at the id.
-func (s *SourceInfo) GetStartLocation(id int64) common.Location {
- if o, found := s.GetOffsetRange(id); found {
- line := 1
- col := int(o.Start)
- for _, lineOffset := range s.LineOffsets() {
- if lineOffset < o.Start {
- line++
- col = int(o.Start - lineOffset)
- } else {
- break
- }
- }
- return common.NewLocation(line, col)
- }
- return common.NoLocation
-}
-
-// GetStopLocation calculates the human-readable 1-based line and 0-based column of the last character for
-// the expression node at the given id.
-//
-// If the SourceInfo was generated from a serialized protobuf representation, the stop location will
-// be identical to the start location for the expression.
-func (s *SourceInfo) GetStopLocation(id int64) common.Location {
- if o, found := s.GetOffsetRange(id); found {
- line := 1
- col := int(o.Stop)
- for _, lineOffset := range s.LineOffsets() {
- if lineOffset < o.Stop {
- line++
- col = int(o.Stop - lineOffset)
- } else {
- break
- }
- }
- return common.NewLocation(line, col)
- }
- return common.NoLocation
-}
-
-// ComputeOffset calculates the 0-based character offset from a 1-based line and 0-based column.
-func (s *SourceInfo) ComputeOffset(line, col int32) int32 {
- if s != nil {
- line = s.baseLine + line
- col = s.baseCol + col
- }
- if line == 1 {
- return col
- }
- if line < 1 || line > int32(len(s.LineOffsets())) {
- return -1
- }
- offset := s.LineOffsets()[line-2]
- return offset + col
-}
-
-// OffsetRange captures the start and stop positions of a section of text in the input expression.
-type OffsetRange struct {
- Start int32
- Stop int32
-}
-
-// ReferenceInfo contains a CEL native representation of an identifier reference which may refer to
-// either a qualified identifier name, a set of overload ids, or a constant value from an enum.
-type ReferenceInfo struct {
- Name string
- OverloadIDs []string
- Value ref.Val
-}
-
-// NewIdentReference creates a ReferenceInfo instance for an identifier with an optional constant value.
-func NewIdentReference(name string, value ref.Val) *ReferenceInfo {
- return &ReferenceInfo{Name: name, Value: value}
-}
-
-// NewFunctionReference creates a ReferenceInfo instance for a set of function overloads.
-func NewFunctionReference(overloads ...string) *ReferenceInfo {
- info := &ReferenceInfo{}
- for _, id := range overloads {
- info.AddOverload(id)
- }
- return info
-}
-
-// AddOverload appends a function overload ID to the ReferenceInfo.
-func (r *ReferenceInfo) AddOverload(overloadID string) {
- for _, id := range r.OverloadIDs {
- if id == overloadID {
- return
- }
- }
- r.OverloadIDs = append(r.OverloadIDs, overloadID)
-}
-
-// Equals returns whether two references are identical to each other.
-func (r *ReferenceInfo) Equals(other *ReferenceInfo) bool {
- if r.Name != other.Name {
- return false
- }
- if len(r.OverloadIDs) != len(other.OverloadIDs) {
- return false
- }
- if len(r.OverloadIDs) != 0 {
- overloadMap := make(map[string]struct{}, len(r.OverloadIDs))
- for _, id := range r.OverloadIDs {
- overloadMap[id] = struct{}{}
- }
- for _, id := range other.OverloadIDs {
- _, found := overloadMap[id]
- if !found {
- return false
- }
- }
- }
- if r.Value == nil && other.Value == nil {
- return true
- }
- if r.Value == nil && other.Value != nil ||
- r.Value != nil && other.Value == nil ||
- r.Value.Equal(other.Value) != types.True {
- return false
- }
- return true
-}
-
-type maxIDVisitor struct {
- maxID int64
- *baseVisitor
-}
-
-// VisitExpr updates the max identifier if the incoming expression id is greater than previously observed.
-func (v *maxIDVisitor) VisitExpr(e Expr) {
- if v.maxID < e.ID() {
- v.maxID = e.ID()
- }
-}
-
-// VisitEntryExpr updates the max identifier if the incoming entry id is greater than previously observed.
-func (v *maxIDVisitor) VisitEntryExpr(e EntryExpr) {
- if v.maxID < e.ID() {
- v.maxID = e.ID()
- }
-}
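
A minimal sketch of the AST container API defined above; NewExprFactory and NewCall come from this package's factory file (elsewhere in the patch), and the function name "myFunc" is illustrative:

    // Sketch: building parsed and checked AST values with the constructors above.
    package main

    import (
    	"fmt"

    	"github.com/google/cel-go/common/ast"
    	"github.com/google/cel-go/common/types"
    )

    func main() {
    	fac := ast.NewExprFactory()
    	// A bare call node with id 1; "myFunc" is an illustrative function name.
    	expr := fac.NewCall(1, "myFunc")

    	parsed := ast.NewAST(expr, ast.NewSourceInfo(nil))
    	fmt.Println(parsed.IsChecked(), parsed.GetType(1) == types.DynType) // false true

    	// Attaching a type map produces a "checked" AST.
    	checked := ast.NewCheckedAST(parsed,
    		map[int64]*types.Type{1: types.IntType},
    		map[int64]*ast.ReferenceInfo{})
    	fmt.Println(checked.IsChecked(), checked.GetType(1).TypeName()) // true int
    }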
diff --git a/vendor/github.com/google/cel-go/common/ast/conversion.go b/vendor/github.com/google/cel-go/common/ast/conversion.go
deleted file mode 100644
index 8f2c4bd1e..000000000
--- a/vendor/github.com/google/cel-go/common/ast/conversion.go
+++ /dev/null
@@ -1,632 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ast
-
-import (
- "fmt"
-
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-
- structpb "google.golang.org/protobuf/types/known/structpb"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-// ToProto converts an AST to a CheckedExpr protobuf.
-func ToProto(ast *AST) (*exprpb.CheckedExpr, error) {
- refMap := make(map[int64]*exprpb.Reference, len(ast.ReferenceMap()))
- for id, ref := range ast.ReferenceMap() {
- r, err := ReferenceInfoToProto(ref)
- if err != nil {
- return nil, err
- }
- refMap[id] = r
- }
- typeMap := make(map[int64]*exprpb.Type, len(ast.TypeMap()))
- for id, typ := range ast.TypeMap() {
- t, err := types.TypeToExprType(typ)
- if err != nil {
- return nil, err
- }
- typeMap[id] = t
- }
- e, err := ExprToProto(ast.Expr())
- if err != nil {
- return nil, err
- }
- info, err := SourceInfoToProto(ast.SourceInfo())
- if err != nil {
- return nil, err
- }
- return &exprpb.CheckedExpr{
- Expr: e,
- SourceInfo: info,
- ReferenceMap: refMap,
- TypeMap: typeMap,
- }, nil
-}
-
-// ToAST converts a CheckedExpr protobuf to an AST instance.
-func ToAST(checked *exprpb.CheckedExpr) (*AST, error) {
- refMap := make(map[int64]*ReferenceInfo, len(checked.GetReferenceMap()))
- for id, ref := range checked.GetReferenceMap() {
- r, err := ProtoToReferenceInfo(ref)
- if err != nil {
- return nil, err
- }
- refMap[id] = r
- }
- typeMap := make(map[int64]*types.Type, len(checked.GetTypeMap()))
- for id, typ := range checked.GetTypeMap() {
- t, err := types.ExprTypeToType(typ)
- if err != nil {
- return nil, err
- }
- typeMap[id] = t
- }
- info, err := ProtoToSourceInfo(checked.GetSourceInfo())
- if err != nil {
- return nil, err
- }
- root, err := ProtoToExpr(checked.GetExpr())
- if err != nil {
- return nil, err
- }
- ast := NewCheckedAST(NewAST(root, info), typeMap, refMap)
- return ast, nil
-}
-
-// ProtoToExpr converts a protobuf Expr value to an ast.Expr value.
-func ProtoToExpr(e *exprpb.Expr) (Expr, error) {
- factory := NewExprFactory()
- return exprInternal(factory, e)
-}
-
-// ProtoToEntryExpr converts a protobuf struct/map entry to an ast.EntryExpr
-func ProtoToEntryExpr(e *exprpb.Expr_CreateStruct_Entry) (EntryExpr, error) {
- factory := NewExprFactory()
- switch e.GetKeyKind().(type) {
- case *exprpb.Expr_CreateStruct_Entry_FieldKey:
- return exprStructField(factory, e.GetId(), e)
- case *exprpb.Expr_CreateStruct_Entry_MapKey:
- return exprMapEntry(factory, e.GetId(), e)
- }
- return nil, fmt.Errorf("unsupported expr entry kind: %v", e)
-}
-
-func exprInternal(factory ExprFactory, e *exprpb.Expr) (Expr, error) {
- id := e.GetId()
- switch e.GetExprKind().(type) {
- case *exprpb.Expr_CallExpr:
- return exprCall(factory, id, e.GetCallExpr())
- case *exprpb.Expr_ComprehensionExpr:
- return exprComprehension(factory, id, e.GetComprehensionExpr())
- case *exprpb.Expr_ConstExpr:
- return exprLiteral(factory, id, e.GetConstExpr())
- case *exprpb.Expr_IdentExpr:
- return exprIdent(factory, id, e.GetIdentExpr())
- case *exprpb.Expr_ListExpr:
- return exprList(factory, id, e.GetListExpr())
- case *exprpb.Expr_SelectExpr:
- return exprSelect(factory, id, e.GetSelectExpr())
- case *exprpb.Expr_StructExpr:
- s := e.GetStructExpr()
- if s.GetMessageName() != "" {
- return exprStruct(factory, id, s)
- }
- return exprMap(factory, id, s)
- }
- return factory.NewUnspecifiedExpr(id), nil
-}
-
-func exprCall(factory ExprFactory, id int64, call *exprpb.Expr_Call) (Expr, error) {
- var err error
- args := make([]Expr, len(call.GetArgs()))
- for i, a := range call.GetArgs() {
- args[i], err = exprInternal(factory, a)
- if err != nil {
- return nil, err
- }
- }
- if call.GetTarget() == nil {
- return factory.NewCall(id, call.GetFunction(), args...), nil
- }
-
- target, err := exprInternal(factory, call.GetTarget())
- if err != nil {
- return nil, err
- }
- return factory.NewMemberCall(id, call.GetFunction(), target, args...), nil
-}
-
-func exprComprehension(factory ExprFactory, id int64, comp *exprpb.Expr_Comprehension) (Expr, error) {
- iterRange, err := exprInternal(factory, comp.GetIterRange())
- if err != nil {
- return nil, err
- }
- accuInit, err := exprInternal(factory, comp.GetAccuInit())
- if err != nil {
- return nil, err
- }
- loopCond, err := exprInternal(factory, comp.GetLoopCondition())
- if err != nil {
- return nil, err
- }
- loopStep, err := exprInternal(factory, comp.GetLoopStep())
- if err != nil {
- return nil, err
- }
- result, err := exprInternal(factory, comp.GetResult())
- if err != nil {
- return nil, err
- }
- return factory.NewComprehension(id,
- iterRange,
- comp.GetIterVar(),
- comp.GetAccuVar(),
- accuInit,
- loopCond,
- loopStep,
- result), nil
-}
-
-func exprLiteral(factory ExprFactory, id int64, c *exprpb.Constant) (Expr, error) {
- val, err := ConstantToVal(c)
- if err != nil {
- return nil, err
- }
- return factory.NewLiteral(id, val), nil
-}
-
-func exprIdent(factory ExprFactory, id int64, i *exprpb.Expr_Ident) (Expr, error) {
- return factory.NewIdent(id, i.GetName()), nil
-}
-
-func exprList(factory ExprFactory, id int64, l *exprpb.Expr_CreateList) (Expr, error) {
- elems := make([]Expr, len(l.GetElements()))
- for i, e := range l.GetElements() {
- elem, err := exprInternal(factory, e)
- if err != nil {
- return nil, err
- }
- elems[i] = elem
- }
- return factory.NewList(id, elems, l.GetOptionalIndices()), nil
-}
-
-func exprMap(factory ExprFactory, id int64, s *exprpb.Expr_CreateStruct) (Expr, error) {
- entries := make([]EntryExpr, len(s.GetEntries()))
- var err error
- for i, entry := range s.GetEntries() {
- entries[i], err = exprMapEntry(factory, entry.GetId(), entry)
- if err != nil {
- return nil, err
- }
- }
- return factory.NewMap(id, entries), nil
-}
-
-func exprMapEntry(factory ExprFactory, id int64, e *exprpb.Expr_CreateStruct_Entry) (EntryExpr, error) {
- k, err := exprInternal(factory, e.GetMapKey())
- if err != nil {
- return nil, err
- }
- v, err := exprInternal(factory, e.GetValue())
- if err != nil {
- return nil, err
- }
- return factory.NewMapEntry(id, k, v, e.GetOptionalEntry()), nil
-}
-
-func exprSelect(factory ExprFactory, id int64, s *exprpb.Expr_Select) (Expr, error) {
- op, err := exprInternal(factory, s.GetOperand())
- if err != nil {
- return nil, err
- }
- if s.GetTestOnly() {
- return factory.NewPresenceTest(id, op, s.GetField()), nil
- }
- return factory.NewSelect(id, op, s.GetField()), nil
-}
-
-func exprStruct(factory ExprFactory, id int64, s *exprpb.Expr_CreateStruct) (Expr, error) {
- fields := make([]EntryExpr, len(s.GetEntries()))
- var err error
- for i, field := range s.GetEntries() {
- fields[i], err = exprStructField(factory, field.GetId(), field)
- if err != nil {
- return nil, err
- }
- }
- return factory.NewStruct(id, s.GetMessageName(), fields), nil
-}
-
-func exprStructField(factory ExprFactory, id int64, f *exprpb.Expr_CreateStruct_Entry) (EntryExpr, error) {
- v, err := exprInternal(factory, f.GetValue())
- if err != nil {
- return nil, err
- }
- return factory.NewStructField(id, f.GetFieldKey(), v, f.GetOptionalEntry()), nil
-}
-
-// ExprToProto serializes an ast.Expr value to a protobuf Expr representation.
-func ExprToProto(e Expr) (*exprpb.Expr, error) {
- if e == nil {
- return &exprpb.Expr{}, nil
- }
- switch e.Kind() {
- case CallKind:
- return protoCall(e.ID(), e.AsCall())
- case ComprehensionKind:
- return protoComprehension(e.ID(), e.AsComprehension())
- case IdentKind:
- return protoIdent(e.ID(), e.AsIdent())
- case ListKind:
- return protoList(e.ID(), e.AsList())
- case LiteralKind:
- return protoLiteral(e.ID(), e.AsLiteral())
- case MapKind:
- return protoMap(e.ID(), e.AsMap())
- case SelectKind:
- return protoSelect(e.ID(), e.AsSelect())
- case StructKind:
- return protoStruct(e.ID(), e.AsStruct())
- case UnspecifiedExprKind:
- // Handle the case where a macro reference may be getting translated.
- // A nested macro 'pointer' is a non-zero expression id with no kind set.
- if e.ID() != 0 {
- return &exprpb.Expr{Id: e.ID()}, nil
- }
- return &exprpb.Expr{}, nil
- }
- return nil, fmt.Errorf("unsupported expr kind: %v", e)
-}
-
-// EntryExprToProto converts an ast.EntryExpr to a protobuf CreateStruct entry
-func EntryExprToProto(e EntryExpr) (*exprpb.Expr_CreateStruct_Entry, error) {
- switch e.Kind() {
- case MapEntryKind:
- return protoMapEntry(e.ID(), e.AsMapEntry())
- case StructFieldKind:
- return protoStructField(e.ID(), e.AsStructField())
- case UnspecifiedEntryExprKind:
- return &exprpb.Expr_CreateStruct_Entry{}, nil
- }
- return nil, fmt.Errorf("unsupported expr entry kind: %v", e)
-}
-
-func protoCall(id int64, call CallExpr) (*exprpb.Expr, error) {
- var err error
- var target *exprpb.Expr
- if call.IsMemberFunction() {
- target, err = ExprToProto(call.Target())
- if err != nil {
- return nil, err
- }
- }
- callArgs := call.Args()
- args := make([]*exprpb.Expr, len(callArgs))
- for i, a := range callArgs {
- args[i], err = ExprToProto(a)
- if err != nil {
- return nil, err
- }
- }
- return &exprpb.Expr{
- Id: id,
- ExprKind: &exprpb.Expr_CallExpr{
- CallExpr: &exprpb.Expr_Call{
- Function: call.FunctionName(),
- Target: target,
- Args: args,
- },
- },
- }, nil
-}
-
-func protoComprehension(id int64, comp ComprehensionExpr) (*exprpb.Expr, error) {
- iterRange, err := ExprToProto(comp.IterRange())
- if err != nil {
- return nil, err
- }
- accuInit, err := ExprToProto(comp.AccuInit())
- if err != nil {
- return nil, err
- }
- loopCond, err := ExprToProto(comp.LoopCondition())
- if err != nil {
- return nil, err
- }
- loopStep, err := ExprToProto(comp.LoopStep())
- if err != nil {
- return nil, err
- }
- result, err := ExprToProto(comp.Result())
- if err != nil {
- return nil, err
- }
- return &exprpb.Expr{
- Id: id,
- ExprKind: &exprpb.Expr_ComprehensionExpr{
- ComprehensionExpr: &exprpb.Expr_Comprehension{
- IterVar: comp.IterVar(),
- IterRange: iterRange,
- AccuVar: comp.AccuVar(),
- AccuInit: accuInit,
- LoopCondition: loopCond,
- LoopStep: loopStep,
- Result: result,
- },
- },
- }, nil
-}
-
-func protoIdent(id int64, name string) (*exprpb.Expr, error) {
- return &exprpb.Expr{
- Id: id,
- ExprKind: &exprpb.Expr_IdentExpr{
- IdentExpr: &exprpb.Expr_Ident{
- Name: name,
- },
- },
- }, nil
-}
-
-func protoList(id int64, list ListExpr) (*exprpb.Expr, error) {
- var err error
- elems := make([]*exprpb.Expr, list.Size())
- for i, e := range list.Elements() {
- elems[i], err = ExprToProto(e)
- if err != nil {
- return nil, err
- }
- }
- return &exprpb.Expr{
- Id: id,
- ExprKind: &exprpb.Expr_ListExpr{
- ListExpr: &exprpb.Expr_CreateList{
- Elements: elems,
- OptionalIndices: list.OptionalIndices(),
- },
- },
- }, nil
-}
-
-func protoLiteral(id int64, val ref.Val) (*exprpb.Expr, error) {
- c, err := ValToConstant(val)
- if err != nil {
- return nil, err
- }
- return &exprpb.Expr{
- Id: id,
- ExprKind: &exprpb.Expr_ConstExpr{
- ConstExpr: c,
- },
- }, nil
-}
-
-func protoMap(id int64, m MapExpr) (*exprpb.Expr, error) {
- entries := make([]*exprpb.Expr_CreateStruct_Entry, len(m.Entries()))
- var err error
- for i, e := range m.Entries() {
- entries[i], err = EntryExprToProto(e)
- if err != nil {
- return nil, err
- }
- }
- return &exprpb.Expr{
- Id: id,
- ExprKind: &exprpb.Expr_StructExpr{
- StructExpr: &exprpb.Expr_CreateStruct{
- Entries: entries,
- },
- },
- }, nil
-}
-
-func protoMapEntry(id int64, e MapEntry) (*exprpb.Expr_CreateStruct_Entry, error) {
- k, err := ExprToProto(e.Key())
- if err != nil {
- return nil, err
- }
- v, err := ExprToProto(e.Value())
- if err != nil {
- return nil, err
- }
- return &exprpb.Expr_CreateStruct_Entry{
- Id: id,
- KeyKind: &exprpb.Expr_CreateStruct_Entry_MapKey{
- MapKey: k,
- },
- Value: v,
- OptionalEntry: e.IsOptional(),
- }, nil
-}
-
-func protoSelect(id int64, s SelectExpr) (*exprpb.Expr, error) {
- op, err := ExprToProto(s.Operand())
- if err != nil {
- return nil, err
- }
- return &exprpb.Expr{
- Id: id,
- ExprKind: &exprpb.Expr_SelectExpr{
- SelectExpr: &exprpb.Expr_Select{
- Operand: op,
- Field: s.FieldName(),
- TestOnly: s.IsTestOnly(),
- },
- },
- }, nil
-}
-
-func protoStruct(id int64, s StructExpr) (*exprpb.Expr, error) {
- entries := make([]*exprpb.Expr_CreateStruct_Entry, len(s.Fields()))
- var err error
- for i, e := range s.Fields() {
- entries[i], err = EntryExprToProto(e)
- if err != nil {
- return nil, err
- }
- }
- return &exprpb.Expr{
- Id: id,
- ExprKind: &exprpb.Expr_StructExpr{
- StructExpr: &exprpb.Expr_CreateStruct{
- MessageName: s.TypeName(),
- Entries: entries,
- },
- },
- }, nil
-}
-
-func protoStructField(id int64, f StructField) (*exprpb.Expr_CreateStruct_Entry, error) {
- v, err := ExprToProto(f.Value())
- if err != nil {
- return nil, err
- }
- return &exprpb.Expr_CreateStruct_Entry{
- Id: id,
- KeyKind: &exprpb.Expr_CreateStruct_Entry_FieldKey{
- FieldKey: f.Name(),
- },
- Value: v,
- OptionalEntry: f.IsOptional(),
- }, nil
-}
-
-// SourceInfoToProto serializes an ast.SourceInfo value to a protobuf SourceInfo object.
-func SourceInfoToProto(info *SourceInfo) (*exprpb.SourceInfo, error) {
- if info == nil {
- return &exprpb.SourceInfo{}, nil
- }
- sourceInfo := &exprpb.SourceInfo{
- SyntaxVersion: info.SyntaxVersion(),
- Location: info.Description(),
- LineOffsets: info.LineOffsets(),
- Positions: make(map[int64]int32, len(info.OffsetRanges())),
- MacroCalls: make(map[int64]*exprpb.Expr, len(info.MacroCalls())),
- }
- for id, offset := range info.OffsetRanges() {
- sourceInfo.Positions[id] = offset.Start
- }
- for id, e := range info.MacroCalls() {
- call, err := ExprToProto(e)
- if err != nil {
- return nil, err
- }
- sourceInfo.MacroCalls[id] = call
- }
- return sourceInfo, nil
-}
-
-// ProtoToSourceInfo deserializes the protobuf into a native SourceInfo value.
-func ProtoToSourceInfo(info *exprpb.SourceInfo) (*SourceInfo, error) {
- sourceInfo := &SourceInfo{
- syntax: info.GetSyntaxVersion(),
- desc: info.GetLocation(),
- lines: info.GetLineOffsets(),
- offsetRanges: make(map[int64]OffsetRange, len(info.GetPositions())),
- macroCalls: make(map[int64]Expr, len(info.GetMacroCalls())),
- }
- for id, offset := range info.GetPositions() {
- sourceInfo.SetOffsetRange(id, OffsetRange{Start: offset, Stop: offset})
- }
- for id, e := range info.GetMacroCalls() {
- call, err := ProtoToExpr(e)
- if err != nil {
- return nil, err
- }
- sourceInfo.SetMacroCall(id, call)
- }
- return sourceInfo, nil
-}
-
-// ReferenceInfoToProto converts a ReferenceInfo instance to a protobuf Reference suitable for serialization.
-func ReferenceInfoToProto(info *ReferenceInfo) (*exprpb.Reference, error) {
- c, err := ValToConstant(info.Value)
- if err != nil {
- return nil, err
- }
- return &exprpb.Reference{
- Name: info.Name,
- OverloadId: info.OverloadIDs,
- Value: c,
- }, nil
-}
-
-// ProtoToReferenceInfo converts a protobuf Reference into a CEL-native ReferenceInfo instance.
-func ProtoToReferenceInfo(ref *exprpb.Reference) (*ReferenceInfo, error) {
- v, err := ConstantToVal(ref.GetValue())
- if err != nil {
- return nil, err
- }
- return &ReferenceInfo{
- Name: ref.GetName(),
- OverloadIDs: ref.GetOverloadId(),
- Value: v,
- }, nil
-}
-
-// ValToConstant converts a CEL-native ref.Val to a protobuf Constant.
-//
-// Only simple scalar types are supported by this method.
-func ValToConstant(v ref.Val) (*exprpb.Constant, error) {
- if v == nil {
- return nil, nil
- }
- switch v.Type() {
- case types.BoolType:
- return &exprpb.Constant{ConstantKind: &exprpb.Constant_BoolValue{BoolValue: v.Value().(bool)}}, nil
- case types.BytesType:
- return &exprpb.Constant{ConstantKind: &exprpb.Constant_BytesValue{BytesValue: v.Value().([]byte)}}, nil
- case types.DoubleType:
- return &exprpb.Constant{ConstantKind: &exprpb.Constant_DoubleValue{DoubleValue: v.Value().(float64)}}, nil
- case types.IntType:
- return &exprpb.Constant{ConstantKind: &exprpb.Constant_Int64Value{Int64Value: v.Value().(int64)}}, nil
- case types.NullType:
- return &exprpb.Constant{ConstantKind: &exprpb.Constant_NullValue{NullValue: structpb.NullValue_NULL_VALUE}}, nil
- case types.StringType:
- return &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: v.Value().(string)}}, nil
- case types.UintType:
- return &exprpb.Constant{ConstantKind: &exprpb.Constant_Uint64Value{Uint64Value: v.Value().(uint64)}}, nil
- }
- return nil, fmt.Errorf("unsupported constant kind: %v", v.Type())
-}
-
-// ConstantToVal converts a protobuf Constant to a CEL-native ref.Val.
-func ConstantToVal(c *exprpb.Constant) (ref.Val, error) {
- if c == nil {
- return nil, nil
- }
- switch c.GetConstantKind().(type) {
- case *exprpb.Constant_BoolValue:
- return types.Bool(c.GetBoolValue()), nil
- case *exprpb.Constant_BytesValue:
- return types.Bytes(c.GetBytesValue()), nil
- case *exprpb.Constant_DoubleValue:
- return types.Double(c.GetDoubleValue()), nil
- case *exprpb.Constant_Int64Value:
- return types.Int(c.GetInt64Value()), nil
- case *exprpb.Constant_NullValue:
- return types.NullValue, nil
- case *exprpb.Constant_StringValue:
- return types.String(c.GetStringValue()), nil
- case *exprpb.Constant_Uint64Value:
- return types.Uint(c.GetUint64Value()), nil
- }
- return nil, fmt.Errorf("unsupported constant kind: %v", c.GetConstantKind())
-}
diff --git a/vendor/github.com/google/cel-go/common/ast/expr.go b/vendor/github.com/google/cel-go/common/ast/expr.go
deleted file mode 100644
index c9d88bbaa..000000000
--- a/vendor/github.com/google/cel-go/common/ast/expr.go
+++ /dev/null
@@ -1,860 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ast
-
-import (
- "github.com/google/cel-go/common/types/ref"
-)
-
-// ExprKind represents the expression node kind.
-type ExprKind int
-
-const (
- // UnspecifiedExprKind represents an unset expression with no specified properties.
- UnspecifiedExprKind ExprKind = iota
-
- // CallKind represents a function call.
- CallKind
-
- // ComprehensionKind represents a comprehension expression generated by a macro.
- ComprehensionKind
-
- // IdentKind represents a simple variable, constant, or type identifier.
- IdentKind
-
- // ListKind represents a list literal expression.
- ListKind
-
- // LiteralKind represents a primitive scalar literal.
- LiteralKind
-
- // MapKind represents a map literal expression.
- MapKind
-
- // SelectKind represents a field selection expression.
- SelectKind
-
- // StructKind represents a struct literal expression.
- StructKind
-)
-
-// Expr represents the base expression node in a CEL abstract syntax tree.
-//
-// Depending on the `Kind()` value, the Expr may be converted to a concrete expression type
-// as indicated by the `As` methods.
-type Expr interface {
- // ID of the expression as it appears in the AST
- ID() int64
-
- // Kind of the expression node. See ExprKind for the valid enum values.
- Kind() ExprKind
-
- // AsCall adapts the expr into a CallExpr
- //
- // The Kind() must be equal to a CallKind for the conversion to be well-defined.
- AsCall() CallExpr
-
- // AsComprehension adapts the expr into a ComprehensionExpr.
- //
- // The Kind() must be equal to a ComprehensionKind for the conversion to be well-defined.
- AsComprehension() ComprehensionExpr
-
- // AsIdent adapts the expr into an identifier string.
- //
- // The Kind() must be equal to an IdentKind for the conversion to be well-defined.
- AsIdent() string
-
- // AsLiteral adapts the expr into a constant ref.Val.
- //
- // The Kind() must be equal to a LiteralKind for the conversion to be well-defined.
- AsLiteral() ref.Val
-
- // AsList adapts the expr into a ListExpr.
- //
- // The Kind() must be equal to a ListKind for the conversion to be well-defined.
- AsList() ListExpr
-
- // AsMap adapts the expr into a MapExpr.
- //
- // The Kind() must be equal to a MapKind for the conversion to be well-defined.
- AsMap() MapExpr
-
- // AsSelect adapts the expr into a SelectExpr.
- //
- // The Kind() must be equal to a SelectKind for the conversion to be well-defined.
- AsSelect() SelectExpr
-
- // AsStruct adapts the expr into a StructExpr.
- //
- // The Kind() must be equal to a StructKind for the conversion to be well-defined.
- AsStruct() StructExpr
-
- // RenumberIDs performs an in-place update of the expression and all of its descendants' numeric ids.
- RenumberIDs(IDGenerator)
-
- // SetKindCase replaces the contents of the current expression with the contents of the other.
- //
- // The SetKindCase takes ownership of any expression instances referenced within the input Expr.
- // A shallow copy is made of the Expr value itself, but not a deep one.
- //
- // This method should only be used during AST rewrites using temporary Expr values.
- SetKindCase(Expr)
-
- // isExpr is a marker interface.
- isExpr()
-}
-
-// EntryExprKind represents the possible EntryExpr kinds.
-type EntryExprKind int
-
-const (
- // UnspecifiedEntryExprKind indicates that the entry expr is not set.
- UnspecifiedEntryExprKind EntryExprKind = iota
-
- // MapEntryKind indicates that the entry is a MapEntry type with key and value expressions.
- MapEntryKind
-
- // StructFieldKind indicates that the entry is a StructField with a field name and initializer
- // expression.
- StructFieldKind
-)
-
-// EntryExpr represents the base entry expression in a CEL map or struct literal.
-type EntryExpr interface {
- // ID of the entry as it appears in the AST.
- ID() int64
-
- // Kind of the entry expression node. See EntryExprKind for valid enum values.
- Kind() EntryExprKind
-
- // AsMapEntry casts the EntryExpr to a MapEntry.
- //
- // The Kind() must be equal to MapEntryKind for the conversion to be well-defined.
- AsMapEntry() MapEntry
-
- // AsStructField casts the EntryExpr to a StructField
- //
- // The Kind() must be equal to StructFieldKind for the conversion to be well-defined.
- AsStructField() StructField
-
- // RenumberIDs performs an in-place update of the expression and all of its descendants' numeric ids.
- RenumberIDs(IDGenerator)
-
- isEntryExpr()
-}
-
-// IDGenerator produces unique ids suitable for tagging expression nodes
-type IDGenerator func(originalID int64) int64
-
-// CallExpr defines an interface for inspecting a function call and its arguments.
-type CallExpr interface {
- // FunctionName returns the name of the function.
- FunctionName() string
-
- // IsMemberFunction returns whether the call has a non-nil target indicating it is a member function
- IsMemberFunction() bool
-
- // Target returns the target of the expression if one is present.
- Target() Expr
-
- // Args returns the list of call arguments, excluding the target.
- Args() []Expr
-
- // marker interface method
- isExpr()
-}
-
-// ListExpr defines an interface for inspecting a list literal expression.
-type ListExpr interface {
- // Elements returns the list elements as navigable expressions.
- Elements() []Expr
-
- // OptionalIndices returns the list of optional indices in the list literal.
- OptionalIndices() []int32
-
- // IsOptional indicates whether the given element index is optional.
- IsOptional(int32) bool
-
- // Size returns the number of elements in the list.
- Size() int
-
- // marker interface method
- isExpr()
-}
-
-// SelectExpr defines an interface for inspecting a select expression.
-type SelectExpr interface {
- // Operand returns the selection operand expression.
- Operand() Expr
-
- // FieldName returns the field name being selected from the operand.
- FieldName() string
-
- // IsTestOnly indicates whether the select expression is a presence test generated by a macro.
- IsTestOnly() bool
-
- // marker interface method
- isExpr()
-}
-
-// MapExpr defines an interface for inspecting a map expression.
-type MapExpr interface {
- // Entries returns the map key value pairs as EntryExpr values.
- Entries() []EntryExpr
-
- // Size returns the number of entries in the map.
- Size() int
-
- // marker interface method
- isExpr()
-}
-
-// MapEntry defines an interface for inspecting a map entry.
-type MapEntry interface {
- // Key returns the map entry key expression.
- Key() Expr
-
- // Value returns the map entry value expression.
- Value() Expr
-
- // IsOptional returns whether the entry is optional.
- IsOptional() bool
-
- // marker interface method
- isEntryExpr()
-}
-
-// StructExpr defines an interface for inspecting a struct and its field initializers.
-type StructExpr interface {
- // TypeName returns the struct type name.
- TypeName() string
-
- // Fields returns the set of field initializers in the struct expression as EntryExpr values.
- Fields() []EntryExpr
-
- // marker interface method
- isExpr()
-}
-
-// StructField defines an interface for inspecting a struct field initialization.
-type StructField interface {
- // Name returns the name of the field.
- Name() string
-
- // Value returns the field initialization expression.
- Value() Expr
-
- // IsOptional returns whether the field is optional.
- IsOptional() bool
-
- // marker interface method
- isEntryExpr()
-}
-
-// ComprehensionExpr defines an interface for inspecting a comprehension expression.
-type ComprehensionExpr interface {
- // IterRange returns the iteration range expression.
- IterRange() Expr
-
- // IterVar returns the iteration variable name.
- IterVar() string
-
- // AccuVar returns the accumulation variable name.
- AccuVar() string
-
- // AccuInit returns the accumulation variable initialization expression.
- AccuInit() Expr
-
- // LoopCondition returns the loop condition expression.
- LoopCondition() Expr
-
- // LoopStep returns the loop step expression.
- LoopStep() Expr
-
- // Result returns the comprehension result expression.
- Result() Expr
-
- // marker interface method
- isExpr()
-}
-
-var _ Expr = &expr{}
-
-type expr struct {
- id int64
- exprKindCase
-}
-
-type exprKindCase interface {
- Kind() ExprKind
-
- renumberIDs(IDGenerator)
-
- isExpr()
-}
-
-func (e *expr) ID() int64 {
- if e == nil {
- return 0
- }
- return e.id
-}
-
-func (e *expr) Kind() ExprKind {
- if e == nil || e.exprKindCase == nil {
- return UnspecifiedExprKind
- }
- return e.exprKindCase.Kind()
-}
-
-func (e *expr) AsCall() CallExpr {
- if e.Kind() != CallKind {
- return nilCall
- }
- return e.exprKindCase.(CallExpr)
-}
-
-func (e *expr) AsComprehension() ComprehensionExpr {
- if e.Kind() != ComprehensionKind {
- return nilCompre
- }
- return e.exprKindCase.(ComprehensionExpr)
-}
-
-func (e *expr) AsIdent() string {
- if e.Kind() != IdentKind {
- return ""
- }
- return string(e.exprKindCase.(baseIdentExpr))
-}
-
-func (e *expr) AsLiteral() ref.Val {
- if e.Kind() != LiteralKind {
- return nil
- }
- return e.exprKindCase.(*baseLiteral).Val
-}
-
-func (e *expr) AsList() ListExpr {
- if e.Kind() != ListKind {
- return nilList
- }
- return e.exprKindCase.(ListExpr)
-}
-
-func (e *expr) AsMap() MapExpr {
- if e.Kind() != MapKind {
- return nilMap
- }
- return e.exprKindCase.(MapExpr)
-}
-
-func (e *expr) AsSelect() SelectExpr {
- if e.Kind() != SelectKind {
- return nilSel
- }
- return e.exprKindCase.(SelectExpr)
-}
-
-func (e *expr) AsStruct() StructExpr {
- if e.Kind() != StructKind {
- return nilStruct
- }
- return e.exprKindCase.(StructExpr)
-}
-
-func (e *expr) SetKindCase(other Expr) {
- if e == nil {
- return
- }
- if other == nil {
- e.exprKindCase = nil
- return
- }
- switch other.Kind() {
- case CallKind:
- c := other.AsCall()
- e.exprKindCase = &baseCallExpr{
- function: c.FunctionName(),
- target: c.Target(),
- args: c.Args(),
- isMember: c.IsMemberFunction(),
- }
- case ComprehensionKind:
- c := other.AsComprehension()
- e.exprKindCase = &baseComprehensionExpr{
- iterRange: c.IterRange(),
- iterVar: c.IterVar(),
- accuVar: c.AccuVar(),
- accuInit: c.AccuInit(),
- loopCond: c.LoopCondition(),
- loopStep: c.LoopStep(),
- result: c.Result(),
- }
- case IdentKind:
- e.exprKindCase = baseIdentExpr(other.AsIdent())
- case ListKind:
- l := other.AsList()
- optIndexMap := make(map[int32]struct{}, len(l.OptionalIndices()))
- for _, idx := range l.OptionalIndices() {
- optIndexMap[idx] = struct{}{}
- }
- e.exprKindCase = &baseListExpr{
- elements: l.Elements(),
- optIndices: l.OptionalIndices(),
- optIndexMap: optIndexMap,
- }
- case LiteralKind:
- e.exprKindCase = &baseLiteral{Val: other.AsLiteral()}
- case MapKind:
- e.exprKindCase = &baseMapExpr{
- entries: other.AsMap().Entries(),
- }
- case SelectKind:
- s := other.AsSelect()
- e.exprKindCase = &baseSelectExpr{
- operand: s.Operand(),
- field: s.FieldName(),
- testOnly: s.IsTestOnly(),
- }
- case StructKind:
- s := other.AsStruct()
- e.exprKindCase = &baseStructExpr{
- typeName: s.TypeName(),
- fields: s.Fields(),
- }
- case UnspecifiedExprKind:
- e.exprKindCase = nil
- }
-}
-
-func (e *expr) RenumberIDs(idGen IDGenerator) {
- if e == nil {
- return
- }
- e.id = idGen(e.id)
- if e.exprKindCase != nil {
- e.exprKindCase.renumberIDs(idGen)
- }
-}
-
-type baseCallExpr struct {
- function string
- target Expr
- args []Expr
- isMember bool
-}
-
-func (*baseCallExpr) Kind() ExprKind {
- return CallKind
-}
-
-func (e *baseCallExpr) FunctionName() string {
- if e == nil {
- return ""
- }
- return e.function
-}
-
-func (e *baseCallExpr) IsMemberFunction() bool {
- if e == nil {
- return false
- }
- return e.isMember
-}
-
-func (e *baseCallExpr) Target() Expr {
- if e == nil || !e.IsMemberFunction() {
- return nilExpr
- }
- return e.target
-}
-
-func (e *baseCallExpr) Args() []Expr {
- if e == nil {
- return []Expr{}
- }
- return e.args
-}
-
-func (e *baseCallExpr) renumberIDs(idGen IDGenerator) {
- if e.IsMemberFunction() {
- e.Target().RenumberIDs(idGen)
- }
- for _, arg := range e.Args() {
- arg.RenumberIDs(idGen)
- }
-}
-
-func (*baseCallExpr) isExpr() {}
-
-var _ ComprehensionExpr = &baseComprehensionExpr{}
-
-type baseComprehensionExpr struct {
- iterRange Expr
- iterVar string
- accuVar string
- accuInit Expr
- loopCond Expr
- loopStep Expr
- result Expr
-}
-
-func (*baseComprehensionExpr) Kind() ExprKind {
- return ComprehensionKind
-}
-
-func (e *baseComprehensionExpr) IterRange() Expr {
- if e == nil {
- return nilExpr
- }
- return e.iterRange
-}
-
-func (e *baseComprehensionExpr) IterVar() string {
- return e.iterVar
-}
-
-func (e *baseComprehensionExpr) AccuVar() string {
- return e.accuVar
-}
-
-func (e *baseComprehensionExpr) AccuInit() Expr {
- if e == nil {
- return nilExpr
- }
- return e.accuInit
-}
-
-func (e *baseComprehensionExpr) LoopCondition() Expr {
- if e == nil {
- return nilExpr
- }
- return e.loopCond
-}
-
-func (e *baseComprehensionExpr) LoopStep() Expr {
- if e == nil {
- return nilExpr
- }
- return e.loopStep
-}
-
-func (e *baseComprehensionExpr) Result() Expr {
- if e == nil {
- return nilExpr
- }
- return e.result
-}
-
-func (e *baseComprehensionExpr) renumberIDs(idGen IDGenerator) {
- e.IterRange().RenumberIDs(idGen)
- e.AccuInit().RenumberIDs(idGen)
- e.LoopCondition().RenumberIDs(idGen)
- e.LoopStep().RenumberIDs(idGen)
- e.Result().RenumberIDs(idGen)
-}
-
-func (*baseComprehensionExpr) isExpr() {}
-
-var _ exprKindCase = baseIdentExpr("")
-
-type baseIdentExpr string
-
-func (baseIdentExpr) Kind() ExprKind {
- return IdentKind
-}
-
-func (e baseIdentExpr) renumberIDs(IDGenerator) {}
-
-func (baseIdentExpr) isExpr() {}
-
-var _ exprKindCase = &baseLiteral{}
-var _ ref.Val = &baseLiteral{}
-
-type baseLiteral struct {
- ref.Val
-}
-
-func (*baseLiteral) Kind() ExprKind {
- return LiteralKind
-}
-
-func (l *baseLiteral) renumberIDs(IDGenerator) {}
-
-func (*baseLiteral) isExpr() {}
-
-var _ ListExpr = &baseListExpr{}
-
-type baseListExpr struct {
- elements []Expr
- optIndices []int32
- optIndexMap map[int32]struct{}
-}
-
-func (*baseListExpr) Kind() ExprKind {
- return ListKind
-}
-
-func (e *baseListExpr) Elements() []Expr {
- if e == nil {
- return []Expr{}
- }
- return e.elements
-}
-
-func (e *baseListExpr) IsOptional(index int32) bool {
- _, found := e.optIndexMap[index]
- return found
-}
-
-func (e *baseListExpr) OptionalIndices() []int32 {
- if e == nil {
- return []int32{}
- }
- return e.optIndices
-}
-
-func (e *baseListExpr) Size() int {
- return len(e.Elements())
-}
-
-func (e *baseListExpr) renumberIDs(idGen IDGenerator) {
- for _, elem := range e.Elements() {
- elem.RenumberIDs(idGen)
- }
-}
-
-func (*baseListExpr) isExpr() {}
-
-type baseMapExpr struct {
- entries []EntryExpr
-}
-
-func (*baseMapExpr) Kind() ExprKind {
- return MapKind
-}
-
-func (e *baseMapExpr) Entries() []EntryExpr {
- if e == nil {
- return []EntryExpr{}
- }
- return e.entries
-}
-
-func (e *baseMapExpr) Size() int {
- return len(e.Entries())
-}
-
-func (e *baseMapExpr) renumberIDs(idGen IDGenerator) {
- for _, entry := range e.Entries() {
- entry.RenumberIDs(idGen)
- }
-}
-
-func (*baseMapExpr) isExpr() {}
-
-type baseSelectExpr struct {
- operand Expr
- field string
- testOnly bool
-}
-
-func (*baseSelectExpr) Kind() ExprKind {
- return SelectKind
-}
-
-func (e *baseSelectExpr) Operand() Expr {
- if e == nil || e.operand == nil {
- return nilExpr
- }
- return e.operand
-}
-
-func (e *baseSelectExpr) FieldName() string {
- if e == nil {
- return ""
- }
- return e.field
-}
-
-func (e *baseSelectExpr) IsTestOnly() bool {
- if e == nil {
- return false
- }
- return e.testOnly
-}
-
-func (e *baseSelectExpr) renumberIDs(idGen IDGenerator) {
- e.Operand().RenumberIDs(idGen)
-}
-
-func (*baseSelectExpr) isExpr() {}
-
-type baseStructExpr struct {
- typeName string
- fields []EntryExpr
-}
-
-func (*baseStructExpr) Kind() ExprKind {
- return StructKind
-}
-
-func (e *baseStructExpr) TypeName() string {
- if e == nil {
- return ""
- }
- return e.typeName
-}
-
-func (e *baseStructExpr) Fields() []EntryExpr {
- if e == nil {
- return []EntryExpr{}
- }
- return e.fields
-}
-
-func (e *baseStructExpr) renumberIDs(idGen IDGenerator) {
- for _, f := range e.Fields() {
- f.RenumberIDs(idGen)
- }
-}
-
-func (*baseStructExpr) isExpr() {}
-
-type entryExprKindCase interface {
- Kind() EntryExprKind
-
- renumberIDs(IDGenerator)
-
- isEntryExpr()
-}
-
-var _ EntryExpr = &entryExpr{}
-
-type entryExpr struct {
- id int64
- entryExprKindCase
-}
-
-func (e *entryExpr) ID() int64 {
- return e.id
-}
-
-func (e *entryExpr) AsMapEntry() MapEntry {
- if e.Kind() != MapEntryKind {
- return nilMapEntry
- }
- return e.entryExprKindCase.(MapEntry)
-}
-
-func (e *entryExpr) AsStructField() StructField {
- if e.Kind() != StructFieldKind {
- return nilStructField
- }
- return e.entryExprKindCase.(StructField)
-}
-
-func (e *entryExpr) RenumberIDs(idGen IDGenerator) {
- e.id = idGen(e.id)
- e.entryExprKindCase.renumberIDs(idGen)
-}
-
-type baseMapEntry struct {
- key Expr
- value Expr
- isOptional bool
-}
-
-func (e *baseMapEntry) Kind() EntryExprKind {
- return MapEntryKind
-}
-
-func (e *baseMapEntry) Key() Expr {
- if e == nil {
- return nilExpr
- }
- return e.key
-}
-
-func (e *baseMapEntry) Value() Expr {
- if e == nil {
- return nilExpr
- }
- return e.value
-}
-
-func (e *baseMapEntry) IsOptional() bool {
- if e == nil {
- return false
- }
- return e.isOptional
-}
-
-func (e *baseMapEntry) renumberIDs(idGen IDGenerator) {
- e.Key().RenumberIDs(idGen)
- e.Value().RenumberIDs(idGen)
-}
-
-func (*baseMapEntry) isEntryExpr() {}
-
-type baseStructField struct {
- field string
- value Expr
- isOptional bool
-}
-
-func (f *baseStructField) Kind() EntryExprKind {
- return StructFieldKind
-}
-
-func (f *baseStructField) Name() string {
- if f == nil {
- return ""
- }
- return f.field
-}
-
-func (f *baseStructField) Value() Expr {
- if f == nil {
- return nilExpr
- }
- return f.value
-}
-
-func (f *baseStructField) IsOptional() bool {
- if f == nil {
- return false
- }
- return f.isOptional
-}
-
-func (f *baseStructField) renumberIDs(idGen IDGenerator) {
- f.Value().RenumberIDs(idGen)
-}
-
-func (*baseStructField) isEntryExpr() {}
-
-var (
- nilExpr *expr = nil
- nilCall *baseCallExpr = nil
- nilCompre *baseComprehensionExpr = nil
- nilList *baseListExpr = nil
- nilMap *baseMapExpr = nil
- nilMapEntry *baseMapEntry = nil
- nilSel *baseSelectExpr = nil
- nilStruct *baseStructExpr = nil
- nilStructField *baseStructField = nil
-)
diff --git a/vendor/github.com/google/cel-go/common/ast/factory.go b/vendor/github.com/google/cel-go/common/ast/factory.go
deleted file mode 100644
index b7f36e72a..000000000
--- a/vendor/github.com/google/cel-go/common/ast/factory.go
+++ /dev/null
@@ -1,303 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ast
-
-import "github.com/google/cel-go/common/types/ref"
-
-// ExprFactory interface defines a set of methods necessary for building native expression values.
-type ExprFactory interface {
- // CopyExpr creates a deep copy of the input Expr value.
- CopyExpr(Expr) Expr
-
- // CopyEntryExpr creates a deep copy of the input EntryExpr value.
- CopyEntryExpr(EntryExpr) EntryExpr
-
- // NewCall creates an Expr value representing a global function call.
- NewCall(id int64, function string, args ...Expr) Expr
-
- // NewComprehension creates an Expr value representing a comprehension over a value range.
- NewComprehension(id int64, iterRange Expr, iterVar, accuVar string, accuInit, loopCondition, loopStep, result Expr) Expr
-
- // NewMemberCall creates an Expr value representing a member function call.
- NewMemberCall(id int64, function string, receiver Expr, args ...Expr) Expr
-
- // NewIdent creates an Expr value representing an identifier.
- NewIdent(id int64, name string) Expr
-
- // NewAccuIdent creates an Expr value representing an accumulator identifier within a
- // comprehension.
- NewAccuIdent(id int64) Expr
-
- // NewLiteral creates an Expr value representing a literal value, such as a string or integer.
- NewLiteral(id int64, value ref.Val) Expr
-
- // NewList creates an Expr value representing a list literal expression with optional indices.
- //
- // Optional indices will typically be empty unless the CEL optional types are enabled.
- NewList(id int64, elems []Expr, optIndices []int32) Expr
-
- // NewMap creates an Expr value representing a map literal expression
- NewMap(id int64, entries []EntryExpr) Expr
-
- // NewMapEntry creates a MapEntry with a given key, value, and a flag indicating whether
- // the key is optionally set.
- NewMapEntry(id int64, key, value Expr, isOptional bool) EntryExpr
-
- // NewPresenceTest creates an Expr representing a field presence test on an operand expression.
- NewPresenceTest(id int64, operand Expr, field string) Expr
-
- // NewSelect creates an Expr representing a field selection on an operand expression.
- NewSelect(id int64, operand Expr, field string) Expr
-
- // NewStruct creates an Expr value representing a struct literal with a given type name and a
- // set of field initializers.
- NewStruct(id int64, typeName string, fields []EntryExpr) Expr
-
- // NewStructField creates a StructField with a given field name, value, and a flag indicating
- // whether the field is optionally set.
- NewStructField(id int64, field string, value Expr, isOptional bool) EntryExpr
-
- // NewUnspecifiedExpr creates an empty expression node.
- NewUnspecifiedExpr(id int64) Expr
-
- isExprFactory()
-}
-
-type baseExprFactory struct{}
-
-// NewExprFactory creates an ExprFactory instance.
-func NewExprFactory() ExprFactory {
- return &baseExprFactory{}
-}
-
-func (fac *baseExprFactory) NewCall(id int64, function string, args ...Expr) Expr {
- if len(args) == 0 {
- args = []Expr{}
- }
- return fac.newExpr(
- id,
- &baseCallExpr{
- function: function,
- target: nilExpr,
- args: args,
- isMember: false,
- })
-}
-
-func (fac *baseExprFactory) NewMemberCall(id int64, function string, target Expr, args ...Expr) Expr {
- if len(args) == 0 {
- args = []Expr{}
- }
- return fac.newExpr(
- id,
- &baseCallExpr{
- function: function,
- target: target,
- args: args,
- isMember: true,
- })
-}
-
-func (fac *baseExprFactory) NewComprehension(id int64, iterRange Expr, iterVar, accuVar string, accuInit, loopCond, loopStep, result Expr) Expr {
- return fac.newExpr(
- id,
- &baseComprehensionExpr{
- iterRange: iterRange,
- iterVar: iterVar,
- accuVar: accuVar,
- accuInit: accuInit,
- loopCond: loopCond,
- loopStep: loopStep,
- result: result,
- })
-}
-
-func (fac *baseExprFactory) NewIdent(id int64, name string) Expr {
- return fac.newExpr(id, baseIdentExpr(name))
-}
-
-func (fac *baseExprFactory) NewAccuIdent(id int64) Expr {
- return fac.NewIdent(id, "__result__")
-}
-
-func (fac *baseExprFactory) NewLiteral(id int64, value ref.Val) Expr {
- return fac.newExpr(id, &baseLiteral{Val: value})
-}
-
-func (fac *baseExprFactory) NewList(id int64, elems []Expr, optIndices []int32) Expr {
- optIndexMap := make(map[int32]struct{}, len(optIndices))
- for _, idx := range optIndices {
- optIndexMap[idx] = struct{}{}
- }
- return fac.newExpr(id,
- &baseListExpr{
- elements: elems,
- optIndices: optIndices,
- optIndexMap: optIndexMap,
- })
-}
-
-func (fac *baseExprFactory) NewMap(id int64, entries []EntryExpr) Expr {
- return fac.newExpr(id, &baseMapExpr{entries: entries})
-}
-
-func (fac *baseExprFactory) NewMapEntry(id int64, key, value Expr, isOptional bool) EntryExpr {
- return fac.newEntryExpr(
- id,
- &baseMapEntry{
- key: key,
- value: value,
- isOptional: isOptional,
- })
-}
-
-func (fac *baseExprFactory) NewPresenceTest(id int64, operand Expr, field string) Expr {
- return fac.newExpr(
- id,
- &baseSelectExpr{
- operand: operand,
- field: field,
- testOnly: true,
- })
-}
-
-func (fac *baseExprFactory) NewSelect(id int64, operand Expr, field string) Expr {
- return fac.newExpr(
- id,
- &baseSelectExpr{
- operand: operand,
- field: field,
- })
-}
-
-func (fac *baseExprFactory) NewStruct(id int64, typeName string, fields []EntryExpr) Expr {
- return fac.newExpr(
- id,
- &baseStructExpr{
- typeName: typeName,
- fields: fields,
- })
-}
-
-func (fac *baseExprFactory) NewStructField(id int64, field string, value Expr, isOptional bool) EntryExpr {
- return fac.newEntryExpr(
- id,
- &baseStructField{
- field: field,
- value: value,
- isOptional: isOptional,
- })
-}
-
-func (fac *baseExprFactory) NewUnspecifiedExpr(id int64) Expr {
- return fac.newExpr(id, nil)
-}
-
-func (fac *baseExprFactory) CopyExpr(e Expr) Expr {
- // unwrap navigable expressions to avoid unnecessary allocations during copying.
- if nav, ok := e.(*navigableExprImpl); ok {
- e = nav.Expr
- }
- switch e.Kind() {
- case CallKind:
- c := e.AsCall()
- argsCopy := make([]Expr, len(c.Args()))
- for i, arg := range c.Args() {
- argsCopy[i] = fac.CopyExpr(arg)
- }
- if !c.IsMemberFunction() {
- return fac.NewCall(e.ID(), c.FunctionName(), argsCopy...)
- }
- return fac.NewMemberCall(e.ID(), c.FunctionName(), fac.CopyExpr(c.Target()), argsCopy...)
- case ComprehensionKind:
- compre := e.AsComprehension()
- return fac.NewComprehension(e.ID(),
- fac.CopyExpr(compre.IterRange()),
- compre.IterVar(),
- compre.AccuVar(),
- fac.CopyExpr(compre.AccuInit()),
- fac.CopyExpr(compre.LoopCondition()),
- fac.CopyExpr(compre.LoopStep()),
- fac.CopyExpr(compre.Result()))
- case IdentKind:
- return fac.NewIdent(e.ID(), e.AsIdent())
- case ListKind:
- l := e.AsList()
- elemsCopy := make([]Expr, l.Size())
- for i, elem := range l.Elements() {
- elemsCopy[i] = fac.CopyExpr(elem)
- }
- return fac.NewList(e.ID(), elemsCopy, l.OptionalIndices())
- case LiteralKind:
- return fac.NewLiteral(e.ID(), e.AsLiteral())
- case MapKind:
- m := e.AsMap()
- entriesCopy := make([]EntryExpr, m.Size())
- for i, entry := range m.Entries() {
- entriesCopy[i] = fac.CopyEntryExpr(entry)
- }
- return fac.NewMap(e.ID(), entriesCopy)
- case SelectKind:
- s := e.AsSelect()
- if s.IsTestOnly() {
- return fac.NewPresenceTest(e.ID(), fac.CopyExpr(s.Operand()), s.FieldName())
- }
- return fac.NewSelect(e.ID(), fac.CopyExpr(s.Operand()), s.FieldName())
- case StructKind:
- s := e.AsStruct()
- fieldsCopy := make([]EntryExpr, len(s.Fields()))
- for i, field := range s.Fields() {
- fieldsCopy[i] = fac.CopyEntryExpr(field)
- }
- return fac.NewStruct(e.ID(), s.TypeName(), fieldsCopy)
- default:
- return fac.NewUnspecifiedExpr(e.ID())
- }
-}
-
-func (fac *baseExprFactory) CopyEntryExpr(e EntryExpr) EntryExpr {
- switch e.Kind() {
- case MapEntryKind:
- entry := e.AsMapEntry()
- return fac.NewMapEntry(e.ID(),
- fac.CopyExpr(entry.Key()), fac.CopyExpr(entry.Value()), entry.IsOptional())
- case StructFieldKind:
- field := e.AsStructField()
- return fac.NewStructField(e.ID(),
- field.Name(), fac.CopyExpr(field.Value()), field.IsOptional())
- default:
- return fac.newEntryExpr(e.ID(), nil)
- }
-}
-
-func (*baseExprFactory) isExprFactory() {}
-
-func (fac *baseExprFactory) newExpr(id int64, e exprKindCase) Expr {
- return &expr{
- id: id,
- exprKindCase: e,
- }
-}
-
-func (fac *baseExprFactory) newEntryExpr(id int64, e entryExprKindCase) EntryExpr {
- return &entryExpr{
- id: id,
- entryExprKindCase: e,
- }
-}
-
-var (
- defaultFactory = &baseExprFactory{}
-)
diff --git a/vendor/github.com/google/cel-go/common/ast/navigable.go b/vendor/github.com/google/cel-go/common/ast/navigable.go
deleted file mode 100644
index f5ddf6aac..000000000
--- a/vendor/github.com/google/cel-go/common/ast/navigable.go
+++ /dev/null
@@ -1,652 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ast
-
-import (
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-// NavigableExpr represents the base navigable expression value with methods to inspect the
-// parent and child expressions.
-type NavigableExpr interface {
- Expr
-
- // Type of the expression.
- //
- // If the expression is type-checked, the type check metadata is returned. If the expression
- // has not been type-checked, the types.DynType value is returned.
- Type() *types.Type
-
- // Parent returns the parent expression node, if one exists.
- Parent() (NavigableExpr, bool)
-
- // Children returns a list of child expression nodes.
- Children() []NavigableExpr
-
- // Depth indicates the depth in the expression tree.
- //
- // The root expression has depth 0.
- Depth() int
-}
-
-// NavigateAST converts an AST to a NavigableExpr
-func NavigateAST(ast *AST) NavigableExpr {
- return NavigateExpr(ast, ast.Expr())
-}
-
-// NavigateExpr creates a NavigableExpr whose type information is backed by the input AST.
-//
-// If the expression is already a NavigableExpr, the parent and depth information will be
-// propagated on the new NavigableExpr value; otherwise, the expr value will be treated
-// as though it is the root of the expression graph with a depth of 0.
-func NavigateExpr(ast *AST, expr Expr) NavigableExpr {
- depth := 0
- var parent NavigableExpr = nil
- if nav, ok := expr.(NavigableExpr); ok {
- depth = nav.Depth()
- parent, _ = nav.Parent()
- }
- return newNavigableExpr(ast, parent, expr, depth)
-}
-
-// ExprMatcher takes a NavigableExpr and indicates whether the value is a match.
-//
-// This function type should be used with the `Match` and `MatchList` calls.
-type ExprMatcher func(NavigableExpr) bool
-
-// ConstantValueMatcher returns an ExprMatcher which will return true if the input NavigableExpr
-// is comprised of all constant values, such as a simple literal or even list and map literal.
-func ConstantValueMatcher() ExprMatcher {
- return matchIsConstantValue
-}
-
-// KindMatcher returns an ExprMatcher which will return true if the input NavigableExpr.Kind() matches
-// the specified `kind`.
-func KindMatcher(kind ExprKind) ExprMatcher {
- return func(e NavigableExpr) bool {
- return e.Kind() == kind
- }
-}
-
-// FunctionMatcher returns an ExprMatcher which will match NavigableExpr nodes of CallKind type whose
-// function name is equal to `funcName`.
-func FunctionMatcher(funcName string) ExprMatcher {
- return func(e NavigableExpr) bool {
- if e.Kind() != CallKind {
- return false
- }
- return e.AsCall().FunctionName() == funcName
- }
-}
-
-// AllMatcher returns an ExprMatcher which returns true for all descendants of a NavigableExpr, effectively flattening them into a list.
-//
-// Such a result would work well with subsequent MatchList calls.
-func AllMatcher() ExprMatcher {
- return func(NavigableExpr) bool {
- return true
- }
-}
-
-// MatchDescendants takes a NavigableExpr and ExprMatcher and produces a list of NavigableExpr values
-// matching the input criteria in post-order (bottom up).
-func MatchDescendants(expr NavigableExpr, matcher ExprMatcher) []NavigableExpr {
- matches := []NavigableExpr{}
- navVisitor := &baseVisitor{
- visitExpr: func(e Expr) {
- nav := e.(NavigableExpr)
- if matcher(nav) {
- matches = append(matches, nav)
- }
- },
- }
- visit(expr, navVisitor, postOrder, 0, 0)
- return matches
-}
-
-// MatchSubset applies an ExprMatcher to a list of NavigableExpr values and their descendants, producing a
-// subset of NavigableExpr values which match.
-func MatchSubset(exprs []NavigableExpr, matcher ExprMatcher) []NavigableExpr {
- matches := []NavigableExpr{}
- navVisitor := &baseVisitor{
- visitExpr: func(e Expr) {
- nav := e.(NavigableExpr)
- if matcher(nav) {
- matches = append(matches, nav)
- }
- },
- }
- for _, expr := range exprs {
- visit(expr, navVisitor, postOrder, 0, 1)
- }
- return matches
-}
-
-// Visitor defines an object for visiting Expr and EntryExpr nodes within an expression graph.
-type Visitor interface {
- // VisitExpr visits the input expression.
- VisitExpr(Expr)
-
- // VisitEntryExpr visits the input entry expression, i.e. a struct field or map entry.
- VisitEntryExpr(EntryExpr)
-}
-
-type baseVisitor struct {
- visitExpr func(Expr)
- visitEntryExpr func(EntryExpr)
-}
-
-// VisitExpr visits the Expr if the internal expr visitor has been configured.
-func (v *baseVisitor) VisitExpr(e Expr) {
- if v.visitExpr != nil {
- v.visitExpr(e)
- }
-}
-
-// VisitEntryExpr visits the entry if the internal expr entry visitor has been configured.
-func (v *baseVisitor) VisitEntryExpr(e EntryExpr) {
- if v.visitEntryExpr != nil {
- v.visitEntryExpr(e)
- }
-}
-
-// NewExprVisitor creates a visitor which only visits expression nodes.
-func NewExprVisitor(v func(Expr)) Visitor {
- return &baseVisitor{
- visitExpr: v,
- visitEntryExpr: nil,
- }
-}
-
-// PostOrderVisit walks the expression graph and calls the visitor in post-order (bottom-up).
-func PostOrderVisit(expr Expr, visitor Visitor) {
- visit(expr, visitor, postOrder, 0, 0)
-}
-
-// PreOrderVisit walks the expression graph and calls the visitor in pre-order (top-down).
-func PreOrderVisit(expr Expr, visitor Visitor) {
- visit(expr, visitor, preOrder, 0, 0)
-}
-
-type visitOrder int
-
-const (
- preOrder = iota + 1
- postOrder
-)
-
-// TODO: consider exposing a way to configure a limit for the max visit depth.
-// It's possible that we could want to configure this on the NewExprVisitor()
-// and through MatchDescendants() / MaxID().
-func visit(expr Expr, visitor Visitor, order visitOrder, depth, maxDepth int) {
- if maxDepth > 0 && depth == maxDepth {
- return
- }
- if order == preOrder {
- visitor.VisitExpr(expr)
- }
- switch expr.Kind() {
- case CallKind:
- c := expr.AsCall()
- if c.IsMemberFunction() {
- visit(c.Target(), visitor, order, depth+1, maxDepth)
- }
- for _, arg := range c.Args() {
- visit(arg, visitor, order, depth+1, maxDepth)
- }
- case ComprehensionKind:
- c := expr.AsComprehension()
- visit(c.IterRange(), visitor, order, depth+1, maxDepth)
- visit(c.AccuInit(), visitor, order, depth+1, maxDepth)
- visit(c.LoopCondition(), visitor, order, depth+1, maxDepth)
- visit(c.LoopStep(), visitor, order, depth+1, maxDepth)
- visit(c.Result(), visitor, order, depth+1, maxDepth)
- case ListKind:
- l := expr.AsList()
- for _, elem := range l.Elements() {
- visit(elem, visitor, order, depth+1, maxDepth)
- }
- case MapKind:
- m := expr.AsMap()
- for _, e := range m.Entries() {
- if order == preOrder {
- visitor.VisitEntryExpr(e)
- }
- entry := e.AsMapEntry()
- visit(entry.Key(), visitor, order, depth+1, maxDepth)
- visit(entry.Value(), visitor, order, depth+1, maxDepth)
- if order == postOrder {
- visitor.VisitEntryExpr(e)
- }
- }
- case SelectKind:
- visit(expr.AsSelect().Operand(), visitor, order, depth+1, maxDepth)
- case StructKind:
- s := expr.AsStruct()
- for _, f := range s.Fields() {
- visitor.VisitEntryExpr(f)
- visit(f.AsStructField().Value(), visitor, order, depth+1, maxDepth)
- }
- }
- if order == postOrder {
- visitor.VisitExpr(expr)
- }
-}
-
-func matchIsConstantValue(e NavigableExpr) bool {
- if e.Kind() == LiteralKind {
- return true
- }
- if e.Kind() == StructKind || e.Kind() == MapKind || e.Kind() == ListKind {
- for _, child := range e.Children() {
- if !matchIsConstantValue(child) {
- return false
- }
- }
- return true
- }
- return false
-}
-
-func newNavigableExpr(ast *AST, parent NavigableExpr, expr Expr, depth int) NavigableExpr {
- // Reduce navigable expression nesting by unwrapping the embedded Expr value.
- if nav, ok := expr.(*navigableExprImpl); ok {
- expr = nav.Expr
- }
- nav := &navigableExprImpl{
- Expr: expr,
- depth: depth,
- ast: ast,
- parent: parent,
- createChildren: getChildFactory(expr),
- }
- return nav
-}
-
-type navigableExprImpl struct {
- Expr
- depth int
- ast *AST
- parent NavigableExpr
- createChildren childFactory
-}
-
-func (nav *navigableExprImpl) Parent() (NavigableExpr, bool) {
- if nav.parent != nil {
- return nav.parent, true
- }
- return nil, false
-}
-
-func (nav *navigableExprImpl) ID() int64 {
- return nav.Expr.ID()
-}
-
-func (nav *navigableExprImpl) Kind() ExprKind {
- return nav.Expr.Kind()
-}
-
-func (nav *navigableExprImpl) Type() *types.Type {
- return nav.ast.GetType(nav.ID())
-}
-
-func (nav *navigableExprImpl) Children() []NavigableExpr {
- return nav.createChildren(nav)
-}
-
-func (nav *navigableExprImpl) Depth() int {
- return nav.depth
-}
-
-func (nav *navigableExprImpl) AsCall() CallExpr {
- return navigableCallImpl{navigableExprImpl: nav}
-}
-
-func (nav *navigableExprImpl) AsComprehension() ComprehensionExpr {
- return navigableComprehensionImpl{navigableExprImpl: nav}
-}
-
-func (nav *navigableExprImpl) AsIdent() string {
- return nav.Expr.AsIdent()
-}
-
-func (nav *navigableExprImpl) AsList() ListExpr {
- return navigableListImpl{navigableExprImpl: nav}
-}
-
-func (nav *navigableExprImpl) AsLiteral() ref.Val {
- return nav.Expr.AsLiteral()
-}
-
-func (nav *navigableExprImpl) AsMap() MapExpr {
- return navigableMapImpl{navigableExprImpl: nav}
-}
-
-func (nav *navigableExprImpl) AsSelect() SelectExpr {
- return navigableSelectImpl{navigableExprImpl: nav}
-}
-
-func (nav *navigableExprImpl) AsStruct() StructExpr {
- return navigableStructImpl{navigableExprImpl: nav}
-}
-
-func (nav *navigableExprImpl) createChild(e Expr) NavigableExpr {
- return newNavigableExpr(nav.ast, nav, e, nav.depth+1)
-}
-
-func (nav *navigableExprImpl) isExpr() {}
-
-type navigableCallImpl struct {
- *navigableExprImpl
-}
-
-func (call navigableCallImpl) FunctionName() string {
- return call.Expr.AsCall().FunctionName()
-}
-
-func (call navigableCallImpl) IsMemberFunction() bool {
- return call.Expr.AsCall().IsMemberFunction()
-}
-
-func (call navigableCallImpl) Target() Expr {
- t := call.Expr.AsCall().Target()
- if t != nil {
- return call.createChild(t)
- }
- return nil
-}
-
-func (call navigableCallImpl) Args() []Expr {
- args := call.Expr.AsCall().Args()
- navArgs := make([]Expr, len(args))
- for i, a := range args {
- navArgs[i] = call.createChild(a)
- }
- return navArgs
-}
-
-type navigableComprehensionImpl struct {
- *navigableExprImpl
-}
-
-func (comp navigableComprehensionImpl) IterRange() Expr {
- return comp.createChild(comp.Expr.AsComprehension().IterRange())
-}
-
-func (comp navigableComprehensionImpl) IterVar() string {
- return comp.Expr.AsComprehension().IterVar()
-}
-
-func (comp navigableComprehensionImpl) AccuVar() string {
- return comp.Expr.AsComprehension().AccuVar()
-}
-
-func (comp navigableComprehensionImpl) AccuInit() Expr {
- return comp.createChild(comp.Expr.AsComprehension().AccuInit())
-}
-
-func (comp navigableComprehensionImpl) LoopCondition() Expr {
- return comp.createChild(comp.Expr.AsComprehension().LoopCondition())
-}
-
-func (comp navigableComprehensionImpl) LoopStep() Expr {
- return comp.createChild(comp.Expr.AsComprehension().LoopStep())
-}
-
-func (comp navigableComprehensionImpl) Result() Expr {
- return comp.createChild(comp.Expr.AsComprehension().Result())
-}
-
-type navigableListImpl struct {
- *navigableExprImpl
-}
-
-func (l navigableListImpl) Elements() []Expr {
- pbElems := l.Expr.AsList().Elements()
- elems := make([]Expr, len(pbElems))
- for i := 0; i < len(pbElems); i++ {
- elems[i] = l.createChild(pbElems[i])
- }
- return elems
-}
-
-func (l navigableListImpl) IsOptional(index int32) bool {
- return l.Expr.AsList().IsOptional(index)
-}
-
-func (l navigableListImpl) OptionalIndices() []int32 {
- return l.Expr.AsList().OptionalIndices()
-}
-
-func (l navigableListImpl) Size() int {
- return l.Expr.AsList().Size()
-}
-
-type navigableMapImpl struct {
- *navigableExprImpl
-}
-
-func (m navigableMapImpl) Entries() []EntryExpr {
- mapExpr := m.Expr.AsMap()
- entries := make([]EntryExpr, len(mapExpr.Entries()))
- for i, e := range mapExpr.Entries() {
- entry := e.AsMapEntry()
- entries[i] = &entryExpr{
- id: e.ID(),
- entryExprKindCase: navigableEntryImpl{
- key: m.createChild(entry.Key()),
- val: m.createChild(entry.Value()),
- isOpt: entry.IsOptional(),
- },
- }
- }
- return entries
-}
-
-func (m navigableMapImpl) Size() int {
- return m.Expr.AsMap().Size()
-}
-
-type navigableEntryImpl struct {
- key NavigableExpr
- val NavigableExpr
- isOpt bool
-}
-
-func (e navigableEntryImpl) Kind() EntryExprKind {
- return MapEntryKind
-}
-
-func (e navigableEntryImpl) Key() Expr {
- return e.key
-}
-
-func (e navigableEntryImpl) Value() Expr {
- return e.val
-}
-
-func (e navigableEntryImpl) IsOptional() bool {
- return e.isOpt
-}
-
-func (e navigableEntryImpl) renumberIDs(IDGenerator) {}
-
-func (e navigableEntryImpl) isEntryExpr() {}
-
-type navigableSelectImpl struct {
- *navigableExprImpl
-}
-
-func (sel navigableSelectImpl) FieldName() string {
- return sel.Expr.AsSelect().FieldName()
-}
-
-func (sel navigableSelectImpl) IsTestOnly() bool {
- return sel.Expr.AsSelect().IsTestOnly()
-}
-
-func (sel navigableSelectImpl) Operand() Expr {
- return sel.createChild(sel.Expr.AsSelect().Operand())
-}
-
-type navigableStructImpl struct {
- *navigableExprImpl
-}
-
-func (s navigableStructImpl) TypeName() string {
- return s.Expr.AsStruct().TypeName()
-}
-
-func (s navigableStructImpl) Fields() []EntryExpr {
- fieldInits := s.Expr.AsStruct().Fields()
- fields := make([]EntryExpr, len(fieldInits))
- for i, f := range fieldInits {
- field := f.AsStructField()
- fields[i] = &entryExpr{
- id: f.ID(),
- entryExprKindCase: navigableFieldImpl{
- name: field.Name(),
- val: s.createChild(field.Value()),
- isOpt: field.IsOptional(),
- },
- }
- }
- return fields
-}
-
-type navigableFieldImpl struct {
- name string
- val NavigableExpr
- isOpt bool
-}
-
-func (f navigableFieldImpl) Kind() EntryExprKind {
- return StructFieldKind
-}
-
-func (f navigableFieldImpl) Name() string {
- return f.name
-}
-
-func (f navigableFieldImpl) Value() Expr {
- return f.val
-}
-
-func (f navigableFieldImpl) IsOptional() bool {
- return f.isOpt
-}
-
-func (f navigableFieldImpl) renumberIDs(IDGenerator) {}
-
-func (f navigableFieldImpl) isEntryExpr() {}
-
-func getChildFactory(expr Expr) childFactory {
- if expr == nil {
- return noopFactory
- }
- switch expr.Kind() {
- case LiteralKind:
- return noopFactory
- case IdentKind:
- return noopFactory
- case SelectKind:
- return selectFactory
- case CallKind:
- return callArgFactory
- case ListKind:
- return listElemFactory
- case MapKind:
- return mapEntryFactory
- case StructKind:
- return structEntryFactory
- case ComprehensionKind:
- return comprehensionFactory
- default:
- return noopFactory
- }
-}
-
-type childFactory func(*navigableExprImpl) []NavigableExpr
-
-func noopFactory(*navigableExprImpl) []NavigableExpr {
- return nil
-}
-
-func selectFactory(nav *navigableExprImpl) []NavigableExpr {
- return []NavigableExpr{nav.createChild(nav.AsSelect().Operand())}
-}
-
-func callArgFactory(nav *navigableExprImpl) []NavigableExpr {
- call := nav.Expr.AsCall()
- argCount := len(call.Args())
- if call.IsMemberFunction() {
- argCount++
- }
- navExprs := make([]NavigableExpr, argCount)
- i := 0
- if call.IsMemberFunction() {
- navExprs[i] = nav.createChild(call.Target())
- i++
- }
- for _, arg := range call.Args() {
- navExprs[i] = nav.createChild(arg)
- i++
- }
- return navExprs
-}
-
-func listElemFactory(nav *navigableExprImpl) []NavigableExpr {
- l := nav.Expr.AsList()
- navExprs := make([]NavigableExpr, len(l.Elements()))
- for i, e := range l.Elements() {
- navExprs[i] = nav.createChild(e)
- }
- return navExprs
-}
-
-func structEntryFactory(nav *navigableExprImpl) []NavigableExpr {
- s := nav.Expr.AsStruct()
- entries := make([]NavigableExpr, len(s.Fields()))
- for i, e := range s.Fields() {
- f := e.AsStructField()
- entries[i] = nav.createChild(f.Value())
- }
- return entries
-}
-
-func mapEntryFactory(nav *navigableExprImpl) []NavigableExpr {
- m := nav.Expr.AsMap()
- entries := make([]NavigableExpr, len(m.Entries())*2)
- j := 0
- for _, e := range m.Entries() {
- mapEntry := e.AsMapEntry()
- entries[j] = nav.createChild(mapEntry.Key())
- entries[j+1] = nav.createChild(mapEntry.Value())
- j += 2
- }
- return entries
-}
-
-func comprehensionFactory(nav *navigableExprImpl) []NavigableExpr {
- compre := nav.Expr.AsComprehension()
- return []NavigableExpr{
- nav.createChild(compre.IterRange()),
- nav.createChild(compre.AccuInit()),
- nav.createChild(compre.LoopCondition()),
- nav.createChild(compre.LoopStep()),
- nav.createChild(compre.Result()),
- }
-}
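
For orientation, the navigable wrappers above only add Parent/Children/Depth bookkeeping on top of a plain AST. A minimal sketch of a depth-first walk over that interface, assuming the exported ast.NavigateAST helper from the same package (not shown in this hunk); the traversal itself relies only on Children(), Kind(), and Depth():

package example

import (
	"fmt"

	"github.com/google/cel-go/common/ast"
)

// printTree walks a NavigableExpr depth-first, printing each node's kind
// indented according to its depth within the expression graph.
func printTree(e ast.NavigableExpr) {
	fmt.Printf("%*s%v\n", e.Depth()*2, "", e.Kind())
	for _, child := range e.Children() {
		printTree(child)
	}
}

// walkAST wraps a parsed or checked AST in its navigable form before printing.
func walkAST(a *ast.AST) {
	printTree(ast.NavigateAST(a))
}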
diff --git a/vendor/github.com/google/cel-go/common/containers/BUILD.bazel b/vendor/github.com/google/cel-go/common/containers/BUILD.bazel
deleted file mode 100644
index 81197f064..000000000
--- a/vendor/github.com/google/cel-go/common/containers/BUILD.bazel
+++ /dev/null
@@ -1,31 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "container.go",
- ],
- importpath = "github.com/google/cel-go/common/containers",
- deps = [
- "//common/ast:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- size = "small",
- srcs = [
- "container_test.go",
- ],
- embed = [
- ":go_default_library",
- ],
- deps = [
- "//common/ast:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/containers/container.go b/vendor/github.com/google/cel-go/common/containers/container.go
deleted file mode 100644
index 52153d4cd..000000000
--- a/vendor/github.com/google/cel-go/common/containers/container.go
+++ /dev/null
@@ -1,316 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package containers defines types and functions for resolving qualified names within a namespace
-// or type provided to CEL.
-package containers
-
-import (
- "fmt"
- "strings"
-
- "github.com/google/cel-go/common/ast"
-)
-
-var (
- // DefaultContainer has an empty container name.
- DefaultContainer *Container = nil
-
- // Empty map to search for aliases when needed.
- noAliases = make(map[string]string)
-)
-
-// NewContainer creates a new Container with the fully-qualified name.
-func NewContainer(opts ...ContainerOption) (*Container, error) {
- var c *Container
- var err error
- for _, opt := range opts {
- c, err = opt(c)
- if err != nil {
- return nil, err
- }
- }
- return c, nil
-}
-
-// Container holds a reference to an optional qualified container name and set of aliases.
-//
-// The program container can be used to simplify variable, function, and type specification within
-// CEL programs and behaves more or less like a C++ namespace. See ResolveCandidateNames for more
-// details.
-type Container struct {
- name string
- aliases map[string]string
-}
-
-// Extend creates a new Container with the existing settings and applies a series of
-// ContainerOptions to further configure the new container.
-func (c *Container) Extend(opts ...ContainerOption) (*Container, error) {
- if c == nil {
- return NewContainer(opts...)
- }
- // Copy the name and aliases of the existing container.
- ext := &Container{name: c.Name()}
- if len(c.aliasSet()) > 0 {
- aliasSet := make(map[string]string, len(c.aliasSet()))
- for k, v := range c.aliasSet() {
- aliasSet[k] = v
- }
- ext.aliases = aliasSet
- }
- // Apply the new options to the container.
- var err error
- for _, opt := range opts {
- ext, err = opt(ext)
- if err != nil {
- return nil, err
- }
- }
- return ext, nil
-}
-
-// Name returns the fully-qualified name of the container.
-//
-// The name may conceptually be a namespace, package, or type.
-func (c *Container) Name() string {
- if c == nil {
- return ""
- }
- return c.name
-}
-
-// ResolveCandidateNames returns the candidate names of namespaced identifiers in C++ resolution
-// order.
-//
-// Names which shadow other names are returned first. If a name includes a leading dot ('.'),
-// the name is treated as an absolute identifier which cannot be shadowed.
-//
-// Given a container name a.b.c.M.N and a type name R.s, this will deliver in order:
-//
-// a.b.c.M.N.R.s
-// a.b.c.M.R.s
-// a.b.c.R.s
-// a.b.R.s
-// a.R.s
-// R.s
-//
-// If aliases or abbreviations are configured for the container, then alias names will take
-// precedence over containerized names.
-func (c *Container) ResolveCandidateNames(name string) []string {
- if strings.HasPrefix(name, ".") {
- qn := name[1:]
- alias, isAlias := c.findAlias(qn)
- if isAlias {
- return []string{alias}
- }
- return []string{qn}
- }
- alias, isAlias := c.findAlias(name)
- if isAlias {
- return []string{alias}
- }
- if c.Name() == "" {
- return []string{name}
- }
- nextCont := c.Name()
- candidates := []string{nextCont + "." + name}
- for i := strings.LastIndex(nextCont, "."); i >= 0; i = strings.LastIndex(nextCont, ".") {
- nextCont = nextCont[:i]
- candidates = append(candidates, nextCont+"."+name)
- }
- return append(candidates, name)
-}
-
-// aliasSet returns the alias to fully-qualified name mapping stored in the container.
-func (c *Container) aliasSet() map[string]string {
- if c == nil || c.aliases == nil {
- return noAliases
- }
- return c.aliases
-}
-
-// findAlias takes a name as input and returns an alias expansion if one exists.
-//
-// If the name is qualified, the first component of the qualified name is checked against known
-// aliases. Any alias that is found in a qualified name is expanded in the result:
-//
-// alias: R -> my.alias.R
-// name: R.S.T
-// output: my.alias.R.S.T
-//
-// Note, the name must not have a leading dot.
-func (c *Container) findAlias(name string) (string, bool) {
- // If an alias exists for the name, ensure it is searched last.
- simple := name
- qualifier := ""
- dot := strings.Index(name, ".")
- if dot >= 0 {
- simple = name[0:dot]
- qualifier = name[dot:]
- }
- alias, found := c.aliasSet()[simple]
- if !found {
- return "", false
- }
- return alias + qualifier, true
-}
-
-// ContainerOption specifies a functional configuration option for a Container.
-//
-// Note, ContainerOption implementations must be able to handle nil container inputs.
-type ContainerOption func(*Container) (*Container, error)
-
-// Abbrevs configures a set of simple names as abbreviations for fully-qualified names.
-//
-// An abbreviation (abbrev for short) is a simple name that expands to a fully-qualified name.
-// Abbreviations can be useful when working with variables, functions, and especially types from
-// multiple namespaces:
-//
-// // CEL object construction
-// qual.pkg.version.ObjTypeName{
-// field: alt.container.ver.FieldTypeName{value: ...}
-// }
-//
-// Only one of the qualified names above may be used as the CEL container, so at least one of these
-// references must be a long qualified name within an otherwise short CEL program. Using the
-// following abbreviations, the program becomes much simpler:
-//
-// // CEL Go option
-// Abbrevs("qual.pkg.version.ObjTypeName", "alt.container.ver.FieldTypeName")
-// // Simplified Object construction
-// ObjTypeName{field: FieldTypeName{value: ...}}
-//
-// There are a few rules for the qualified names and the simple abbreviations generated from them:
-// - Qualified names must be dot-delimited, e.g. `package.subpkg.name`.
-// - The last element in the qualified name is the abbreviation.
-// - Abbreviations must not collide with each other.
-// - The abbreviation must not collide with unqualified names in use.
-//
-// Abbreviations are distinct from container-based references in the following important ways:
-// - Abbreviations must expand to a fully-qualified name.
-// - Expanded abbreviations do not participate in namespace resolution.
-// - Abbreviation expansion is done instead of the container search for a matching identifier.
-// - Containers follow C++ namespace resolution rules with searches from the most qualified name
-// to the least qualified name.
-// - Container references within the CEL program may be relative, and are resolved to fully
-// qualified names at either type-check time or program plan time, whichever comes first.
-//
-// If there is ever a case where an identifier could be in both the container and as an
-// abbreviation, the abbreviation wins as this will ensure that the meaning of a program is
-// preserved between compilations even as the container evolves.
-func Abbrevs(qualifiedNames ...string) ContainerOption {
- return func(c *Container) (*Container, error) {
- for _, qn := range qualifiedNames {
- ind := strings.LastIndex(qn, ".")
- if ind <= 0 || ind >= len(qn)-1 {
- return nil, fmt.Errorf(
- "invalid qualified name: %s, wanted name of the form 'qualified.name'", qn)
- }
- alias := qn[ind+1:]
- var err error
- c, err = aliasAs("abbreviation", qn, alias)(c)
- if err != nil {
- return nil, err
- }
- }
- return c, nil
- }
-}
-
-// Alias associates a fully-qualified name with a user-defined alias.
-//
-// In general, Abbrevs is preferred to Alias since the names generated from the Abbrevs option
-// are more easily traced back to source code. The Alias option is useful for propagating alias
-// configuration from one Container instance to another, and may also be useful for remapping
-// poorly chosen protobuf message / package names.
-//
-// Note: all of the rules that apply to Abbrevs also apply to Alias.
-func Alias(qualifiedName, alias string) ContainerOption {
- return aliasAs("alias", qualifiedName, alias)
-}
-
-func aliasAs(kind, qualifiedName, alias string) ContainerOption {
- return func(c *Container) (*Container, error) {
- if len(alias) == 0 || strings.Contains(alias, ".") {
- return nil, fmt.Errorf(
- "%s must be non-empty and simple (not qualified): %s=%s", kind, kind, alias)
- }
-
- if qualifiedName[0:1] == "." {
- return nil, fmt.Errorf("qualified name must not begin with a leading '.': %s",
- qualifiedName)
- }
- ind := strings.LastIndex(qualifiedName, ".")
- if ind <= 0 || ind == len(qualifiedName)-1 {
- return nil, fmt.Errorf("%s must refer to a valid qualified name: %s",
- kind, qualifiedName)
- }
- aliasRef, found := c.aliasSet()[alias]
- if found {
- return nil, fmt.Errorf(
- "%s collides with existing reference: name=%s, %s=%s, existing=%s",
- kind, qualifiedName, kind, alias, aliasRef)
- }
- if strings.HasPrefix(c.Name(), alias+".") || c.Name() == alias {
- return nil, fmt.Errorf(
- "%s collides with container name: name=%s, %s=%s, container=%s",
- kind, qualifiedName, kind, alias, c.Name())
- }
- if c == nil {
- c = &Container{}
- }
- if c.aliases == nil {
- c.aliases = make(map[string]string)
- }
- c.aliases[alias] = qualifiedName
- return c, nil
- }
-}
-
-// Name sets the fully-qualified name of the Container.
-func Name(name string) ContainerOption {
- return func(c *Container) (*Container, error) {
- if len(name) > 0 && name[0:1] == "." {
- return nil, fmt.Errorf("container name must not contain a leading '.': %s", name)
- }
- if c.Name() == name {
- return c, nil
- }
- if c == nil {
- return &Container{name: name}, nil
- }
- c.name = name
- return c, nil
- }
-}
-
-// ToQualifiedName converts an expression AST into a qualified name if possible, with a boolean
-// 'found' value that indicates if the conversion is successful.
-func ToQualifiedName(e ast.Expr) (string, bool) {
- switch e.Kind() {
- case ast.IdentKind:
- id := e.AsIdent()
- return id, true
- case ast.SelectKind:
- sel := e.AsSelect()
- // Test only expressions are not valid as qualified names.
- if sel.IsTestOnly() {
- return "", false
- }
- if qual, found := ToQualifiedName(sel.Operand()); found {
- return qual + "." + sel.FieldName(), true
- }
- }
- return "", false
-}
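
To make the resolution order documented above concrete, a small hedged sketch using the container options defined in this file (the names a.b.c.M.N, R.s, and qual.pkg.version.ObjTypeName are purely illustrative):

package example

import (
	"fmt"

	"github.com/google/cel-go/common/containers"
)

func resolutionExample() error {
	cont, err := containers.NewContainer(
		containers.Name("a.b.c.M.N"),
		containers.Abbrevs("qual.pkg.version.ObjTypeName"),
	)
	if err != nil {
		return err
	}
	// Candidates follow C++ namespace resolution order:
	// [a.b.c.M.N.R.s a.b.c.M.R.s a.b.c.R.s a.b.R.s a.R.s R.s]
	fmt.Println(cont.ResolveCandidateNames("R.s"))
	// The abbreviation expands directly and bypasses the container search:
	// [qual.pkg.version.ObjTypeName]
	fmt.Println(cont.ResolveCandidateNames("ObjTypeName"))
	return nil
}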
diff --git a/vendor/github.com/google/cel-go/common/cost.go b/vendor/github.com/google/cel-go/common/cost.go
deleted file mode 100644
index 5e24bd0f4..000000000
--- a/vendor/github.com/google/cel-go/common/cost.go
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package common
-
-const (
- // SelectAndIdentCost is the cost of an operation that accesses an identifier or performs a select.
- SelectAndIdentCost = 1
-
- // ConstCost is the cost of an operation that accesses a constant.
- ConstCost = 0
-
- // ListCreateBaseCost is the base cost of any operation that creates a new list.
- ListCreateBaseCost = 10
-
- // MapCreateBaseCost is the base cost of any operation that creates a new map.
- MapCreateBaseCost = 30
-
- // StructCreateBaseCost is the base cost of any operation that creates a new struct.
- StructCreateBaseCost = 40
-
- // StringTraversalCostFactor is multiplied by the length of a string when computing the cost of traversing the entire
- // string once.
- StringTraversalCostFactor = 0.1
-
- // RegexStringLengthCostFactor is multiplied by the length of a regex string pattern when computing the cost of
- // applying the regex to a string of unit cost.
- RegexStringLengthCostFactor = 0.25
-)
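
As a quick worked example of how these factors are meant to combine (a sketch of the intent, not the actual cost estimator): traversing a 40-character string once would cost 40 * 0.1 = 4 units, while applying a 20-character regex pattern to a string of unit cost would contribute 20 * 0.25 = 5 units.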
diff --git a/vendor/github.com/google/cel-go/common/debug/BUILD.bazel b/vendor/github.com/google/cel-go/common/debug/BUILD.bazel
deleted file mode 100644
index 724ed3404..000000000
--- a/vendor/github.com/google/cel-go/common/debug/BUILD.bazel
+++ /dev/null
@@ -1,20 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "debug.go",
- ],
- importpath = "github.com/google/cel-go/common/debug",
- deps = [
- "//common:go_default_library",
- "//common/ast:go_default_library",
- "//common/types:go_default_library",
- "//common/types/ref:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/debug/debug.go b/vendor/github.com/google/cel-go/common/debug/debug.go
deleted file mode 100644
index e4c01ac6e..000000000
--- a/vendor/github.com/google/cel-go/common/debug/debug.go
+++ /dev/null
@@ -1,309 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package debug provides tools to print a parsed expression graph and
-// adorn each expression element with additional metadata.
-package debug
-
-import (
- "bytes"
- "fmt"
- "strconv"
- "strings"
-
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-// Adorner returns debug metadata that will be tacked on to the string
-// representation of an expression.
-type Adorner interface {
- // GetMetadata for the input context.
- GetMetadata(ctx any) string
-}
-
-// Writer manages writing expressions to an internal string.
-type Writer interface {
- fmt.Stringer
-
- // Buffer pushes an expression into an internal queue of expressions to
- // write to a string.
- Buffer(e ast.Expr)
-}
-
-type emptyDebugAdorner struct {
-}
-
-var emptyAdorner Adorner = &emptyDebugAdorner{}
-
-func (a *emptyDebugAdorner) GetMetadata(e any) string {
- return ""
-}
-
-// ToDebugString gives the unadorned string representation of the Expr.
-func ToDebugString(e ast.Expr) string {
- return ToAdornedDebugString(e, emptyAdorner)
-}
-
-// ToAdornedDebugString gives the adorned string representation of the Expr.
-func ToAdornedDebugString(e ast.Expr, adorner Adorner) string {
- w := newDebugWriter(adorner)
- w.Buffer(e)
- return w.String()
-}
-
-// debugWriter is used to print out pretty-printed debug strings.
-type debugWriter struct {
- adorner Adorner
- buffer bytes.Buffer
- indent int
- lineStart bool
-}
-
-func newDebugWriter(a Adorner) *debugWriter {
- return &debugWriter{
- adorner: a,
- indent: 0,
- lineStart: true,
- }
-}
-
-func (w *debugWriter) Buffer(e ast.Expr) {
- if e == nil {
- return
- }
- switch e.Kind() {
- case ast.LiteralKind:
- w.append(formatLiteral(e.AsLiteral()))
- case ast.IdentKind:
- w.append(e.AsIdent())
- case ast.SelectKind:
- w.appendSelect(e.AsSelect())
- case ast.CallKind:
- w.appendCall(e.AsCall())
- case ast.ListKind:
- w.appendList(e.AsList())
- case ast.MapKind:
- w.appendMap(e.AsMap())
- case ast.StructKind:
- w.appendStruct(e.AsStruct())
- case ast.ComprehensionKind:
- w.appendComprehension(e.AsComprehension())
- }
- w.adorn(e)
-}
-
-func (w *debugWriter) appendSelect(sel ast.SelectExpr) {
- w.Buffer(sel.Operand())
- w.append(".")
- w.append(sel.FieldName())
- if sel.IsTestOnly() {
- w.append("~test-only~")
- }
-}
-
-func (w *debugWriter) appendCall(call ast.CallExpr) {
- if call.IsMemberFunction() {
- w.Buffer(call.Target())
- w.append(".")
- }
- w.append(call.FunctionName())
- w.append("(")
- if len(call.Args()) > 0 {
- w.addIndent()
- w.appendLine()
- for i, arg := range call.Args() {
- if i > 0 {
- w.append(",")
- w.appendLine()
- }
- w.Buffer(arg)
- }
- w.removeIndent()
- w.appendLine()
- }
- w.append(")")
-}
-
-func (w *debugWriter) appendList(list ast.ListExpr) {
- w.append("[")
- if len(list.Elements()) > 0 {
- w.appendLine()
- w.addIndent()
- for i, elem := range list.Elements() {
- if i > 0 {
- w.append(",")
- w.appendLine()
- }
- w.Buffer(elem)
- }
- w.removeIndent()
- w.appendLine()
- }
- w.append("]")
-}
-
-func (w *debugWriter) appendStruct(obj ast.StructExpr) {
- w.append(obj.TypeName())
- w.append("{")
- if len(obj.Fields()) > 0 {
- w.appendLine()
- w.addIndent()
- for i, f := range obj.Fields() {
- field := f.AsStructField()
- if i > 0 {
- w.append(",")
- w.appendLine()
- }
- if field.IsOptional() {
- w.append("?")
- }
- w.append(field.Name())
- w.append(":")
- w.Buffer(field.Value())
- w.adorn(f)
- }
- w.removeIndent()
- w.appendLine()
- }
- w.append("}")
-}
-
-func (w *debugWriter) appendMap(m ast.MapExpr) {
- w.append("{")
- if m.Size() > 0 {
- w.appendLine()
- w.addIndent()
- for i, e := range m.Entries() {
- entry := e.AsMapEntry()
- if i > 0 {
- w.append(",")
- w.appendLine()
- }
- if entry.IsOptional() {
- w.append("?")
- }
- w.Buffer(entry.Key())
- w.append(":")
- w.Buffer(entry.Value())
- w.adorn(e)
- }
- w.removeIndent()
- w.appendLine()
- }
- w.append("}")
-}
-
-func (w *debugWriter) appendComprehension(comprehension ast.ComprehensionExpr) {
- w.append("__comprehension__(")
- w.addIndent()
- w.appendLine()
- w.append("// Variable")
- w.appendLine()
- w.append(comprehension.IterVar())
- w.append(",")
- w.appendLine()
- w.append("// Target")
- w.appendLine()
- w.Buffer(comprehension.IterRange())
- w.append(",")
- w.appendLine()
- w.append("// Accumulator")
- w.appendLine()
- w.append(comprehension.AccuVar())
- w.append(",")
- w.appendLine()
- w.append("// Init")
- w.appendLine()
- w.Buffer(comprehension.AccuInit())
- w.append(",")
- w.appendLine()
- w.append("// LoopCondition")
- w.appendLine()
- w.Buffer(comprehension.LoopCondition())
- w.append(",")
- w.appendLine()
- w.append("// LoopStep")
- w.appendLine()
- w.Buffer(comprehension.LoopStep())
- w.append(",")
- w.appendLine()
- w.append("// Result")
- w.appendLine()
- w.Buffer(comprehension.Result())
- w.append(")")
- w.removeIndent()
-}
-
-func formatLiteral(c ref.Val) string {
- switch v := c.(type) {
- case types.Bool:
- return fmt.Sprintf("%t", v)
- case types.Bytes:
- return fmt.Sprintf("b\"%s\"", string(v))
- case types.Double:
- return fmt.Sprintf("%v", float64(v))
- case types.Int:
- return fmt.Sprintf("%d", int64(v))
- case types.String:
- return strconv.Quote(string(v))
- case types.Uint:
- return fmt.Sprintf("%du", uint64(v))
- case types.Null:
- return "null"
- default:
- panic("Unknown constant type")
- }
-}
-
-func (w *debugWriter) append(s string) {
- w.doIndent()
- w.buffer.WriteString(s)
-}
-
-func (w *debugWriter) appendFormat(f string, args ...any) {
- w.append(fmt.Sprintf(f, args...))
-}
-
-func (w *debugWriter) doIndent() {
- if w.lineStart {
- w.lineStart = false
- w.buffer.WriteString(strings.Repeat(" ", w.indent))
- }
-}
-
-func (w *debugWriter) adorn(e any) {
- w.append(w.adorner.GetMetadata(e))
-}
-
-func (w *debugWriter) appendLine() {
- w.buffer.WriteString("\n")
- w.lineStart = true
-}
-
-func (w *debugWriter) addIndent() {
- w.indent++
-}
-
-func (w *debugWriter) removeIndent() {
- w.indent--
- if w.indent < 0 {
- panic("negative indent")
- }
-}
-
-func (w *debugWriter) String() string {
- return w.buffer.String()
-}
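
A brief sketch of how this writer might be exercised, assuming the ast.NewExprFactory constructor from common/ast (not part of this hunk) for building a small expression by hand; ToDebugString is the entry point defined above:

package example

import (
	"fmt"

	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/debug"
	"github.com/google/cel-go/common/types"
)

func debugExample() {
	fac := ast.NewExprFactory()
	// Hand-build the expression size("hello") with arbitrary expression ids.
	call := fac.NewCall(1, "size", fac.NewLiteral(2, types.String("hello")))
	// Prints the pretty-printed call with its argument on an indented line.
	fmt.Println(debug.ToDebugString(call))
}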
diff --git a/vendor/github.com/google/cel-go/common/decls/BUILD.bazel b/vendor/github.com/google/cel-go/common/decls/BUILD.bazel
deleted file mode 100644
index 17791dce6..000000000
--- a/vendor/github.com/google/cel-go/common/decls/BUILD.bazel
+++ /dev/null
@@ -1,39 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "decls.go",
- ],
- importpath = "github.com/google/cel-go/common/decls",
- deps = [
- "//checker/decls:go_default_library",
- "//common/functions:go_default_library",
- "//common/types:go_default_library",
- "//common/types/ref:go_default_library",
- "//common/types/traits:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- srcs = [
- "decls_test.go",
- ],
- embed = [":go_default_library"],
- deps = [
- "//checker/decls:go_default_library",
- "//common/overloads:go_default_library",
- "//common/types:go_default_library",
- "//common/types/ref:go_default_library",
- "//common/types/traits:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/decls/decls.go b/vendor/github.com/google/cel-go/common/decls/decls.go
deleted file mode 100644
index 734ebe57e..000000000
--- a/vendor/github.com/google/cel-go/common/decls/decls.go
+++ /dev/null
@@ -1,844 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package decls contains function and variable declaration structs and helper methods.
-package decls
-
-import (
- "fmt"
- "strings"
-
- chkdecls "github.com/google/cel-go/checker/decls"
- "github.com/google/cel-go/common/functions"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-// NewFunction creates a new function declaration with a set of function options to configure overloads
-// and function definitions (implementations).
-//
-// Functions are checked for name collisions and singleton redefinition.
-func NewFunction(name string, opts ...FunctionOpt) (*FunctionDecl, error) {
- fn := &FunctionDecl{
- name: name,
- overloads: map[string]*OverloadDecl{},
- overloadOrdinals: []string{},
- }
- var err error
- for _, opt := range opts {
- fn, err = opt(fn)
- if err != nil {
- return nil, err
- }
- }
- if len(fn.overloads) == 0 {
- return nil, fmt.Errorf("function %s must have at least one overload", name)
- }
- return fn, nil
-}
-
-// FunctionDecl defines a function name, overload set, and optionally a singleton definition for all
-// overload instances.
-type FunctionDecl struct {
- name string
-
- // overloads associated with the function name.
- overloads map[string]*OverloadDecl
-
- // singleton implementation of the function for all overloads.
- //
- // If this option is set, an error will occur if any overloads specify a per-overload implementation
- // or if another function with the same name attempts to redefine the singleton.
- singleton *functions.Overload
-
- // disableTypeGuards is a performance optimization to disable detailed runtime type checks which could
- // add overhead on common operations. Setting this option true leaves error checks and argument checks
- // intact.
- disableTypeGuards bool
-
- // state indicates that the binding should be provided as a declaration, as a runtime binding, or both.
- state declarationState
-
- // overloadOrdinals indicates the order in which the overload was declared.
- overloadOrdinals []string
-}
-
-type declarationState int
-
-const (
- declarationStateUnset declarationState = iota
- declarationDisabled
- declarationEnabled
-)
-
-// Name returns the function name in human-readable terms, e.g. 'contains' or 'math.least'.
-func (f *FunctionDecl) Name() string {
- if f == nil {
- return ""
- }
- return f.name
-}
-
-// IsDeclarationDisabled indicates that the function implementation should be added to the dispatcher, but the
-// declaration should not be exposed for use in expressions.
-func (f *FunctionDecl) IsDeclarationDisabled() bool {
- return f.state == declarationDisabled
-}
-
-// Merge combines an existing function declaration with another.
-//
-// If a function is extended, say by adding new overloads to an existing function, then it is merged with the
-// prior definition of the function, at which point its overloads must not collide with pre-existing overloads
-// and its bindings (singleton or per-overload) must not conflict with previous definitions either.
-func (f *FunctionDecl) Merge(other *FunctionDecl) (*FunctionDecl, error) {
- if f == other {
- return f, nil
- }
- if f.Name() != other.Name() {
- return nil, fmt.Errorf("cannot merge unrelated functions. %s and %s", f.Name(), other.Name())
- }
- merged := &FunctionDecl{
- name: f.Name(),
- overloads: make(map[string]*OverloadDecl, len(f.overloads)),
- singleton: f.singleton,
- overloadOrdinals: make([]string, len(f.overloads)),
- // if one function is expecting type-guards and the other is not, then they
- // must not be disabled.
- disableTypeGuards: f.disableTypeGuards && other.disableTypeGuards,
- // default to the current functions declaration state.
- state: f.state,
- }
- // If the other state indicates that the declaration should be explicitly enabled or
- // disabled, then update the merged state with the most recent value.
- if other.state != declarationStateUnset {
- merged.state = other.state
- }
- // baseline copy of the overloads and their ordinals
- copy(merged.overloadOrdinals, f.overloadOrdinals)
- for oID, o := range f.overloads {
- merged.overloads[oID] = o
- }
- // overloads and their ordinals are added from the left
- for _, oID := range other.overloadOrdinals {
- o := other.overloads[oID]
- err := merged.AddOverload(o)
- if err != nil {
- return nil, fmt.Errorf("function declaration merge failed: %v", err)
- }
- }
- if other.singleton != nil {
- if merged.singleton != nil && merged.singleton != other.singleton {
- return nil, fmt.Errorf("function already has a singleton binding: %s", f.Name())
- }
- merged.singleton = other.singleton
- }
- return merged, nil
-}
-
-// AddOverload ensures that the new overload does not collide with an existing overload signature;
-// however, if the function signatures are identical, the implementation may be rewritten as it's
-// difficult to compare functions by object identity.
-func (f *FunctionDecl) AddOverload(overload *OverloadDecl) error {
- if f == nil {
- return fmt.Errorf("nil function cannot add overload: %s", overload.ID())
- }
- for oID, o := range f.overloads {
- if oID != overload.ID() && o.SignatureOverlaps(overload) {
- return fmt.Errorf("overload signature collision in function %s: %s collides with %s", f.Name(), oID, overload.ID())
- }
- if oID == overload.ID() {
- if o.SignatureEquals(overload) && o.IsNonStrict() == overload.IsNonStrict() {
- // Allow redefinition of an overload implementation so long as the signatures match.
- f.overloads[oID] = overload
- return nil
- }
- return fmt.Errorf("overload redefinition in function. %s: %s has multiple definitions", f.Name(), oID)
- }
- }
- f.overloadOrdinals = append(f.overloadOrdinals, overload.ID())
- f.overloads[overload.ID()] = overload
- return nil
-}
-
-// OverloadDecls returns the overload declarations in the order in which they were declared.
-func (f *FunctionDecl) OverloadDecls() []*OverloadDecl {
- if f == nil {
- return []*OverloadDecl{}
- }
- overloads := make([]*OverloadDecl, 0, len(f.overloads))
- for _, oID := range f.overloadOrdinals {
- overloads = append(overloads, f.overloads[oID])
- }
- return overloads
-}
-
-// Bindings produces a set of function bindings, if any are defined.
-func (f *FunctionDecl) Bindings() ([]*functions.Overload, error) {
- if f == nil {
- return []*functions.Overload{}, nil
- }
- overloads := []*functions.Overload{}
- nonStrict := false
- for _, oID := range f.overloadOrdinals {
- o := f.overloads[oID]
- if o.hasBinding() {
- overload := &functions.Overload{
- Operator: o.ID(),
- Unary: o.guardedUnaryOp(f.Name(), f.disableTypeGuards),
- Binary: o.guardedBinaryOp(f.Name(), f.disableTypeGuards),
- Function: o.guardedFunctionOp(f.Name(), f.disableTypeGuards),
- OperandTrait: o.OperandTrait(),
- NonStrict: o.IsNonStrict(),
- }
- overloads = append(overloads, overload)
- nonStrict = nonStrict || o.IsNonStrict()
- }
- }
- if f.singleton != nil {
- if len(overloads) != 0 {
- return nil, fmt.Errorf("singleton function incompatible with specialized overloads: %s", f.Name())
- }
- overloads = []*functions.Overload{
- {
- Operator: f.Name(),
- Unary: f.singleton.Unary,
- Binary: f.singleton.Binary,
- Function: f.singleton.Function,
- OperandTrait: f.singleton.OperandTrait,
- },
- }
- // fall-through to return single overload case.
- }
- if len(overloads) == 0 {
- return overloads, nil
- }
- // Single overload. Replicate an entry for it using the function name as well.
- if len(overloads) == 1 {
- if overloads[0].Operator == f.Name() {
- return overloads, nil
- }
- return append(overloads, &functions.Overload{
- Operator: f.Name(),
- Unary: overloads[0].Unary,
- Binary: overloads[0].Binary,
- Function: overloads[0].Function,
- NonStrict: overloads[0].NonStrict,
- OperandTrait: overloads[0].OperandTrait,
- }), nil
- }
- // All of the defined overloads are wrapped into a top-level function which
- // performs dynamic dispatch to the proper overload based on the argument types.
- bindings := append([]*functions.Overload{}, overloads...)
- funcDispatch := func(args ...ref.Val) ref.Val {
- for _, oID := range f.overloadOrdinals {
- o := f.overloads[oID]
- // During dynamic dispatch over multiple functions, signature agreement checks
- // are preserved in order to assist with the function resolution step.
- switch len(args) {
- case 1:
- if o.unaryOp != nil && o.matchesRuntimeSignature( /* disableTypeGuards=*/ false, args...) {
- return o.unaryOp(args[0])
- }
- case 2:
- if o.binaryOp != nil && o.matchesRuntimeSignature( /* disableTypeGuards=*/ false, args...) {
- return o.binaryOp(args[0], args[1])
- }
- }
- if o.functionOp != nil && o.matchesRuntimeSignature( /* disableTypeGuards=*/ false, args...) {
- return o.functionOp(args...)
- }
- // eventually this will fall through to the MaybeNoSuchOverload call below.
- }
- return MaybeNoSuchOverload(f.Name(), args...)
- }
- function := &functions.Overload{
- Operator: f.Name(),
- Function: funcDispatch,
- NonStrict: nonStrict,
- }
- return append(bindings, function), nil
-}
-
-// MaybeNoSuchOverload determines whether to propagate an error if one is provided as an argument, or
-// to return an unknown set, or to produce a new error for a missing function signature.
-func MaybeNoSuchOverload(funcName string, args ...ref.Val) ref.Val {
- argTypes := make([]string, len(args))
- var unk *types.Unknown = nil
- for i, arg := range args {
- if types.IsError(arg) {
- return arg
- }
- if types.IsUnknown(arg) {
- unk = types.MergeUnknowns(arg.(*types.Unknown), unk)
- }
- argTypes[i] = arg.Type().TypeName()
- }
- if unk != nil {
- return unk
- }
- signature := strings.Join(argTypes, ", ")
- return types.NewErr("no such overload: %s(%s)", funcName, signature)
-}
-
-// FunctionOpt defines a functional option for mutating a function declaration.
-type FunctionOpt func(*FunctionDecl) (*FunctionDecl, error)
-
-// DisableTypeGuards disables automatically generated function invocation guards on direct overload calls.
-// Type guards remain on during dynamic dispatch for parsed-only expressions.
-func DisableTypeGuards(value bool) FunctionOpt {
- return func(fn *FunctionDecl) (*FunctionDecl, error) {
- fn.disableTypeGuards = value
- return fn, nil
- }
-}
-
-// DisableDeclaration indicates that the function declaration should be disabled, but the runtime function
-// binding should be provided. Marking a function as runtime-only is a safe way to manage deprecations
-// of function declarations while still preserving the runtime behavior for previously compiled expressions.
-func DisableDeclaration(value bool) FunctionOpt {
- return func(fn *FunctionDecl) (*FunctionDecl, error) {
- if value {
- fn.state = declarationDisabled
- } else {
- fn.state = declarationEnabled
- }
- return fn, nil
- }
-}
-
-// SingletonUnaryBinding creates a singleton function definition to be used for all function overloads.
-//
-// Note, this approach works well if the operand is expected to have a specific trait which it implements,
-// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings.
-func SingletonUnaryBinding(fn functions.UnaryOp, traits ...int) FunctionOpt {
- trait := 0
- for _, t := range traits {
- trait = trait | t
- }
- return func(f *FunctionDecl) (*FunctionDecl, error) {
- if f.singleton != nil {
- return nil, fmt.Errorf("function already has a singleton binding: %s", f.Name())
- }
- f.singleton = &functions.Overload{
- Operator: f.Name(),
- Unary: fn,
- OperandTrait: trait,
- }
- return f, nil
- }
-}
-
-// SingletonBinaryBinding creates a singleton function definition to be used with all function overloads.
-//
-// Note, this approach works well if the operand is expected to have a specific trait which it implements,
-// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings.
-func SingletonBinaryBinding(fn functions.BinaryOp, traits ...int) FunctionOpt {
- trait := 0
- for _, t := range traits {
- trait = trait | t
- }
- return func(f *FunctionDecl) (*FunctionDecl, error) {
- if f.singleton != nil {
- return nil, fmt.Errorf("function already has a singleton binding: %s", f.Name())
- }
- f.singleton = &functions.Overload{
- Operator: f.Name(),
- Binary: fn,
- OperandTrait: trait,
- }
- return f, nil
- }
-}
-
-// SingletonFunctionBinding creates a singleton function definition to be used with all function overloads.
-//
-// Note, this approach works well if the operand is expected to have a specific trait which it implements,
-// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings.
-func SingletonFunctionBinding(fn functions.FunctionOp, traits ...int) FunctionOpt {
- trait := 0
- for _, t := range traits {
- trait = trait | t
- }
- return func(f *FunctionDecl) (*FunctionDecl, error) {
- if f.singleton != nil {
- return nil, fmt.Errorf("function already has a singleton binding: %s", f.Name())
- }
- f.singleton = &functions.Overload{
- Operator: f.Name(),
- Function: fn,
- OperandTrait: trait,
- }
- return f, nil
- }
-}
-
-// Overload defines a new global overload with an overload id, argument types, and result type. Through the
-// use of OverloadOpt options, the overload may also be configured with a binding, an operand trait, and to
-// be non-strict.
-//
-// Note: function bindings should be commonly configured with Overload instances whereas operand traits and
-// strict-ness should be rare occurrences.
-func Overload(overloadID string,
- args []*types.Type, resultType *types.Type,
- opts ...OverloadOpt) FunctionOpt {
- return newOverload(overloadID, false, args, resultType, opts...)
-}
-
-// MemberOverload defines a new receiver-style overload (or member function) with an overload id, argument types,
-// and result type. Through the use of OverloadOpt options, the overload may also be configured with a binding,
-// an operand trait, and to be non-strict.
-//
-// Note: function bindings should be commonly configured with Overload instances whereas operand traits and
-// strict-ness should be rare occurrences.
-func MemberOverload(overloadID string,
- args []*types.Type, resultType *types.Type,
- opts ...OverloadOpt) FunctionOpt {
- return newOverload(overloadID, true, args, resultType, opts...)
-}
-
-func newOverload(overloadID string,
- memberFunction bool, args []*types.Type, resultType *types.Type,
- opts ...OverloadOpt) FunctionOpt {
- return func(f *FunctionDecl) (*FunctionDecl, error) {
- overload, err := newOverloadInternal(overloadID, memberFunction, args, resultType, opts...)
- if err != nil {
- return nil, err
- }
- err = f.AddOverload(overload)
- if err != nil {
- return nil, err
- }
- return f, nil
- }
-}
-
-func newOverloadInternal(overloadID string,
- memberFunction bool, args []*types.Type, resultType *types.Type,
- opts ...OverloadOpt) (*OverloadDecl, error) {
- overload := &OverloadDecl{
- id: overloadID,
- argTypes: args,
- resultType: resultType,
- isMemberFunction: memberFunction,
- }
- var err error
- for _, opt := range opts {
- overload, err = opt(overload)
- if err != nil {
- return nil, err
- }
- }
- return overload, nil
-}
-
-// OverloadDecl contains the definition of a single overload id with a specific signature, and an optional
-// implementation.
-type OverloadDecl struct {
- id string
- argTypes []*types.Type
- resultType *types.Type
- isMemberFunction bool
- // nonStrict indicates that the function will accept error and unknown arguments as inputs.
- nonStrict bool
- // operandTrait indicates whether the member argument should have a specific type-trait.
- //
- // This is useful for creating overloads which operate on a type-interface rather than a concrete type.
- operandTrait int
-
- // Function implementation options. Optional, but encouraged.
- // unaryOp is a function binding that takes a single argument.
- unaryOp functions.UnaryOp
- // binaryOp is a function binding that takes two arguments.
- binaryOp functions.BinaryOp
- // functionOp is a catch-all for zero-arity and three-plus arity functions.
- functionOp functions.FunctionOp
-}
-
-// ID mirrors the overload signature and provides a unique id which may be referenced within the type-checker
-// and interpreter to optimize performance.
-//
-// The ID format is usually one of two styles:
-// global: <functionName>_<argType>_<argTypeN>
-// member: <memberType>_<functionName>_<argType>_<argTypeN>
-func (o *OverloadDecl) ID() string {
- if o == nil {
- return ""
- }
- return o.id
-}
-
-// ArgTypes contains the set of argument types expected by the overload.
-//
-// For member functions ArgTypes[0] represents the member operand type.
-func (o *OverloadDecl) ArgTypes() []*types.Type {
- if o == nil {
- return emptyArgs
- }
- return o.argTypes
-}
-
-// IsMemberFunction indicates whether the overload is a member function
-func (o *OverloadDecl) IsMemberFunction() bool {
- if o == nil {
- return false
- }
- return o.isMemberFunction
-}
-
-// IsNonStrict returns whether the overload accepts errors and unknown values as arguments.
-func (o *OverloadDecl) IsNonStrict() bool {
- if o == nil {
- return false
- }
- return o.nonStrict
-}
-
-// OperandTrait returns the trait mask of the first operand to the overload call, e.g.
-// `traits.Indexer`
-func (o *OverloadDecl) OperandTrait() int {
- if o == nil {
- return 0
- }
- return o.operandTrait
-}
-
-// ResultType indicates the output type from calling the function.
-func (o *OverloadDecl) ResultType() *types.Type {
- if o == nil {
- // *types.Type is nil-safe
- return nil
- }
- return o.resultType
-}
-
-// TypeParams returns the type parameter names associated with the overload.
-func (o *OverloadDecl) TypeParams() []string {
- typeParams := map[string]struct{}{}
- collectParamNames(typeParams, o.ResultType())
- for _, arg := range o.ArgTypes() {
- collectParamNames(typeParams, arg)
- }
- params := make([]string, 0, len(typeParams))
- for param := range typeParams {
- params = append(params, param)
- }
- return params
-}
-
-// SignatureEquals determines whether the incoming overload declaration signature is equal to the current signature.
-//
-// Result type, operand trait, and strict-ness are not considered as part of signature equality.
-func (o *OverloadDecl) SignatureEquals(other *OverloadDecl) bool {
- if o == other {
- return true
- }
- if o.ID() != other.ID() || o.IsMemberFunction() != other.IsMemberFunction() || len(o.ArgTypes()) != len(other.ArgTypes()) {
- return false
- }
- for i, at := range o.ArgTypes() {
- oat := other.ArgTypes()[i]
- if !at.IsEquivalentType(oat) {
- return false
- }
- }
- return o.ResultType().IsEquivalentType(other.ResultType())
-}
-
-// SignatureOverlaps indicates whether two functions have non-equal but overlapping function signatures.
-//
-// For example, list(dyn) collides with list(string) since the 'dyn' type can contain a 'string' type.
-func (o *OverloadDecl) SignatureOverlaps(other *OverloadDecl) bool {
- if o.IsMemberFunction() != other.IsMemberFunction() || len(o.ArgTypes()) != len(other.ArgTypes()) {
- return false
- }
- argsOverlap := true
- for i, argType := range o.ArgTypes() {
- otherArgType := other.ArgTypes()[i]
- argsOverlap = argsOverlap &&
- (argType.IsAssignableType(otherArgType) ||
- otherArgType.IsAssignableType(argType))
- }
- return argsOverlap
-}
-
-// hasBinding indicates whether the overload already has a definition.
-func (o *OverloadDecl) hasBinding() bool {
- return o != nil && (o.unaryOp != nil || o.binaryOp != nil || o.functionOp != nil)
-}
-
-// guardedUnaryOp creates an invocation guard around the provided unary operator, if one is defined.
-func (o *OverloadDecl) guardedUnaryOp(funcName string, disableTypeGuards bool) functions.UnaryOp {
- if o.unaryOp == nil {
- return nil
- }
- return func(arg ref.Val) ref.Val {
- if !o.matchesRuntimeUnarySignature(disableTypeGuards, arg) {
- return MaybeNoSuchOverload(funcName, arg)
- }
- return o.unaryOp(arg)
- }
-}
-
-// guardedBinaryOp creates an invocation guard around the provided binary operator, if one is defined.
-func (o *OverloadDecl) guardedBinaryOp(funcName string, disableTypeGuards bool) functions.BinaryOp {
- if o.binaryOp == nil {
- return nil
- }
- return func(arg1, arg2 ref.Val) ref.Val {
- if !o.matchesRuntimeBinarySignature(disableTypeGuards, arg1, arg2) {
- return MaybeNoSuchOverload(funcName, arg1, arg2)
- }
- return o.binaryOp(arg1, arg2)
- }
-}
-
-// guardedFunctionOp creates an invocation guard around the provided variadic function binding, if one is provided.
-func (o *OverloadDecl) guardedFunctionOp(funcName string, disableTypeGuards bool) functions.FunctionOp {
- if o.functionOp == nil {
- return nil
- }
- return func(args ...ref.Val) ref.Val {
- if !o.matchesRuntimeSignature(disableTypeGuards, args...) {
- return MaybeNoSuchOverload(funcName, args...)
- }
- return o.functionOp(args...)
- }
-}
-
-// matchesRuntimeUnarySignature indicates whether the argument type is runtime assignable to the overload's expected argument.
-func (o *OverloadDecl) matchesRuntimeUnarySignature(disableTypeGuards bool, arg ref.Val) bool {
- return matchRuntimeArgType(o.IsNonStrict(), disableTypeGuards, o.ArgTypes()[0], arg) &&
- matchOperandTrait(o.OperandTrait(), arg)
-}
-
-// matchesRuntimeBinarySignature indicates whether the argument types are runtime assignable to the overload's expected arguments.
-func (o *OverloadDecl) matchesRuntimeBinarySignature(disableTypeGuards bool, arg1, arg2 ref.Val) bool {
- return matchRuntimeArgType(o.IsNonStrict(), disableTypeGuards, o.ArgTypes()[0], arg1) &&
- matchRuntimeArgType(o.IsNonStrict(), disableTypeGuards, o.ArgTypes()[1], arg2) &&
- matchOperandTrait(o.OperandTrait(), arg1)
-}
-
-// matchesRuntimeSignature indicates whether the argument types are runtime assignable to the overload's expected arguments.
-func (o *OverloadDecl) matchesRuntimeSignature(disableTypeGuards bool, args ...ref.Val) bool {
- if len(args) != len(o.ArgTypes()) {
- return false
- }
- if len(args) == 0 {
- return true
- }
- for i, arg := range args {
- if !matchRuntimeArgType(o.IsNonStrict(), disableTypeGuards, o.ArgTypes()[i], arg) {
- return false
- }
- }
- return matchOperandTrait(o.OperandTrait(), args[0])
-}
-
-func matchRuntimeArgType(nonStrict, disableTypeGuards bool, argType *types.Type, arg ref.Val) bool {
- if nonStrict && (disableTypeGuards || types.IsUnknownOrError(arg)) {
- return true
- }
- if types.IsUnknownOrError(arg) {
- return false
- }
- return disableTypeGuards || argType.IsAssignableRuntimeType(arg)
-}
-
-func matchOperandTrait(trait int, arg ref.Val) bool {
- return trait == 0 || arg.Type().HasTrait(trait) || types.IsUnknownOrError(arg)
-}
-
-// OverloadOpt is a functional option for configuring a function overload.
-type OverloadOpt func(*OverloadDecl) (*OverloadDecl, error)
-
-// UnaryBinding provides the implementation of a unary overload. The provided function is protected by a runtime
-// type-guard which ensures runtime type agreement between the overload signature and runtime argument types.
-func UnaryBinding(binding functions.UnaryOp) OverloadOpt {
- return func(o *OverloadDecl) (*OverloadDecl, error) {
- if o.hasBinding() {
- return nil, fmt.Errorf("overload already has a binding: %s", o.ID())
- }
- if len(o.ArgTypes()) != 1 {
- return nil, fmt.Errorf("unary function bound to non-unary overload: %s", o.ID())
- }
- o.unaryOp = binding
- return o, nil
- }
-}
-
-// BinaryBinding provides the implementation of a binary overload. The provided function is protected by a runtime
-// type-guard which ensures runtime type agreement between the overload signature and runtime argument types.
-func BinaryBinding(binding functions.BinaryOp) OverloadOpt {
- return func(o *OverloadDecl) (*OverloadDecl, error) {
- if o.hasBinding() {
- return nil, fmt.Errorf("overload already has a binding: %s", o.ID())
- }
- if len(o.ArgTypes()) != 2 {
- return nil, fmt.Errorf("binary function bound to non-binary overload: %s", o.ID())
- }
- o.binaryOp = binding
- return o, nil
- }
-}
-
-// FunctionBinding provides the implementation of a variadic overload. The provided function is protected by a runtime
-// type-guard which ensures runtime type agreement between the overload signature and runtime argument types.
-func FunctionBinding(binding functions.FunctionOp) OverloadOpt {
- return func(o *OverloadDecl) (*OverloadDecl, error) {
- if o.hasBinding() {
- return nil, fmt.Errorf("overload already has a binding: %s", o.ID())
- }
- o.functionOp = binding
- return o, nil
- }
-}
-
-// OverloadIsNonStrict enables the function to be called with error and unknown argument values.
-//
-// Note: do not use this option unless absolutely necessary, as it should be an uncommon feature.
-func OverloadIsNonStrict() OverloadOpt {
- return func(o *OverloadDecl) (*OverloadDecl, error) {
- o.nonStrict = true
- return o, nil
- }
-}
-
-// OverloadOperandTrait configures a set of traits which the first argument to the overload must implement in order to be
-// successfully invoked.
-func OverloadOperandTrait(trait int) OverloadOpt {
- return func(o *OverloadDecl) (*OverloadDecl, error) {
- o.operandTrait = trait
- return o, nil
- }
-}
-
-// NewConstant creates a new constant declaration.
-func NewConstant(name string, t *types.Type, v ref.Val) *VariableDecl {
- return &VariableDecl{name: name, varType: t, value: v}
-}
-
-// NewVariable creates a new variable declaration.
-func NewVariable(name string, t *types.Type) *VariableDecl {
- return &VariableDecl{name: name, varType: t}
-}
-
-// VariableDecl defines a variable declaration which may optionally have a constant value.
-type VariableDecl struct {
- name string
- varType *types.Type
- value ref.Val
-}
-
-// Name returns the fully-qualified variable name
-func (v *VariableDecl) Name() string {
- if v == nil {
- return ""
- }
- return v.name
-}
-
-// Type returns the types.Type value associated with the variable.
-func (v *VariableDecl) Type() *types.Type {
- if v == nil {
- // types.Type is nil-safe
- return nil
- }
- return v.varType
-}
-
-// Value returns the constant value associated with the declaration.
-func (v *VariableDecl) Value() ref.Val {
- if v == nil {
- return nil
- }
- return v.value
-}
-
-// DeclarationIsEquivalent returns true if one variable declaration has the same name and same type as the input.
-func (v *VariableDecl) DeclarationIsEquivalent(other *VariableDecl) bool {
- if v == other {
- return true
- }
- return v.Name() == other.Name() && v.Type().IsEquivalentType(other.Type())
-}
-
-// VariableDeclToExprDecl converts a go-native variable declaration into a protobuf-typed variable declaration.
-func VariableDeclToExprDecl(v *VariableDecl) (*exprpb.Decl, error) {
- varType, err := types.TypeToExprType(v.Type())
- if err != nil {
- return nil, err
- }
- return chkdecls.NewVar(v.Name(), varType), nil
-}
-
-// TypeVariable creates a new type identifier for use within a types.Provider
-func TypeVariable(t *types.Type) *VariableDecl {
- return NewVariable(t.TypeName(), types.NewTypeTypeWithParam(t))
-}
-
-// FunctionDeclToExprDecl converts a go-native function declaration into a protobuf-typed function declaration.
-func FunctionDeclToExprDecl(f *FunctionDecl) (*exprpb.Decl, error) {
- overloads := make([]*exprpb.Decl_FunctionDecl_Overload, len(f.overloads))
- for i, oID := range f.overloadOrdinals {
- o := f.overloads[oID]
- paramNames := map[string]struct{}{}
- argTypes := make([]*exprpb.Type, len(o.ArgTypes()))
- for j, a := range o.ArgTypes() {
- collectParamNames(paramNames, a)
- at, err := types.TypeToExprType(a)
- if err != nil {
- return nil, err
- }
- argTypes[j] = at
- }
- collectParamNames(paramNames, o.ResultType())
- resultType, err := types.TypeToExprType(o.ResultType())
- if err != nil {
- return nil, err
- }
- if len(paramNames) == 0 {
- if o.IsMemberFunction() {
- overloads[i] = chkdecls.NewInstanceOverload(oID, argTypes, resultType)
- } else {
- overloads[i] = chkdecls.NewOverload(oID, argTypes, resultType)
- }
- } else {
- params := []string{}
- for pn := range paramNames {
- params = append(params, pn)
- }
- if o.IsMemberFunction() {
- overloads[i] = chkdecls.NewParameterizedInstanceOverload(oID, argTypes, resultType, params)
- } else {
- overloads[i] = chkdecls.NewParameterizedOverload(oID, argTypes, resultType, params)
- }
- }
- }
- return chkdecls.NewFunction(f.Name(), overloads...), nil
-}
-
-func collectParamNames(paramNames map[string]struct{}, arg *types.Type) {
- if arg.Kind() == types.TypeParamKind {
- paramNames[arg.TypeName()] = struct{}{}
- }
- for _, param := range arg.Parameters() {
- collectParamNames(paramNames, param)
- }
-}
-
-var (
- emptyArgs = []*types.Type{}
-)
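For reference, a minimal sketch of how the removed decls helpers (NewVariable, NewConstant, VariableDeclToExprDecl) were combined, assuming the upstream cel-go signatures shown in the deleted file; the variable and constant names are illustrative only.

package main

import (
	"fmt"

	"github.com/google/cel-go/common/decls"
	"github.com/google/cel-go/common/types"
)

func main() {
	// Declare a plain variable and a constant with a fixed value.
	v := decls.NewVariable("request.path", types.StringType)
	c := decls.NewConstant("api.version", types.StringType, types.String("v1"))

	// Convert the go-native declaration into its protobuf form for the checker.
	pbDecl, err := decls.VariableDeclToExprDecl(v)
	if err != nil {
		panic(err)
	}
	fmt.Println(v.Name(), c.Value(), pbDecl.GetName())
}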
diff --git a/vendor/github.com/google/cel-go/common/doc.go b/vendor/github.com/google/cel-go/common/doc.go
deleted file mode 100644
index 5362fdfe4..000000000
--- a/vendor/github.com/google/cel-go/common/doc.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package common defines types and utilities common to expression parsing,
-// checking, and interpretation.
-package common
diff --git a/vendor/github.com/google/cel-go/common/error.go b/vendor/github.com/google/cel-go/common/error.go
deleted file mode 100644
index 774dcb5b4..000000000
--- a/vendor/github.com/google/cel-go/common/error.go
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package common
-
-import (
- "fmt"
- "strings"
- "unicode/utf8"
-
- "golang.org/x/text/width"
-)
-
-// NewError creates an error associated with an expression id with the given message at the given location.
-func NewError(id int64, message string, location Location) *Error {
- return &Error{Message: message, Location: location, ExprID: id}
-}
-
-// Error type which references an expression id, a location within source, and a message.
-type Error struct {
- Location Location
- Message string
- ExprID int64
-}
-
-const (
- dot = "."
- ind = "^"
-
- // maxSnippetLength is the largest number of characters which can be rendered in an error message snippet.
- maxSnippetLength = 16384
-)
-
-var (
- wideDot = width.Widen.String(dot)
- wideInd = width.Widen.String(ind)
-)
-
-// ToDisplayString decorates the error message with the source location.
-func (e *Error) ToDisplayString(source Source) string {
- var result = fmt.Sprintf("ERROR: %s:%d:%d: %s",
- source.Description(),
- e.Location.Line(),
- e.Location.Column()+1, // add one to the 0-based column for display
- e.Message)
- if snippet, found := source.Snippet(e.Location.Line()); found && len(snippet) <= maxSnippetLength {
- snippet := strings.Replace(snippet, "\t", " ", -1)
- srcLine := "\n | " + snippet
- var bytes = []byte(snippet)
- var indLine = "\n | "
- for i := 0; i < e.Location.Column() && len(bytes) > 0; i++ {
- _, sz := utf8.DecodeRune(bytes)
- bytes = bytes[sz:]
- if sz > 1 {
- indLine += wideDot
- } else {
- indLine += dot
- }
- }
- if _, sz := utf8.DecodeRune(bytes); sz > 1 {
- indLine += wideInd
- } else {
- indLine += ind
- }
- result += srcLine + indLine
- }
- return result
-}
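A hedged sketch of the removed Error/ToDisplayString behavior: the error is rendered with a caret under the offending column, using only the common package API shown in the deleted files (NewStringSource appears in source.go below); the expression text and message are illustrative.

package main

import (
	"fmt"

	"github.com/google/cel-go/common"
)

func main() {
	src := common.NewStringSource("a + 'b'", "<input>")
	err := common.NewError(1, "no such overload", common.NewLocation(1, 4))
	// Prints something like:
	// ERROR: <input>:1:5: no such overload
	//  | a + 'b'
	//  | ....^
	fmt.Println(err.ToDisplayString(src))
}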
diff --git a/vendor/github.com/google/cel-go/common/errors.go b/vendor/github.com/google/cel-go/common/errors.go
deleted file mode 100644
index 25adc73d8..000000000
--- a/vendor/github.com/google/cel-go/common/errors.go
+++ /dev/null
@@ -1,103 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package common
-
-import (
- "fmt"
- "sort"
- "strings"
-)
-
-// Errors type which contains a list of errors observed during parsing.
-type Errors struct {
- errors []*Error
- source Source
- numErrors int
- maxErrorsToReport int
-}
-
-// NewErrors creates a new instance of the Errors type.
-func NewErrors(source Source) *Errors {
- return &Errors{
- errors: []*Error{},
- source: source,
- maxErrorsToReport: 100,
- }
-}
-
-// ReportError records an error at a source location.
-func (e *Errors) ReportError(l Location, format string, args ...any) {
- e.ReportErrorAtID(0, l, format, args...)
-}
-
-// ReportErrorAtID records an error at a source location and expression id.
-func (e *Errors) ReportErrorAtID(id int64, l Location, format string, args ...any) {
- e.numErrors++
- if e.numErrors > e.maxErrorsToReport {
- return
- }
- err := &Error{
- ExprID: id,
- Location: l,
- Message: fmt.Sprintf(format, args...),
- }
- e.errors = append(e.errors, err)
-}
-
-// GetErrors returns the list of observed errors.
-func (e *Errors) GetErrors() []*Error {
- return e.errors[:]
-}
-
-// Append creates a new Errors object with the current and input errors.
-func (e *Errors) Append(errs []*Error) *Errors {
- return &Errors{
- errors: append(e.errors[:], errs...),
- source: e.source,
- numErrors: e.numErrors + len(errs),
- maxErrorsToReport: e.maxErrorsToReport,
- }
-}
-
-// ToDisplayString returns the error set to a newline delimited string.
-func (e *Errors) ToDisplayString() string {
- errorsInString := e.maxErrorsToReport
- if e.numErrors > e.maxErrorsToReport {
- // add one more error to indicate the number of errors truncated.
- errorsInString++
- } else {
- // otherwise the error set will just contain the number of errors.
- errorsInString = e.numErrors
- }
-
- result := make([]string, errorsInString)
- sort.SliceStable(e.errors, func(i, j int) bool {
- ei := e.errors[i].Location
- ej := e.errors[j].Location
- return ei.Line() < ej.Line() ||
- (ei.Line() == ej.Line() && ei.Column() < ej.Column())
- })
- for i, err := range e.errors {
- // This can happen during the append of two errors objects
- if i >= e.maxErrorsToReport {
- break
- }
- result[i] = err.ToDisplayString(e.source)
- }
- if e.numErrors > e.maxErrorsToReport {
- result[e.maxErrorsToReport] = fmt.Sprintf("%d more errors were truncated", e.numErrors-e.maxErrorsToReport)
- }
- return strings.Join(result, "\n")
-}
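A hedged sketch of the removed Errors collector: errors are accumulated, sorted by location for display, and truncated after maxErrorsToReport (100). The expression text and messages are illustrative; the API matches the deleted file above.

package main

import (
	"fmt"

	"github.com/google/cel-go/common"
)

func main() {
	src := common.NewStringSource("size(x) == y", "<expr>")
	errs := common.NewErrors(src)
	errs.ReportError(common.NewLocation(1, 11), "undeclared reference to '%s'", "y")
	errs.ReportErrorAtID(7, common.NewLocation(1, 5), "undeclared reference to '%s'", "x")
	// The display string lists both errors, ordered by line and column.
	fmt.Println(errs.ToDisplayString())
	fmt.Println(len(errs.GetErrors()), "errors recorded")
}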
diff --git a/vendor/github.com/google/cel-go/common/functions/BUILD.bazel b/vendor/github.com/google/cel-go/common/functions/BUILD.bazel
deleted file mode 100644
index 3cc27d60c..000000000
--- a/vendor/github.com/google/cel-go/common/functions/BUILD.bazel
+++ /dev/null
@@ -1,17 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "functions.go",
- ],
- importpath = "github.com/google/cel-go/common/functions",
- deps = [
- "//common/types/ref:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/functions/functions.go b/vendor/github.com/google/cel-go/common/functions/functions.go
deleted file mode 100644
index 67f4a5944..000000000
--- a/vendor/github.com/google/cel-go/common/functions/functions.go
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package functions defines the standard builtin functions supported by the interpreter
-package functions
-
-import "github.com/google/cel-go/common/types/ref"
-
-// Overload defines a named overload of a function, indicating an operand trait
-// which must be present on the first argument to the overload, as well as one of
-// a unary, binary, or function implementation.
-//
-// The majority of operators within the expression language are unary or binary
-// and the specializations simplify the call contract for implementers of
-// types with operator overloads. Any added complexity is assumed to be handled
-// by the generic FunctionOp.
-type Overload struct {
- // Operator name as written in an expression or defined within
- // operators.go.
- Operator string
-
- // Operand trait used to dispatch the call. The zero-value indicates a
- // global function overload or that one of the Unary / Binary / Function
- // definitions should be used to execute the call.
- OperandTrait int
-
- // Unary defines the overload with a UnaryOp implementation. May be nil.
- Unary UnaryOp
-
- // Binary defines the overload with a BinaryOp implementation. May be nil.
- Binary BinaryOp
-
- // Function defines the overload with a FunctionOp implementation. May be
- // nil.
- Function FunctionOp
-
- // NonStrict specifies whether the Overload will tolerate arguments that
- // are types.Err or types.Unknown.
- NonStrict bool
-}
-
-// UnaryOp is a function that takes a single value and produces an output.
-type UnaryOp func(value ref.Val) ref.Val
-
-// BinaryOp is a function that takes two values and produces an output.
-type BinaryOp func(lhs ref.Val, rhs ref.Val) ref.Val
-
-// FunctionOp is a function which accepts zero or more arguments and produces
-// a value or error as a result.
-type FunctionOp func(values ...ref.Val) ref.Val
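A hedged sketch of defining a runtime Overload with a UnaryOp implementation, using only the types declared in the deleted functions.go (plus types.String and types.MaybeNoSuchOverloadErr from cel-go's types package); the "string_upper" overload id is made up for illustration.

package main

import (
	"fmt"
	"strings"

	"github.com/google/cel-go/common/functions"
	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/common/types/ref"
)

func main() {
	// A global overload (zero-value OperandTrait) backed by a UnaryOp.
	upper := &functions.Overload{
		Operator: "string_upper", // hypothetical overload id, illustration only
		Unary: func(val ref.Val) ref.Val {
			s, ok := val.(types.String)
			if !ok {
				return types.MaybeNoSuchOverloadErr(val)
			}
			return types.String(strings.ToUpper(string(s)))
		},
	}
	fmt.Println(upper.Unary(types.String("cel"))) // CEL
}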
diff --git a/vendor/github.com/google/cel-go/common/location.go b/vendor/github.com/google/cel-go/common/location.go
deleted file mode 100644
index ec3fa7cb5..000000000
--- a/vendor/github.com/google/cel-go/common/location.go
+++ /dev/null
@@ -1,51 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package common
-
-// Location interface to represent a location within Source.
-type Location interface {
- Line() int // 1-based line number within source.
- Column() int // 0-based column number within source.
-}
-
-// SourceLocation helper type to manually construct a location.
-type SourceLocation struct {
- line int
- column int
-}
-
-var (
-	// SourceLocation implements the Location interface.
- _ Location = &SourceLocation{}
- // NoLocation is a particular illegal location.
- NoLocation = &SourceLocation{-1, -1}
-)
-
-// NewLocation creates a new location.
-func NewLocation(line, column int) Location {
- return &SourceLocation{
- line: line,
- column: column}
-}
-
-// Line returns the 1-based line of the location.
-func (l *SourceLocation) Line() int {
- return l.line
-}
-
-// Column returns the 0-based column number of the location.
-func (l *SourceLocation) Column() int {
- return l.column
-}
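A small sketch of the removed Location helpers, as declared in the deleted file: lines are 1-based, columns are 0-based, and NoLocation marks an intentionally invalid position.

package main

import (
	"fmt"

	"github.com/google/cel-go/common"
)

func main() {
	loc := common.NewLocation(3, 12)
	fmt.Println(loc.Line(), loc.Column()) // 3 12
	fmt.Println(common.NoLocation.Line()) // -1
}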
diff --git a/vendor/github.com/google/cel-go/common/operators/BUILD.bazel b/vendor/github.com/google/cel-go/common/operators/BUILD.bazel
deleted file mode 100644
index b5b67f062..000000000
--- a/vendor/github.com/google/cel-go/common/operators/BUILD.bazel
+++ /dev/null
@@ -1,14 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "operators.go",
- ],
- importpath = "github.com/google/cel-go/common/operators",
-)
diff --git a/vendor/github.com/google/cel-go/common/operators/operators.go b/vendor/github.com/google/cel-go/common/operators/operators.go
deleted file mode 100644
index f9b39bda3..000000000
--- a/vendor/github.com/google/cel-go/common/operators/operators.go
+++ /dev/null
@@ -1,157 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package operators defines the internal function names of operators.
-//
-// All operators in the expression language are modelled as function calls.
-package operators
-
-// String "names" for CEL operators.
-const (
- // Symbolic operators.
- Conditional = "_?_:_"
- LogicalAnd = "_&&_"
- LogicalOr = "_||_"
- LogicalNot = "!_"
- Equals = "_==_"
- NotEquals = "_!=_"
- Less = "_<_"
- LessEquals = "_<=_"
- Greater = "_>_"
- GreaterEquals = "_>=_"
- Add = "_+_"
- Subtract = "_-_"
- Multiply = "_*_"
- Divide = "_/_"
- Modulo = "_%_"
- Negate = "-_"
- Index = "_[_]"
- OptIndex = "_[?_]"
- OptSelect = "_?._"
-
- // Macros, must have a valid identifier.
- Has = "has"
- All = "all"
- Exists = "exists"
- ExistsOne = "exists_one"
- Map = "map"
- Filter = "filter"
-
-	// Named operators, must not be valid identifiers.
- NotStrictlyFalse = "@not_strictly_false"
- In = "@in"
-
- // Deprecated: named operators with valid identifiers.
- OldNotStrictlyFalse = "__not_strictly_false__"
- OldIn = "_in_"
-)
-
-var (
- operators = map[string]string{
- "+": Add,
- "/": Divide,
- "==": Equals,
- ">": Greater,
- ">=": GreaterEquals,
- "in": In,
- "<": Less,
- "<=": LessEquals,
- "%": Modulo,
- "*": Multiply,
- "!=": NotEquals,
- "-": Subtract,
- }
-	// operatorMap maps the operator symbol to a struct containing the display name,
-	// if applicable, the operator precedence, and the arity.
- //
- // If the symbol does not have a display name listed in the map, it is only because it requires
- // special casing to render properly as text.
- operatorMap = map[string]struct {
- displayName string
- precedence int
- arity int
- }{
- Conditional: {displayName: "", precedence: 8, arity: 3},
- LogicalOr: {displayName: "||", precedence: 7, arity: 2},
- LogicalAnd: {displayName: "&&", precedence: 6, arity: 2},
- Equals: {displayName: "==", precedence: 5, arity: 2},
- Greater: {displayName: ">", precedence: 5, arity: 2},
- GreaterEquals: {displayName: ">=", precedence: 5, arity: 2},
- In: {displayName: "in", precedence: 5, arity: 2},
- Less: {displayName: "<", precedence: 5, arity: 2},
- LessEquals: {displayName: "<=", precedence: 5, arity: 2},
- NotEquals: {displayName: "!=", precedence: 5, arity: 2},
- OldIn: {displayName: "in", precedence: 5, arity: 2},
- Add: {displayName: "+", precedence: 4, arity: 2},
- Subtract: {displayName: "-", precedence: 4, arity: 2},
- Divide: {displayName: "/", precedence: 3, arity: 2},
- Modulo: {displayName: "%", precedence: 3, arity: 2},
- Multiply: {displayName: "*", precedence: 3, arity: 2},
- LogicalNot: {displayName: "!", precedence: 2, arity: 1},
- Negate: {displayName: "-", precedence: 2, arity: 1},
- Index: {displayName: "", precedence: 1, arity: 2},
- OptIndex: {displayName: "", precedence: 1, arity: 2},
- OptSelect: {displayName: "", precedence: 1, arity: 2},
- }
-)
-
-// Find returns the internal function name for an operator, if the input text is one.
-func Find(text string) (string, bool) {
- op, found := operators[text]
- return op, found
-}
-
-// FindReverse returns the unmangled text representation of the operator.
-func FindReverse(symbol string) (string, bool) {
- op, found := operatorMap[symbol]
- if !found {
- return "", false
- }
- return op.displayName, true
-}
-
-// FindReverseBinaryOperator returns the unmangled text representation of a binary operator.
-//
-// If the symbol does refer to an operator, but the operator does not have a display name, the
-// result is false.
-func FindReverseBinaryOperator(symbol string) (string, bool) {
- op, found := operatorMap[symbol]
- if !found || op.arity != 2 {
- return "", false
- }
- if op.displayName == "" {
- return "", false
- }
- return op.displayName, true
-}
-
-// Precedence returns the operator precedence, where a higher number indicates a
-// higher-precedence operation.
-func Precedence(symbol string) int {
- op, found := operatorMap[symbol]
- if !found {
- return 0
- }
- return op.precedence
-}
-
-// Arity returns the number of arguments the operator takes.
-// -1 is returned if an undefined symbol is provided.
-func Arity(symbol string) int {
- op, found := operatorMap[symbol]
- if !found {
- return -1
- }
- return op.arity
-}
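A hedged sketch of the removed operators helpers, which translate between source text ("+") and the internal mangled names ("_+_"); the expected outputs in the comments follow the tables in the deleted file.

package main

import (
	"fmt"

	"github.com/google/cel-go/common/operators"
)

func main() {
	if name, ok := operators.Find("+"); ok {
		fmt.Println(name) // _+_
	}
	if disp, ok := operators.FindReverseBinaryOperator(operators.LogicalAnd); ok {
		fmt.Println(disp) // &&
	}
	fmt.Println(operators.Precedence(operators.Add))    // 4
	fmt.Println(operators.Arity(operators.Conditional)) // 3
	fmt.Println(operators.Arity("not_an_operator"))     // -1
}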
diff --git a/vendor/github.com/google/cel-go/common/overloads/BUILD.bazel b/vendor/github.com/google/cel-go/common/overloads/BUILD.bazel
deleted file mode 100644
index e46e2f483..000000000
--- a/vendor/github.com/google/cel-go/common/overloads/BUILD.bazel
+++ /dev/null
@@ -1,14 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "overloads.go",
- ],
- importpath = "github.com/google/cel-go/common/overloads",
-)
diff --git a/vendor/github.com/google/cel-go/common/overloads/overloads.go b/vendor/github.com/google/cel-go/common/overloads/overloads.go
deleted file mode 100644
index 9d50f4367..000000000
--- a/vendor/github.com/google/cel-go/common/overloads/overloads.go
+++ /dev/null
@@ -1,327 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package overloads defines the internal overload identifiers for function and
-// operator overloads.
-package overloads
-
-// Boolean logic overloads
-const (
- Conditional = "conditional"
- LogicalAnd = "logical_and"
- LogicalOr = "logical_or"
- LogicalNot = "logical_not"
- NotStrictlyFalse = "not_strictly_false"
- Equals = "equals"
- NotEquals = "not_equals"
- LessBool = "less_bool"
- LessInt64 = "less_int64"
- LessInt64Double = "less_int64_double"
- LessInt64Uint64 = "less_int64_uint64"
- LessUint64 = "less_uint64"
- LessUint64Double = "less_uint64_double"
- LessUint64Int64 = "less_uint64_int64"
- LessDouble = "less_double"
- LessDoubleInt64 = "less_double_int64"
- LessDoubleUint64 = "less_double_uint64"
- LessString = "less_string"
- LessBytes = "less_bytes"
- LessTimestamp = "less_timestamp"
- LessDuration = "less_duration"
- LessEqualsBool = "less_equals_bool"
- LessEqualsInt64 = "less_equals_int64"
- LessEqualsInt64Double = "less_equals_int64_double"
- LessEqualsInt64Uint64 = "less_equals_int64_uint64"
- LessEqualsUint64 = "less_equals_uint64"
- LessEqualsUint64Double = "less_equals_uint64_double"
- LessEqualsUint64Int64 = "less_equals_uint64_int64"
- LessEqualsDouble = "less_equals_double"
- LessEqualsDoubleInt64 = "less_equals_double_int64"
- LessEqualsDoubleUint64 = "less_equals_double_uint64"
- LessEqualsString = "less_equals_string"
- LessEqualsBytes = "less_equals_bytes"
- LessEqualsTimestamp = "less_equals_timestamp"
- LessEqualsDuration = "less_equals_duration"
- GreaterBool = "greater_bool"
- GreaterInt64 = "greater_int64"
- GreaterInt64Double = "greater_int64_double"
- GreaterInt64Uint64 = "greater_int64_uint64"
- GreaterUint64 = "greater_uint64"
- GreaterUint64Double = "greater_uint64_double"
- GreaterUint64Int64 = "greater_uint64_int64"
- GreaterDouble = "greater_double"
- GreaterDoubleInt64 = "greater_double_int64"
- GreaterDoubleUint64 = "greater_double_uint64"
- GreaterString = "greater_string"
- GreaterBytes = "greater_bytes"
- GreaterTimestamp = "greater_timestamp"
- GreaterDuration = "greater_duration"
- GreaterEqualsBool = "greater_equals_bool"
- GreaterEqualsInt64 = "greater_equals_int64"
- GreaterEqualsInt64Double = "greater_equals_int64_double"
- GreaterEqualsInt64Uint64 = "greater_equals_int64_uint64"
- GreaterEqualsUint64 = "greater_equals_uint64"
- GreaterEqualsUint64Double = "greater_equals_uint64_double"
- GreaterEqualsUint64Int64 = "greater_equals_uint64_int64"
- GreaterEqualsDouble = "greater_equals_double"
- GreaterEqualsDoubleInt64 = "greater_equals_double_int64"
- GreaterEqualsDoubleUint64 = "greater_equals_double_uint64"
- GreaterEqualsString = "greater_equals_string"
- GreaterEqualsBytes = "greater_equals_bytes"
- GreaterEqualsTimestamp = "greater_equals_timestamp"
- GreaterEqualsDuration = "greater_equals_duration"
-)
-
-// Math overloads
-const (
- AddInt64 = "add_int64"
- AddUint64 = "add_uint64"
- AddDouble = "add_double"
- AddString = "add_string"
- AddBytes = "add_bytes"
- AddList = "add_list"
- AddTimestampDuration = "add_timestamp_duration"
- AddDurationTimestamp = "add_duration_timestamp"
- AddDurationDuration = "add_duration_duration"
- SubtractInt64 = "subtract_int64"
- SubtractUint64 = "subtract_uint64"
- SubtractDouble = "subtract_double"
- SubtractTimestampTimestamp = "subtract_timestamp_timestamp"
- SubtractTimestampDuration = "subtract_timestamp_duration"
- SubtractDurationDuration = "subtract_duration_duration"
- MultiplyInt64 = "multiply_int64"
- MultiplyUint64 = "multiply_uint64"
- MultiplyDouble = "multiply_double"
- DivideInt64 = "divide_int64"
- DivideUint64 = "divide_uint64"
- DivideDouble = "divide_double"
- ModuloInt64 = "modulo_int64"
- ModuloUint64 = "modulo_uint64"
- NegateInt64 = "negate_int64"
- NegateDouble = "negate_double"
-)
-
-// Index overloads
-const (
- IndexList = "index_list"
- IndexMap = "index_map"
- IndexMessage = "index_message" // TODO: introduce concept of types.Message
-)
-
-// In operators
-const (
- DeprecatedIn = "in"
- InList = "in_list"
- InMap = "in_map"
- InMessage = "in_message" // TODO: introduce concept of types.Message
-)
-
-// Size overloads
-const (
- Size = "size"
- SizeString = "size_string"
- SizeBytes = "size_bytes"
- SizeList = "size_list"
- SizeMap = "size_map"
- SizeStringInst = "string_size"
- SizeBytesInst = "bytes_size"
- SizeListInst = "list_size"
- SizeMapInst = "map_size"
-)
-
-// String function names.
-const (
- Contains = "contains"
- EndsWith = "endsWith"
- Matches = "matches"
- StartsWith = "startsWith"
-)
-
-// Extension function overloads with complex behaviors that need to be referenced in runtime and static analysis cost computations.
-const (
- ExtQuoteString = "strings_quote"
-)
-
-// String function overload names.
-const (
- ContainsString = "contains_string"
- EndsWithString = "ends_with_string"
- MatchesString = "matches_string"
- StartsWithString = "starts_with_string"
-)
-
-// Extension function overloads with complex behaviors that need to be referenced in runtime and static analysis cost computations.
-const (
- ExtFormatString = "string_format"
-)
-
-// Time-based functions.
-const (
- TimeGetFullYear = "getFullYear"
- TimeGetMonth = "getMonth"
- TimeGetDayOfYear = "getDayOfYear"
- TimeGetDate = "getDate"
- TimeGetDayOfMonth = "getDayOfMonth"
- TimeGetDayOfWeek = "getDayOfWeek"
- TimeGetHours = "getHours"
- TimeGetMinutes = "getMinutes"
- TimeGetSeconds = "getSeconds"
- TimeGetMilliseconds = "getMilliseconds"
-)
-
-// Timestamp overloads for time functions without timezones.
-const (
- TimestampToYear = "timestamp_to_year"
- TimestampToMonth = "timestamp_to_month"
- TimestampToDayOfYear = "timestamp_to_day_of_year"
- TimestampToDayOfMonthZeroBased = "timestamp_to_day_of_month"
- TimestampToDayOfMonthOneBased = "timestamp_to_day_of_month_1_based"
- TimestampToDayOfWeek = "timestamp_to_day_of_week"
- TimestampToHours = "timestamp_to_hours"
- TimestampToMinutes = "timestamp_to_minutes"
- TimestampToSeconds = "timestamp_to_seconds"
- TimestampToMilliseconds = "timestamp_to_milliseconds"
-)
-
-// Timestamp overloads for time functions with timezones.
-const (
- TimestampToYearWithTz = "timestamp_to_year_with_tz"
- TimestampToMonthWithTz = "timestamp_to_month_with_tz"
- TimestampToDayOfYearWithTz = "timestamp_to_day_of_year_with_tz"
- TimestampToDayOfMonthZeroBasedWithTz = "timestamp_to_day_of_month_with_tz"
- TimestampToDayOfMonthOneBasedWithTz = "timestamp_to_day_of_month_1_based_with_tz"
- TimestampToDayOfWeekWithTz = "timestamp_to_day_of_week_with_tz"
- TimestampToHoursWithTz = "timestamp_to_hours_with_tz"
- TimestampToMinutesWithTz = "timestamp_to_minutes_with_tz"
- TimestampToSecondsWithTz = "timestamp_to_seconds_tz"
- TimestampToMillisecondsWithTz = "timestamp_to_milliseconds_with_tz"
-)
-
-// Duration overloads for time functions.
-const (
- DurationToHours = "duration_to_hours"
- DurationToMinutes = "duration_to_minutes"
- DurationToSeconds = "duration_to_seconds"
- DurationToMilliseconds = "duration_to_milliseconds"
-)
-
-// Type conversion methods and overloads
-const (
- TypeConvertInt = "int"
- TypeConvertUint = "uint"
- TypeConvertDouble = "double"
- TypeConvertBool = "bool"
- TypeConvertString = "string"
- TypeConvertBytes = "bytes"
- TypeConvertTimestamp = "timestamp"
- TypeConvertDuration = "duration"
- TypeConvertType = "type"
- TypeConvertDyn = "dyn"
-)
-
-// Int conversion functions.
-const (
- IntToInt = "int64_to_int64"
- UintToInt = "uint64_to_int64"
- DoubleToInt = "double_to_int64"
- StringToInt = "string_to_int64"
- TimestampToInt = "timestamp_to_int64"
- DurationToInt = "duration_to_int64"
-)
-
-// Uint conversion functions.
-const (
- UintToUint = "uint64_to_uint64"
- IntToUint = "int64_to_uint64"
- DoubleToUint = "double_to_uint64"
- StringToUint = "string_to_uint64"
-)
-
-// Double conversion functions.
-const (
- DoubleToDouble = "double_to_double"
- IntToDouble = "int64_to_double"
- UintToDouble = "uint64_to_double"
- StringToDouble = "string_to_double"
-)
-
-// Bool conversion functions.
-const (
- BoolToBool = "bool_to_bool"
- StringToBool = "string_to_bool"
-)
-
-// Bytes conversion functions.
-const (
- BytesToBytes = "bytes_to_bytes"
- StringToBytes = "string_to_bytes"
-)
-
-// String conversion functions.
-const (
- StringToString = "string_to_string"
- BoolToString = "bool_to_string"
- IntToString = "int64_to_string"
- UintToString = "uint64_to_string"
- DoubleToString = "double_to_string"
- BytesToString = "bytes_to_string"
- TimestampToString = "timestamp_to_string"
- DurationToString = "duration_to_string"
-)
-
-// Timestamp conversion functions
-const (
- TimestampToTimestamp = "timestamp_to_timestamp"
- StringToTimestamp = "string_to_timestamp"
- IntToTimestamp = "int64_to_timestamp"
-)
-
-// Convert duration from string
-const (
- DurationToDuration = "duration_to_duration"
- StringToDuration = "string_to_duration"
- IntToDuration = "int64_to_duration"
-)
-
-// Convert to dyn
-const (
- ToDyn = "to_dyn"
-)
-
-// Comprehension helper methods, not directly accessible by a developer.
-const (
- Iterator = "@iterator"
- HasNext = "@hasNext"
- Next = "@next"
-)
-
-// IsTypeConversionFunction returns whether the input function is a standard library type
-// conversion function.
-func IsTypeConversionFunction(function string) bool {
- switch function {
- case TypeConvertBool,
- TypeConvertBytes,
- TypeConvertDouble,
- TypeConvertDuration,
- TypeConvertDyn,
- TypeConvertInt,
- TypeConvertString,
- TypeConvertTimestamp,
- TypeConvertType,
- TypeConvertUint:
- return true
- default:
- return false
- }
-}
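The removed overloads package is mostly string constants; IsTypeConversionFunction is the one helper with logic. A minimal sketch, grounded in the constants declared in the deleted file:

package main

import (
	"fmt"

	"github.com/google/cel-go/common/overloads"
)

func main() {
	fmt.Println(overloads.IsTypeConversionFunction(overloads.TypeConvertTimestamp)) // true
	fmt.Println(overloads.IsTypeConversionFunction(overloads.Size))                 // false
	fmt.Println(overloads.TimestampToYearWithTz)                                    // timestamp_to_year_with_tz
}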
diff --git a/vendor/github.com/google/cel-go/common/runes/BUILD.bazel b/vendor/github.com/google/cel-go/common/runes/BUILD.bazel
deleted file mode 100644
index bb30242cf..000000000
--- a/vendor/github.com/google/cel-go/common/runes/BUILD.bazel
+++ /dev/null
@@ -1,25 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "buffer.go",
- ],
- importpath = "github.com/google/cel-go/common/runes",
-)
-
-go_test(
- name = "go_default_test",
- size = "small",
- srcs = [
- "buffer_test.go",
- ],
- embed = [
- ":go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/runes/buffer.go b/vendor/github.com/google/cel-go/common/runes/buffer.go
deleted file mode 100644
index 50aac0b27..000000000
--- a/vendor/github.com/google/cel-go/common/runes/buffer.go
+++ /dev/null
@@ -1,194 +0,0 @@
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package runes provides interfaces and utilities for working with runes.
-package runes
-
-import (
- "strings"
- "unicode/utf8"
-)
-
-// Buffer is an interface for accessing a contiguous array of code points.
-type Buffer interface {
- Get(i int) rune
- Slice(i, j int) string
- Len() int
-}
-
-type emptyBuffer struct{}
-
-func (e *emptyBuffer) Get(i int) rune {
- panic("slice index out of bounds")
-}
-
-func (e *emptyBuffer) Slice(i, j int) string {
- if i != 0 || i != j {
- panic("slice index out of bounds")
- }
- return ""
-}
-
-func (e *emptyBuffer) Len() int {
- return 0
-}
-
-var _ Buffer = &emptyBuffer{}
-
-// asciiBuffer is an implementation for an array of code points that contain code points only from
-// the ASCII character set.
-type asciiBuffer struct {
- arr []byte
-}
-
-func (a *asciiBuffer) Get(i int) rune {
- return rune(uint32(a.arr[i]))
-}
-
-func (a *asciiBuffer) Slice(i, j int) string {
- return string(a.arr[i:j])
-}
-
-func (a *asciiBuffer) Len() int {
- return len(a.arr)
-}
-
-var _ Buffer = &asciiBuffer{}
-
-// basicBuffer is an implementation for an array of code points that contain code points from both
-// the Latin-1 character set and Basic Multilingual Plane.
-type basicBuffer struct {
- arr []uint16
-}
-
-func (b *basicBuffer) Get(i int) rune {
- return rune(uint32(b.arr[i]))
-}
-
-func (b *basicBuffer) Slice(i, j int) string {
- var str strings.Builder
- str.Grow((j - i) * 3) // Worst case encoding size for 0xffff is 3.
- for ; i < j; i++ {
- str.WriteRune(rune(uint32(b.arr[i])))
- }
- return str.String()
-}
-
-func (b *basicBuffer) Len() int {
- return len(b.arr)
-}
-
-var _ Buffer = &basicBuffer{}
-
-// supplementalBuffer is an implementation for an array of code points that contain code points from
-// the Latin-1 character set, Basic Multilingual Plane, or the Supplemental Multilingual Plane.
-type supplementalBuffer struct {
- arr []rune
-}
-
-func (s *supplementalBuffer) Get(i int) rune {
- return rune(uint32(s.arr[i]))
-}
-
-func (s *supplementalBuffer) Slice(i, j int) string {
- return string(s.arr[i:j])
-}
-
-func (s *supplementalBuffer) Len() int {
- return len(s.arr)
-}
-
-var _ Buffer = &supplementalBuffer{}
-
-var nilBuffer = &emptyBuffer{}
-
-// NewBuffer returns an efficient implementation of Buffer for the given text based on the ranges of
-// the encoded code points contained within.
-//
-// Code points are represented as an array of byte, uint16, or rune. This approach ensures that
-// each index represents a code point by itself without needing to use an array of rune. At first
-// we assume all code points are less than or equal to '\u007f'. If this holds true, the
-// underlying storage is a byte array containing only ASCII characters. If we encounter a code
-// point above this range but less than or equal to '\uffff', we allocate a uint16 array, copy the
-// elements of the previous byte array to the uint16 array, and continue. If this holds true, the
-// underlying storage is a uint16 array containing only Unicode characters in the Basic Multilingual
-// Plane. If we encounter a code point above '\uffff', we allocate a rune array, copy the previous
-// elements of the byte or uint16 array, and continue. The underlying storage is a rune array
-// containing any Unicode character.
-func NewBuffer(data string) Buffer {
- if len(data) == 0 {
- return nilBuffer
- }
- var (
- idx = 0
- buf8 = make([]byte, 0, len(data))
- buf16 []uint16
- buf32 []rune
- )
- for idx < len(data) {
- r, s := utf8.DecodeRuneInString(data[idx:])
- idx += s
- if r < utf8.RuneSelf {
- buf8 = append(buf8, byte(r))
- continue
- }
- if r <= 0xffff {
- buf16 = make([]uint16, len(buf8), len(data))
- for i, v := range buf8 {
- buf16[i] = uint16(v)
- }
- buf8 = nil
- buf16 = append(buf16, uint16(r))
- goto copy16
- }
- buf32 = make([]rune, len(buf8), len(data))
- for i, v := range buf8 {
- buf32[i] = rune(uint32(v))
- }
- buf8 = nil
- buf32 = append(buf32, r)
- goto copy32
- }
- return &asciiBuffer{
- arr: buf8,
- }
-copy16:
- for idx < len(data) {
- r, s := utf8.DecodeRuneInString(data[idx:])
- idx += s
- if r <= 0xffff {
- buf16 = append(buf16, uint16(r))
- continue
- }
- buf32 = make([]rune, len(buf16), len(data))
- for i, v := range buf16 {
- buf32[i] = rune(uint32(v))
- }
- buf16 = nil
- buf32 = append(buf32, r)
- goto copy32
- }
- return &basicBuffer{
- arr: buf16,
- }
-copy32:
- for idx < len(data) {
- r, s := utf8.DecodeRuneInString(data[idx:])
- idx += s
- buf32 = append(buf32, r)
- }
- return &supplementalBuffer{
- arr: buf32,
- }
-}
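A hedged sketch of the removed runes.Buffer: NewBuffer picks the narrowest backing array (byte, uint16, or rune) that can hold every code point, so Get, Slice, and Len operate on code points rather than bytes. The sample string is illustrative.

package main

import (
	"fmt"

	"github.com/google/cel-go/common/runes"
)

func main() {
	buf := runes.NewBuffer("héllo 🙂")
	fmt.Println(buf.Len())          // 7 code points, fewer than the byte length
	fmt.Println(string(buf.Get(1))) // é
	fmt.Println(buf.Slice(0, 5))    // héllo
}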
diff --git a/vendor/github.com/google/cel-go/common/source.go b/vendor/github.com/google/cel-go/common/source.go
deleted file mode 100644
index acf22bdf1..000000000
--- a/vendor/github.com/google/cel-go/common/source.go
+++ /dev/null
@@ -1,183 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package common
-
-import (
- "strings"
- "unicode/utf8"
-
- "github.com/google/cel-go/common/runes"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-// Source interface for filter source contents.
-type Source interface {
- // Content returns the source content represented as a string.
-	// Example contents are a single file's contents, a textbox field,
-	// or a URL parameter.
- Content() string
-
- // Description gives a brief description of the source.
- // Example descriptions are a file name or ui element.
-	// Example descriptions are a file name or UI element.
-
- // LineOffsets gives the character offsets at which lines occur.
- // The zero-th entry should refer to the break between the first
- // and second line, or EOF if there is only one line of source.
- LineOffsets() []int32
-
- // LocationOffset translates a Location to an offset.
- // Given the line and column of the Location returns the
- // Location's character offset in the Source, and a bool
- // indicating whether the Location was found.
- LocationOffset(location Location) (int32, bool)
-
- // OffsetLocation translates a character offset to a Location, or
- // false if the conversion was not feasible.
- OffsetLocation(offset int32) (Location, bool)
-
- // NewLocation takes an input line and column and produces a Location.
- // The default behavior is to treat the line and column as absolute,
- // but concrete derivations may use this method to convert a relative
- // line and column position into an absolute location.
- NewLocation(line, col int) Location
-
- // Snippet returns a line of content and whether the line was found.
- Snippet(line int) (string, bool)
-}
-
-// The sourceImpl type implementation of the Source interface.
-type sourceImpl struct {
- runes.Buffer
- description string
- lineOffsets []int32
-}
-
-var _ runes.Buffer = &sourceImpl{}
-
-// TODO(jimlarson) "Character offsets" should index the code points
-// within the UTF-8 encoded string. It currently indexes bytes.
-// Can be accomplished by using rune[] instead of string for contents.
-
-// NewTextSource creates a new Source from the input text string.
-func NewTextSource(text string) Source {
- return NewStringSource(text, "")
-}
-
-// NewStringSource creates a new Source from the given contents and description.
-func NewStringSource(contents string, description string) Source {
- // Compute line offsets up front as they are referred to frequently.
- lines := strings.Split(contents, "\n")
- offsets := make([]int32, len(lines))
- var offset int32
- for i, line := range lines {
- offset = offset + int32(utf8.RuneCountInString(line)) + 1
- offsets[int32(i)] = offset
- }
- return &sourceImpl{
- Buffer: runes.NewBuffer(contents),
- description: description,
- lineOffsets: offsets,
- }
-}
-
-// NewInfoSource creates a new Source from a SourceInfo.
-func NewInfoSource(info *exprpb.SourceInfo) Source {
- return &sourceImpl{
- Buffer: runes.NewBuffer(""),
- description: info.GetLocation(),
- lineOffsets: info.GetLineOffsets(),
- }
-}
-
-// Content implements the Source interface method.
-func (s *sourceImpl) Content() string {
- return s.Slice(0, s.Len())
-}
-
-// Description implements the Source interface method.
-func (s *sourceImpl) Description() string {
- return s.description
-}
-
-// LineOffsets implements the Source interface method.
-func (s *sourceImpl) LineOffsets() []int32 {
- return s.lineOffsets
-}
-
-// LocationOffset implements the Source interface method.
-func (s *sourceImpl) LocationOffset(location Location) (int32, bool) {
- if lineOffset, found := s.findLineOffset(location.Line()); found {
- return lineOffset + int32(location.Column()), true
- }
- return -1, false
-}
-
-// NewLocation implements the Source interface method.
-func (s *sourceImpl) NewLocation(line, col int) Location {
- return NewLocation(line, col)
-}
-
-// OffsetLocation implements the Source interface method.
-func (s *sourceImpl) OffsetLocation(offset int32) (Location, bool) {
- line, lineOffset := s.findLine(offset)
- return NewLocation(int(line), int(offset-lineOffset)), true
-}
-
-// Snippet implements the Source interface method.
-func (s *sourceImpl) Snippet(line int) (string, bool) {
- charStart, found := s.findLineOffset(line)
- if !found || s.Len() == 0 {
- return "", false
- }
- charEnd, found := s.findLineOffset(line + 1)
- if found {
- return s.Slice(int(charStart), int(charEnd-1)), true
- }
- return s.Slice(int(charStart), s.Len()), true
-}
-
-// findLineOffset returns the offset where the (1-indexed) line begins,
-// or false if the line doesn't exist.
-func (s *sourceImpl) findLineOffset(line int) (int32, bool) {
- if line == 1 {
- return 0, true
- }
- if line > 1 && line <= int(len(s.lineOffsets)) {
- offset := s.lineOffsets[line-2]
- return offset, true
- }
- return -1, false
-}
-
-// findLine finds the line that contains the given character offset and
-// returns the line number and offset of the beginning of that line.
-// Note that the last line is treated as if it contains all offsets
-// beyond the end of the actual source.
-func (s *sourceImpl) findLine(characterOffset int32) (int32, int32) {
- var line int32 = 1
- for _, lineOffset := range s.lineOffsets {
- if lineOffset > characterOffset {
- break
- } else {
- line++
- }
- }
- if line == 1 {
- return line, 0
- }
- return line, s.lineOffsets[line-2]
-}
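A hedged sketch of the removed Source helpers: line offsets are precomputed by NewStringSource, so character offsets and (line, column) locations can be translated in both directions. The expression text is illustrative; expected values follow the deleted implementation above.

package main

import (
	"fmt"

	"github.com/google/cel-go/common"
)

func main() {
	src := common.NewStringSource("a < b\nb < c", "<expr>")

	if snippet, ok := src.Snippet(2); ok {
		fmt.Println(snippet) // b < c
	}
	if loc, ok := src.OffsetLocation(8); ok {
		fmt.Println(loc.Line(), loc.Column()) // 2 2
	}
	if off, ok := src.LocationOffset(common.NewLocation(2, 2)); ok {
		fmt.Println(off) // 8
	}
}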
diff --git a/vendor/github.com/google/cel-go/common/stdlib/BUILD.bazel b/vendor/github.com/google/cel-go/common/stdlib/BUILD.bazel
deleted file mode 100644
index c130a93f6..000000000
--- a/vendor/github.com/google/cel-go/common/stdlib/BUILD.bazel
+++ /dev/null
@@ -1,25 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "standard.go",
- ],
- importpath = "github.com/google/cel-go/common/stdlib",
- deps = [
- "//checker/decls:go_default_library",
- "//common/decls:go_default_library",
- "//common/functions:go_default_library",
- "//common/operators:go_default_library",
- "//common/overloads:go_default_library",
- "//common/types:go_default_library",
- "//common/types/ref:go_default_library",
- "//common/types/traits:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- ],
-)
\ No newline at end of file
diff --git a/vendor/github.com/google/cel-go/common/stdlib/standard.go b/vendor/github.com/google/cel-go/common/stdlib/standard.go
deleted file mode 100644
index d02cb64bf..000000000
--- a/vendor/github.com/google/cel-go/common/stdlib/standard.go
+++ /dev/null
@@ -1,661 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package stdlib contains all of the standard library function declarations and definitions for CEL.
-package stdlib
-
-import (
- "github.com/google/cel-go/common/decls"
- "github.com/google/cel-go/common/functions"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-var (
- stdFunctions []*decls.FunctionDecl
- stdFnDecls []*exprpb.Decl
- stdTypes []*decls.VariableDecl
- stdTypeDecls []*exprpb.Decl
-)
-
-func init() {
- paramA := types.NewTypeParamType("A")
- paramB := types.NewTypeParamType("B")
- listOfA := types.NewListType(paramA)
- mapOfAB := types.NewMapType(paramA, paramB)
-
- stdTypes = []*decls.VariableDecl{
- decls.TypeVariable(types.BoolType),
- decls.TypeVariable(types.BytesType),
- decls.TypeVariable(types.DoubleType),
- decls.TypeVariable(types.DurationType),
- decls.TypeVariable(types.IntType),
- decls.TypeVariable(listOfA),
- decls.TypeVariable(mapOfAB),
- decls.TypeVariable(types.NullType),
- decls.TypeVariable(types.StringType),
- decls.TypeVariable(types.TimestampType),
- decls.TypeVariable(types.TypeType),
- decls.TypeVariable(types.UintType),
- }
-
- stdTypeDecls = make([]*exprpb.Decl, 0, len(stdTypes))
- for _, stdType := range stdTypes {
- typeVar, err := decls.VariableDeclToExprDecl(stdType)
- if err != nil {
- panic(err)
- }
- stdTypeDecls = append(stdTypeDecls, typeVar)
- }
-
- stdFunctions = []*decls.FunctionDecl{
- // Logical operators. Special-cased within the interpreter.
- // Note, the singleton binding prevents extensions from overriding the operator behavior.
- function(operators.Conditional,
- decls.Overload(overloads.Conditional, argTypes(types.BoolType, paramA, paramA), paramA,
- decls.OverloadIsNonStrict()),
- decls.SingletonFunctionBinding(noFunctionOverrides)),
- function(operators.LogicalAnd,
- decls.Overload(overloads.LogicalAnd, argTypes(types.BoolType, types.BoolType), types.BoolType,
- decls.OverloadIsNonStrict()),
- decls.SingletonBinaryBinding(noBinaryOverrides)),
- function(operators.LogicalOr,
- decls.Overload(overloads.LogicalOr, argTypes(types.BoolType, types.BoolType), types.BoolType,
- decls.OverloadIsNonStrict()),
- decls.SingletonBinaryBinding(noBinaryOverrides)),
- function(operators.LogicalNot,
- decls.Overload(overloads.LogicalNot, argTypes(types.BoolType), types.BoolType),
- decls.SingletonUnaryBinding(func(val ref.Val) ref.Val {
- b, ok := val.(types.Bool)
- if !ok {
- return types.MaybeNoSuchOverloadErr(val)
- }
- return b.Negate()
- })),
-
- // Comprehension short-circuiting related function
- function(operators.NotStrictlyFalse,
- decls.Overload(overloads.NotStrictlyFalse, argTypes(types.BoolType), types.BoolType,
- decls.OverloadIsNonStrict(),
- decls.UnaryBinding(notStrictlyFalse))),
- // Deprecated: __not_strictly_false__
- function(operators.OldNotStrictlyFalse,
- decls.DisableDeclaration(true), // safe deprecation
- decls.Overload(operators.OldNotStrictlyFalse, argTypes(types.BoolType), types.BoolType,
- decls.OverloadIsNonStrict(),
- decls.UnaryBinding(notStrictlyFalse))),
-
- // Equality / inequality. Special-cased in the interpreter
- function(operators.Equals,
- decls.Overload(overloads.Equals, argTypes(paramA, paramA), types.BoolType),
- decls.SingletonBinaryBinding(noBinaryOverrides)),
- function(operators.NotEquals,
- decls.Overload(overloads.NotEquals, argTypes(paramA, paramA), types.BoolType),
- decls.SingletonBinaryBinding(noBinaryOverrides)),
-
- // Mathematical operators
- function(operators.Add,
- decls.Overload(overloads.AddBytes,
- argTypes(types.BytesType, types.BytesType), types.BytesType),
- decls.Overload(overloads.AddDouble,
- argTypes(types.DoubleType, types.DoubleType), types.DoubleType),
- decls.Overload(overloads.AddDurationDuration,
- argTypes(types.DurationType, types.DurationType), types.DurationType),
- decls.Overload(overloads.AddDurationTimestamp,
- argTypes(types.DurationType, types.TimestampType), types.TimestampType),
- decls.Overload(overloads.AddTimestampDuration,
- argTypes(types.TimestampType, types.DurationType), types.TimestampType),
- decls.Overload(overloads.AddInt64,
- argTypes(types.IntType, types.IntType), types.IntType),
- decls.Overload(overloads.AddList,
- argTypes(listOfA, listOfA), listOfA),
- decls.Overload(overloads.AddString,
- argTypes(types.StringType, types.StringType), types.StringType),
- decls.Overload(overloads.AddUint64,
- argTypes(types.UintType, types.UintType), types.UintType),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- return lhs.(traits.Adder).Add(rhs)
- }, traits.AdderType)),
- function(operators.Divide,
- decls.Overload(overloads.DivideDouble,
- argTypes(types.DoubleType, types.DoubleType), types.DoubleType),
- decls.Overload(overloads.DivideInt64,
- argTypes(types.IntType, types.IntType), types.IntType),
- decls.Overload(overloads.DivideUint64,
- argTypes(types.UintType, types.UintType), types.UintType),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- return lhs.(traits.Divider).Divide(rhs)
- }, traits.DividerType)),
- function(operators.Modulo,
- decls.Overload(overloads.ModuloInt64,
- argTypes(types.IntType, types.IntType), types.IntType),
- decls.Overload(overloads.ModuloUint64,
- argTypes(types.UintType, types.UintType), types.UintType),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- return lhs.(traits.Modder).Modulo(rhs)
- }, traits.ModderType)),
- function(operators.Multiply,
- decls.Overload(overloads.MultiplyDouble,
- argTypes(types.DoubleType, types.DoubleType), types.DoubleType),
- decls.Overload(overloads.MultiplyInt64,
- argTypes(types.IntType, types.IntType), types.IntType),
- decls.Overload(overloads.MultiplyUint64,
- argTypes(types.UintType, types.UintType), types.UintType),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- return lhs.(traits.Multiplier).Multiply(rhs)
- }, traits.MultiplierType)),
- function(operators.Negate,
- decls.Overload(overloads.NegateDouble, argTypes(types.DoubleType), types.DoubleType),
- decls.Overload(overloads.NegateInt64, argTypes(types.IntType), types.IntType),
- decls.SingletonUnaryBinding(func(val ref.Val) ref.Val {
- if types.IsBool(val) {
- return types.MaybeNoSuchOverloadErr(val)
- }
- return val.(traits.Negater).Negate()
- }, traits.NegatorType)),
- function(operators.Subtract,
- decls.Overload(overloads.SubtractDouble,
- argTypes(types.DoubleType, types.DoubleType), types.DoubleType),
- decls.Overload(overloads.SubtractDurationDuration,
- argTypes(types.DurationType, types.DurationType), types.DurationType),
- decls.Overload(overloads.SubtractInt64,
- argTypes(types.IntType, types.IntType), types.IntType),
- decls.Overload(overloads.SubtractTimestampDuration,
- argTypes(types.TimestampType, types.DurationType), types.TimestampType),
- decls.Overload(overloads.SubtractTimestampTimestamp,
- argTypes(types.TimestampType, types.TimestampType), types.DurationType),
- decls.Overload(overloads.SubtractUint64,
- argTypes(types.UintType, types.UintType), types.UintType),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- return lhs.(traits.Subtractor).Subtract(rhs)
- }, traits.SubtractorType)),
-
- // Relations operators
-
- function(operators.Less,
- decls.Overload(overloads.LessBool,
- argTypes(types.BoolType, types.BoolType), types.BoolType),
- decls.Overload(overloads.LessInt64,
- argTypes(types.IntType, types.IntType), types.BoolType),
- decls.Overload(overloads.LessInt64Double,
- argTypes(types.IntType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.LessInt64Uint64,
- argTypes(types.IntType, types.UintType), types.BoolType),
- decls.Overload(overloads.LessUint64,
- argTypes(types.UintType, types.UintType), types.BoolType),
- decls.Overload(overloads.LessUint64Double,
- argTypes(types.UintType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.LessUint64Int64,
- argTypes(types.UintType, types.IntType), types.BoolType),
- decls.Overload(overloads.LessDouble,
- argTypes(types.DoubleType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.LessDoubleInt64,
- argTypes(types.DoubleType, types.IntType), types.BoolType),
- decls.Overload(overloads.LessDoubleUint64,
- argTypes(types.DoubleType, types.UintType), types.BoolType),
- decls.Overload(overloads.LessString,
- argTypes(types.StringType, types.StringType), types.BoolType),
- decls.Overload(overloads.LessBytes,
- argTypes(types.BytesType, types.BytesType), types.BoolType),
- decls.Overload(overloads.LessTimestamp,
- argTypes(types.TimestampType, types.TimestampType), types.BoolType),
- decls.Overload(overloads.LessDuration,
- argTypes(types.DurationType, types.DurationType), types.BoolType),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- cmp := lhs.(traits.Comparer).Compare(rhs)
- if cmp == types.IntNegOne {
- return types.True
- }
- if cmp == types.IntOne || cmp == types.IntZero {
- return types.False
- }
- return cmp
- }, traits.ComparerType)),
-
- function(operators.LessEquals,
- decls.Overload(overloads.LessEqualsBool,
- argTypes(types.BoolType, types.BoolType), types.BoolType),
- decls.Overload(overloads.LessEqualsInt64,
- argTypes(types.IntType, types.IntType), types.BoolType),
- decls.Overload(overloads.LessEqualsInt64Double,
- argTypes(types.IntType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.LessEqualsInt64Uint64,
- argTypes(types.IntType, types.UintType), types.BoolType),
- decls.Overload(overloads.LessEqualsUint64,
- argTypes(types.UintType, types.UintType), types.BoolType),
- decls.Overload(overloads.LessEqualsUint64Double,
- argTypes(types.UintType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.LessEqualsUint64Int64,
- argTypes(types.UintType, types.IntType), types.BoolType),
- decls.Overload(overloads.LessEqualsDouble,
- argTypes(types.DoubleType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.LessEqualsDoubleInt64,
- argTypes(types.DoubleType, types.IntType), types.BoolType),
- decls.Overload(overloads.LessEqualsDoubleUint64,
- argTypes(types.DoubleType, types.UintType), types.BoolType),
- decls.Overload(overloads.LessEqualsString,
- argTypes(types.StringType, types.StringType), types.BoolType),
- decls.Overload(overloads.LessEqualsBytes,
- argTypes(types.BytesType, types.BytesType), types.BoolType),
- decls.Overload(overloads.LessEqualsTimestamp,
- argTypes(types.TimestampType, types.TimestampType), types.BoolType),
- decls.Overload(overloads.LessEqualsDuration,
- argTypes(types.DurationType, types.DurationType), types.BoolType),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- cmp := lhs.(traits.Comparer).Compare(rhs)
- if cmp == types.IntNegOne || cmp == types.IntZero {
- return types.True
- }
- if cmp == types.IntOne {
- return types.False
- }
- return cmp
- }, traits.ComparerType)),
-
- function(operators.Greater,
- decls.Overload(overloads.GreaterBool,
- argTypes(types.BoolType, types.BoolType), types.BoolType),
- decls.Overload(overloads.GreaterInt64,
- argTypes(types.IntType, types.IntType), types.BoolType),
- decls.Overload(overloads.GreaterInt64Double,
- argTypes(types.IntType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.GreaterInt64Uint64,
- argTypes(types.IntType, types.UintType), types.BoolType),
- decls.Overload(overloads.GreaterUint64,
- argTypes(types.UintType, types.UintType), types.BoolType),
- decls.Overload(overloads.GreaterUint64Double,
- argTypes(types.UintType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.GreaterUint64Int64,
- argTypes(types.UintType, types.IntType), types.BoolType),
- decls.Overload(overloads.GreaterDouble,
- argTypes(types.DoubleType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.GreaterDoubleInt64,
- argTypes(types.DoubleType, types.IntType), types.BoolType),
- decls.Overload(overloads.GreaterDoubleUint64,
- argTypes(types.DoubleType, types.UintType), types.BoolType),
- decls.Overload(overloads.GreaterString,
- argTypes(types.StringType, types.StringType), types.BoolType),
- decls.Overload(overloads.GreaterBytes,
- argTypes(types.BytesType, types.BytesType), types.BoolType),
- decls.Overload(overloads.GreaterTimestamp,
- argTypes(types.TimestampType, types.TimestampType), types.BoolType),
- decls.Overload(overloads.GreaterDuration,
- argTypes(types.DurationType, types.DurationType), types.BoolType),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- cmp := lhs.(traits.Comparer).Compare(rhs)
- if cmp == types.IntOne {
- return types.True
- }
- if cmp == types.IntNegOne || cmp == types.IntZero {
- return types.False
- }
- return cmp
- }, traits.ComparerType)),
-
- function(operators.GreaterEquals,
- decls.Overload(overloads.GreaterEqualsBool,
- argTypes(types.BoolType, types.BoolType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsInt64,
- argTypes(types.IntType, types.IntType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsInt64Double,
- argTypes(types.IntType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsInt64Uint64,
- argTypes(types.IntType, types.UintType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsUint64,
- argTypes(types.UintType, types.UintType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsUint64Double,
- argTypes(types.UintType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsUint64Int64,
- argTypes(types.UintType, types.IntType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsDouble,
- argTypes(types.DoubleType, types.DoubleType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsDoubleInt64,
- argTypes(types.DoubleType, types.IntType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsDoubleUint64,
- argTypes(types.DoubleType, types.UintType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsString,
- argTypes(types.StringType, types.StringType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsBytes,
- argTypes(types.BytesType, types.BytesType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsTimestamp,
- argTypes(types.TimestampType, types.TimestampType), types.BoolType),
- decls.Overload(overloads.GreaterEqualsDuration,
- argTypes(types.DurationType, types.DurationType), types.BoolType),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- cmp := lhs.(traits.Comparer).Compare(rhs)
- if cmp == types.IntOne || cmp == types.IntZero {
- return types.True
- }
- if cmp == types.IntNegOne {
- return types.False
- }
- return cmp
- }, traits.ComparerType)),
-
- // Indexing
- function(operators.Index,
- decls.Overload(overloads.IndexList, argTypes(listOfA, types.IntType), paramA),
- decls.Overload(overloads.IndexMap, argTypes(mapOfAB, paramA), paramB),
- decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val {
- return lhs.(traits.Indexer).Get(rhs)
- }, traits.IndexerType)),
-
- // Collections operators
- function(operators.In,
- decls.Overload(overloads.InList, argTypes(paramA, listOfA), types.BoolType),
- decls.Overload(overloads.InMap, argTypes(paramA, mapOfAB), types.BoolType),
- decls.SingletonBinaryBinding(inAggregate)),
- function(operators.OldIn,
- decls.DisableDeclaration(true), // safe deprecation
- decls.Overload(overloads.InList, argTypes(paramA, listOfA), types.BoolType),
- decls.Overload(overloads.InMap, argTypes(paramA, mapOfAB), types.BoolType),
- decls.SingletonBinaryBinding(inAggregate)),
- function(overloads.DeprecatedIn,
- decls.DisableDeclaration(true), // safe deprecation
- decls.Overload(overloads.InList, argTypes(paramA, listOfA), types.BoolType),
- decls.Overload(overloads.InMap, argTypes(paramA, mapOfAB), types.BoolType),
- decls.SingletonBinaryBinding(inAggregate)),
- function(overloads.Size,
- decls.Overload(overloads.SizeBytes, argTypes(types.BytesType), types.IntType),
- decls.MemberOverload(overloads.SizeBytesInst, argTypes(types.BytesType), types.IntType),
- decls.Overload(overloads.SizeList, argTypes(listOfA), types.IntType),
- decls.MemberOverload(overloads.SizeListInst, argTypes(listOfA), types.IntType),
- decls.Overload(overloads.SizeMap, argTypes(mapOfAB), types.IntType),
- decls.MemberOverload(overloads.SizeMapInst, argTypes(mapOfAB), types.IntType),
- decls.Overload(overloads.SizeString, argTypes(types.StringType), types.IntType),
- decls.MemberOverload(overloads.SizeStringInst, argTypes(types.StringType), types.IntType),
- decls.SingletonUnaryBinding(func(val ref.Val) ref.Val {
- return val.(traits.Sizer).Size()
- }, traits.SizerType)),
-
- // Type conversions
- function(overloads.TypeConvertType,
- decls.Overload(overloads.TypeConvertType, argTypes(paramA), types.NewTypeTypeWithParam(paramA)),
- decls.SingletonUnaryBinding(convertToType(types.TypeType))),
-
- // Bool conversions
- function(overloads.TypeConvertBool,
- decls.Overload(overloads.BoolToBool, argTypes(types.BoolType), types.BoolType,
- decls.UnaryBinding(identity)),
- decls.Overload(overloads.StringToBool, argTypes(types.StringType), types.BoolType,
- decls.UnaryBinding(convertToType(types.BoolType)))),
-
- // Bytes conversions
- function(overloads.TypeConvertBytes,
- decls.Overload(overloads.BytesToBytes, argTypes(types.BytesType), types.BytesType,
- decls.UnaryBinding(identity)),
- decls.Overload(overloads.StringToBytes, argTypes(types.StringType), types.BytesType,
- decls.UnaryBinding(convertToType(types.BytesType)))),
-
- // Double conversions
- function(overloads.TypeConvertDouble,
- decls.Overload(overloads.DoubleToDouble, argTypes(types.DoubleType), types.DoubleType,
- decls.UnaryBinding(identity)),
- decls.Overload(overloads.IntToDouble, argTypes(types.IntType), types.DoubleType,
- decls.UnaryBinding(convertToType(types.DoubleType))),
- decls.Overload(overloads.StringToDouble, argTypes(types.StringType), types.DoubleType,
- decls.UnaryBinding(convertToType(types.DoubleType))),
- decls.Overload(overloads.UintToDouble, argTypes(types.UintType), types.DoubleType,
- decls.UnaryBinding(convertToType(types.DoubleType)))),
-
- // Duration conversions
- function(overloads.TypeConvertDuration,
- decls.Overload(overloads.DurationToDuration, argTypes(types.DurationType), types.DurationType,
- decls.UnaryBinding(identity)),
- decls.Overload(overloads.IntToDuration, argTypes(types.IntType), types.DurationType,
- decls.UnaryBinding(convertToType(types.DurationType))),
- decls.Overload(overloads.StringToDuration, argTypes(types.StringType), types.DurationType,
- decls.UnaryBinding(convertToType(types.DurationType)))),
-
- // Dyn conversions
- function(overloads.TypeConvertDyn,
- decls.Overload(overloads.ToDyn, argTypes(paramA), types.DynType),
- decls.SingletonUnaryBinding(identity)),
-
- // Int conversions
- function(overloads.TypeConvertInt,
- decls.Overload(overloads.IntToInt, argTypes(types.IntType), types.IntType,
- decls.UnaryBinding(identity)),
- decls.Overload(overloads.DoubleToInt, argTypes(types.DoubleType), types.IntType,
- decls.UnaryBinding(convertToType(types.IntType))),
- decls.Overload(overloads.DurationToInt, argTypes(types.DurationType), types.IntType,
- decls.UnaryBinding(convertToType(types.IntType))),
- decls.Overload(overloads.StringToInt, argTypes(types.StringType), types.IntType,
- decls.UnaryBinding(convertToType(types.IntType))),
- decls.Overload(overloads.TimestampToInt, argTypes(types.TimestampType), types.IntType,
- decls.UnaryBinding(convertToType(types.IntType))),
- decls.Overload(overloads.UintToInt, argTypes(types.UintType), types.IntType,
- decls.UnaryBinding(convertToType(types.IntType))),
- ),
-
- // String conversions
- function(overloads.TypeConvertString,
- decls.Overload(overloads.StringToString, argTypes(types.StringType), types.StringType,
- decls.UnaryBinding(identity)),
- decls.Overload(overloads.BoolToString, argTypes(types.BoolType), types.StringType,
- decls.UnaryBinding(convertToType(types.StringType))),
- decls.Overload(overloads.BytesToString, argTypes(types.BytesType), types.StringType,
- decls.UnaryBinding(convertToType(types.StringType))),
- decls.Overload(overloads.DoubleToString, argTypes(types.DoubleType), types.StringType,
- decls.UnaryBinding(convertToType(types.StringType))),
- decls.Overload(overloads.DurationToString, argTypes(types.DurationType), types.StringType,
- decls.UnaryBinding(convertToType(types.StringType))),
- decls.Overload(overloads.IntToString, argTypes(types.IntType), types.StringType,
- decls.UnaryBinding(convertToType(types.StringType))),
- decls.Overload(overloads.TimestampToString, argTypes(types.TimestampType), types.StringType,
- decls.UnaryBinding(convertToType(types.StringType))),
- decls.Overload(overloads.UintToString, argTypes(types.UintType), types.StringType,
- decls.UnaryBinding(convertToType(types.StringType)))),
-
- // Timestamp conversions
- function(overloads.TypeConvertTimestamp,
- decls.Overload(overloads.TimestampToTimestamp, argTypes(types.TimestampType), types.TimestampType,
- decls.UnaryBinding(identity)),
- decls.Overload(overloads.IntToTimestamp, argTypes(types.IntType), types.TimestampType,
- decls.UnaryBinding(convertToType(types.TimestampType))),
- decls.Overload(overloads.StringToTimestamp, argTypes(types.StringType), types.TimestampType,
- decls.UnaryBinding(convertToType(types.TimestampType)))),
-
- // Uint conversions
- function(overloads.TypeConvertUint,
- decls.Overload(overloads.UintToUint, argTypes(types.UintType), types.UintType,
- decls.UnaryBinding(identity)),
- decls.Overload(overloads.DoubleToUint, argTypes(types.DoubleType), types.UintType,
- decls.UnaryBinding(convertToType(types.UintType))),
- decls.Overload(overloads.IntToUint, argTypes(types.IntType), types.UintType,
- decls.UnaryBinding(convertToType(types.UintType))),
- decls.Overload(overloads.StringToUint, argTypes(types.StringType), types.UintType,
- decls.UnaryBinding(convertToType(types.UintType)))),
-
- // String functions
- function(overloads.Contains,
- decls.MemberOverload(overloads.ContainsString,
- argTypes(types.StringType, types.StringType), types.BoolType,
- decls.BinaryBinding(types.StringContains)),
- decls.DisableTypeGuards(true)),
- function(overloads.EndsWith,
- decls.MemberOverload(overloads.EndsWithString,
- argTypes(types.StringType, types.StringType), types.BoolType,
- decls.BinaryBinding(types.StringEndsWith)),
- decls.DisableTypeGuards(true)),
- function(overloads.StartsWith,
- decls.MemberOverload(overloads.StartsWithString,
- argTypes(types.StringType, types.StringType), types.BoolType,
- decls.BinaryBinding(types.StringStartsWith)),
- decls.DisableTypeGuards(true)),
- function(overloads.Matches,
- decls.Overload(overloads.Matches, argTypes(types.StringType, types.StringType), types.BoolType),
- decls.MemberOverload(overloads.MatchesString,
- argTypes(types.StringType, types.StringType), types.BoolType),
- decls.SingletonBinaryBinding(func(str, pat ref.Val) ref.Val {
- return str.(traits.Matcher).Match(pat)
- }, traits.MatcherType)),
-
- // Timestamp / duration functions
- function(overloads.TimeGetFullYear,
- decls.MemberOverload(overloads.TimestampToYear,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToYearWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType)),
-
- function(overloads.TimeGetMonth,
- decls.MemberOverload(overloads.TimestampToMonth,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToMonthWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType)),
-
- function(overloads.TimeGetDayOfYear,
- decls.MemberOverload(overloads.TimestampToDayOfYear,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToDayOfYearWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType)),
-
- function(overloads.TimeGetDayOfMonth,
- decls.MemberOverload(overloads.TimestampToDayOfMonthZeroBased,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToDayOfMonthZeroBasedWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType)),
-
- function(overloads.TimeGetDate,
- decls.MemberOverload(overloads.TimestampToDayOfMonthOneBased,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToDayOfMonthOneBasedWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType)),
-
- function(overloads.TimeGetDayOfWeek,
- decls.MemberOverload(overloads.TimestampToDayOfWeek,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToDayOfWeekWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType)),
-
- function(overloads.TimeGetHours,
- decls.MemberOverload(overloads.TimestampToHours,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToHoursWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType),
- decls.MemberOverload(overloads.DurationToHours,
- argTypes(types.DurationType), types.IntType)),
-
- function(overloads.TimeGetMinutes,
- decls.MemberOverload(overloads.TimestampToMinutes,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToMinutesWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType),
- decls.MemberOverload(overloads.DurationToMinutes,
- argTypes(types.DurationType), types.IntType)),
-
- function(overloads.TimeGetSeconds,
- decls.MemberOverload(overloads.TimestampToSeconds,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToSecondsWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType),
- decls.MemberOverload(overloads.DurationToSeconds,
- argTypes(types.DurationType), types.IntType)),
-
- function(overloads.TimeGetMilliseconds,
- decls.MemberOverload(overloads.TimestampToMilliseconds,
- argTypes(types.TimestampType), types.IntType),
- decls.MemberOverload(overloads.TimestampToMillisecondsWithTz,
- argTypes(types.TimestampType, types.StringType), types.IntType),
- decls.MemberOverload(overloads.DurationToMilliseconds,
- argTypes(types.DurationType), types.IntType)),
- }
-
- stdFnDecls = make([]*exprpb.Decl, 0, len(stdFunctions))
- for _, fn := range stdFunctions {
- if fn.IsDeclarationDisabled() {
- continue
- }
- ed, err := decls.FunctionDeclToExprDecl(fn)
- if err != nil {
- panic(err)
- }
- stdFnDecls = append(stdFnDecls, ed)
- }
-}
-
-// Functions returns the set of standard library function declarations and definitions for CEL.
-func Functions() []*decls.FunctionDecl {
- return stdFunctions
-}
-
-// FunctionExprDecls returns the legacy style protobuf-typed declarations for all functions and overloads
-// in the CEL standard environment.
-//
-// Deprecated: use Functions
-func FunctionExprDecls() []*exprpb.Decl {
- return stdFnDecls
-}
-
-// Types returns the set of standard library types for CEL.
-func Types() []*decls.VariableDecl {
- return stdTypes
-}
-
-// TypeExprDecls returns the legacy style protobuf-typed declarations for all types in the CEL
-// standard environment.
-//
-// Deprecated: use Types
-func TypeExprDecls() []*exprpb.Decl {
- return stdTypeDecls
-}
-
-func notStrictlyFalse(value ref.Val) ref.Val {
- if types.IsBool(value) {
- return value
- }
- return types.True
-}
-
-func inAggregate(lhs ref.Val, rhs ref.Val) ref.Val {
- if rhs.Type().HasTrait(traits.ContainerType) {
- return rhs.(traits.Container).Contains(lhs)
- }
- return types.ValOrErr(rhs, "no such overload")
-}
-
-func function(name string, opts ...decls.FunctionOpt) *decls.FunctionDecl {
- fn, err := decls.NewFunction(name, opts...)
- if err != nil {
- panic(err)
- }
- return fn
-}
-
-func argTypes(args ...*types.Type) []*types.Type {
- return args
-}
-
-func noBinaryOverrides(rhs, lhs ref.Val) ref.Val {
- return types.NoSuchOverloadErr()
-}
-
-func noFunctionOverrides(args ...ref.Val) ref.Val {
- return types.NoSuchOverloadErr()
-}
-
-func identity(val ref.Val) ref.Val {
- return val
-}
-
-func convertToType(t ref.Type) functions.UnaryOp {
- return func(val ref.Val) ref.Val {
- return val.ConvertToType(t)
- }
-}
diff --git a/vendor/github.com/google/cel-go/common/types/BUILD.bazel b/vendor/github.com/google/cel-go/common/types/BUILD.bazel
deleted file mode 100644
index b5e44ffbf..000000000
--- a/vendor/github.com/google/cel-go/common/types/BUILD.bazel
+++ /dev/null
@@ -1,90 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "any_value.go",
- "bool.go",
- "bytes.go",
- "compare.go",
- "double.go",
- "duration.go",
- "err.go",
- "int.go",
- "iterator.go",
- "json_value.go",
- "list.go",
- "map.go",
- "null.go",
- "object.go",
- "optional.go",
- "overflow.go",
- "provider.go",
- "string.go",
- "timestamp.go",
- "types.go",
- "uint.go",
- "unknown.go",
- "util.go",
- ],
- importpath = "github.com/google/cel-go/common/types",
- deps = [
- "//checker/decls:go_default_library",
- "//common/overloads:go_default_library",
- "//common/types/pb:go_default_library",
- "//common/types/ref:go_default_library",
- "//common/types/traits:go_default_library",
- "@com_github_stoewer_go_strcase//:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//encoding/protojson:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//reflect/protoreflect:go_default_library",
- "@org_golang_google_protobuf//types/known/anypb:go_default_library",
- "@org_golang_google_protobuf//types/known/durationpb:go_default_library",
- "@org_golang_google_protobuf//types/known/structpb:go_default_library",
- "@org_golang_google_protobuf//types/known/timestamppb:go_default_library",
- "@org_golang_google_protobuf//types/known/wrapperspb:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- size = "small",
- srcs = [
- "bool_test.go",
- "bytes_test.go",
- "double_test.go",
- "duration_test.go",
- "int_test.go",
- "json_list_test.go",
- "json_struct_test.go",
- "list_test.go",
- "map_test.go",
- "null_test.go",
- "object_test.go",
- "optional_test.go",
- "provider_test.go",
- "string_test.go",
- "timestamp_test.go",
- "types_test.go",
- "uint_test.go",
- "unknown_test.go",
- "util_test.go",
- ],
- embed = [":go_default_library"],
- deps = [
- "//common/types/ref:go_default_library",
- "//test:go_default_library",
- "//test/proto3pb:test_all_types_go_proto",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//encoding/protojson:go_default_library",
- "@org_golang_google_protobuf//types/known/anypb:go_default_library",
- "@org_golang_google_protobuf//types/known/durationpb:go_default_library",
- "@org_golang_google_protobuf//types/known/timestamppb:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/types/any_value.go b/vendor/github.com/google/cel-go/common/types/any_value.go
deleted file mode 100644
index cda0f13ac..000000000
--- a/vendor/github.com/google/cel-go/common/types/any_value.go
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "reflect"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
-)
-
-// anyValueType constant representing the reflected type of google.protobuf.Any.
-var anyValueType = reflect.TypeOf(&anypb.Any{})
diff --git a/vendor/github.com/google/cel-go/common/types/bool.go b/vendor/github.com/google/cel-go/common/types/bool.go
deleted file mode 100644
index 565734f3f..000000000
--- a/vendor/github.com/google/cel-go/common/types/bool.go
+++ /dev/null
@@ -1,141 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
- "strconv"
-
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- wrapperspb "google.golang.org/protobuf/types/known/wrapperspb"
-)
-
-// Bool type that implements ref.Val and supports comparison and negation.
-type Bool bool
-
-var (
- // boolWrapperType golang reflected type for protobuf bool wrapper type.
- boolWrapperType = reflect.TypeOf(&wrapperspb.BoolValue{})
-)
-
-// Boolean constants
-const (
- False = Bool(false)
- True = Bool(true)
-)
-
-// Compare implements the traits.Comparer interface method.
-func (b Bool) Compare(other ref.Val) ref.Val {
- otherBool, ok := other.(Bool)
- if !ok {
- return ValOrErr(other, "no such overload")
- }
- if b == otherBool {
- return IntZero
- }
- if !b && otherBool {
- return IntNegOne
- }
- return IntOne
-}
-
-// ConvertToNative implements the ref.Val interface method.
-func (b Bool) ConvertToNative(typeDesc reflect.Type) (any, error) {
- switch typeDesc.Kind() {
- case reflect.Bool:
- return reflect.ValueOf(b).Convert(typeDesc).Interface(), nil
- case reflect.Ptr:
- switch typeDesc {
- case anyValueType:
- // Primitives must be wrapped to a wrapperspb.BoolValue before being packed into an Any.
- return anypb.New(wrapperspb.Bool(bool(b)))
- case boolWrapperType:
- // Convert the bool to a wrapperspb.BoolValue.
- return wrapperspb.Bool(bool(b)), nil
- case jsonValueType:
- // Return the bool as a new structpb.Value.
- return structpb.NewBoolValue(bool(b)), nil
- default:
- if typeDesc.Elem().Kind() == reflect.Bool {
- p := bool(b)
- return &p, nil
- }
- }
- case reflect.Interface:
- bv := b.Value()
- if reflect.TypeOf(bv).Implements(typeDesc) {
- return bv, nil
- }
- if reflect.TypeOf(b).Implements(typeDesc) {
- return b, nil
- }
- }
- return nil, fmt.Errorf("type conversion error from bool to '%v'", typeDesc)
-}
-
-// ConvertToType implements the ref.Val interface method.
-func (b Bool) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case StringType:
- return String(strconv.FormatBool(bool(b)))
- case BoolType:
- return b
- case TypeType:
- return BoolType
- }
- return NewErr("type conversion error from '%v' to '%v'", BoolType, typeVal)
-}
-
-// Equal implements the ref.Val interface method.
-func (b Bool) Equal(other ref.Val) ref.Val {
- otherBool, ok := other.(Bool)
- return Bool(ok && b == otherBool)
-}
-
-// IsZeroValue returns true if the boolean value is false.
-func (b Bool) IsZeroValue() bool {
- return b == False
-}
-
-// Negate implements the traits.Negater interface method.
-func (b Bool) Negate() ref.Val {
- return !b
-}
-
-// Type implements the ref.Val interface method.
-func (b Bool) Type() ref.Type {
- return BoolType
-}
-
-// Value implements the ref.Val interface method.
-func (b Bool) Value() any {
- return bool(b)
-}
-
-// IsBool returns whether the input ref.Val or ref.Type is equal to BoolType.
-func IsBool(elem ref.Val) bool {
- switch v := elem.(type) {
- case Bool:
- return true
- case ref.Val:
- return v.Type() == BoolType
- default:
- return false
- }
-}
diff --git a/vendor/github.com/google/cel-go/common/types/bytes.go b/vendor/github.com/google/cel-go/common/types/bytes.go
deleted file mode 100644
index 5838755f8..000000000
--- a/vendor/github.com/google/cel-go/common/types/bytes.go
+++ /dev/null
@@ -1,130 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "bytes"
- "encoding/base64"
- "fmt"
- "reflect"
- "unicode/utf8"
-
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- wrapperspb "google.golang.org/protobuf/types/known/wrapperspb"
-)
-
-// Bytes type that implements ref.Val and supports add, compare, and size
-// operations.
-type Bytes []byte
-
-var (
- // byteWrapperType golang reflected type for protobuf bytes wrapper type.
- byteWrapperType = reflect.TypeOf(&wrapperspb.BytesValue{})
-)
-
-// Add implements traits.Adder interface method by concatenating byte sequences.
-func (b Bytes) Add(other ref.Val) ref.Val {
- otherBytes, ok := other.(Bytes)
- if !ok {
- return ValOrErr(other, "no such overload")
- }
- return append(b, otherBytes...)
-}
-
-// Compare implements traits.Comparer interface method by lexicographic ordering.
-func (b Bytes) Compare(other ref.Val) ref.Val {
- otherBytes, ok := other.(Bytes)
- if !ok {
- return ValOrErr(other, "no such overload")
- }
- return Int(bytes.Compare(b, otherBytes))
-}
-
-// ConvertToNative implements the ref.Val interface method.
-func (b Bytes) ConvertToNative(typeDesc reflect.Type) (any, error) {
- switch typeDesc.Kind() {
- case reflect.Array, reflect.Slice:
- return reflect.ValueOf(b).Convert(typeDesc).Interface(), nil
- case reflect.Ptr:
- switch typeDesc {
- case anyValueType:
- // Primitives must be wrapped before being set on an Any field.
- return anypb.New(wrapperspb.Bytes([]byte(b)))
- case byteWrapperType:
- // Convert the bytes to a wrapperspb.BytesValue.
- return wrapperspb.Bytes([]byte(b)), nil
- case jsonValueType:
- // CEL follows the proto3 to JSON conversion by encoding bytes to a string via base64.
- // The encoding below matches the golang 'encoding/json' behavior during marshaling,
- // which uses base64.StdEncoding.
- str := base64.StdEncoding.EncodeToString([]byte(b))
- return structpb.NewStringValue(str), nil
- }
- case reflect.Interface:
- bv := b.Value()
- if reflect.TypeOf(bv).Implements(typeDesc) {
- return bv, nil
- }
- if reflect.TypeOf(b).Implements(typeDesc) {
- return b, nil
- }
- }
- return nil, fmt.Errorf("type conversion error from Bytes to '%v'", typeDesc)
-}
-
-// ConvertToType implements the ref.Val interface method.
-func (b Bytes) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case StringType:
- if !utf8.Valid(b) {
- return NewErr("invalid UTF-8 in bytes, cannot convert to string")
- }
- return String(b)
- case BytesType:
- return b
- case TypeType:
- return BytesType
- }
- return NewErr("type conversion error from '%s' to '%s'", BytesType, typeVal)
-}
-
-// Equal implements the ref.Val interface method.
-func (b Bytes) Equal(other ref.Val) ref.Val {
- otherBytes, ok := other.(Bytes)
- return Bool(ok && bytes.Equal(b, otherBytes))
-}
-
-// IsZeroValue returns true if the byte array is empty.
-func (b Bytes) IsZeroValue() bool {
- return len(b) == 0
-}
-
-// Size implements the traits.Sizer interface method.
-func (b Bytes) Size() ref.Val {
- return Int(len(b))
-}
-
-// Type implements the ref.Val interface method.
-func (b Bytes) Type() ref.Type {
- return BytesType
-}
-
-// Value implements the ref.Val interface method.
-func (b Bytes) Value() any {
- return []byte(b)
-}
diff --git a/vendor/github.com/google/cel-go/common/types/compare.go b/vendor/github.com/google/cel-go/common/types/compare.go
deleted file mode 100644
index e19682618..000000000
--- a/vendor/github.com/google/cel-go/common/types/compare.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "math"
-
- "github.com/google/cel-go/common/types/ref"
-)
-
-func compareDoubleInt(d Double, i Int) Int {
- if d < math.MinInt64 {
- return IntNegOne
- }
- if d > math.MaxInt64 {
- return IntOne
- }
- return compareDouble(d, Double(i))
-}
-
-func compareIntDouble(i Int, d Double) Int {
- return -compareDoubleInt(d, i)
-}
-
-func compareDoubleUint(d Double, u Uint) Int {
- if d < 0 {
- return IntNegOne
- }
- if d > math.MaxUint64 {
- return IntOne
- }
- return compareDouble(d, Double(u))
-}
-
-func compareUintDouble(u Uint, d Double) Int {
- return -compareDoubleUint(d, u)
-}
-
-func compareIntUint(i Int, u Uint) Int {
- if i < 0 || u > math.MaxInt64 {
- return IntNegOne
- }
- cmp := i - Int(u)
- if cmp < 0 {
- return IntNegOne
- }
- if cmp > 0 {
- return IntOne
- }
- return IntZero
-}
-
-func compareUintInt(u Uint, i Int) Int {
- return -compareIntUint(i, u)
-}
-
-func compareDouble(a, b Double) Int {
- if a < b {
- return IntNegOne
- }
- if a > b {
- return IntOne
- }
- return IntZero
-}
-
-func compareInt(a, b Int) ref.Val {
- if a < b {
- return IntNegOne
- }
- if a > b {
- return IntOne
- }
- return IntZero
-}
-
-func compareUint(a, b Uint) ref.Val {
- if a < b {
- return IntNegOne
- }
- if a > b {
- return IntOne
- }
- return IntZero
-}
diff --git a/vendor/github.com/google/cel-go/common/types/doc.go b/vendor/github.com/google/cel-go/common/types/doc.go
deleted file mode 100644
index 5f641d704..000000000
--- a/vendor/github.com/google/cel-go/common/types/doc.go
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package types contains the types, traits, and utilities common to all
-// components of expression handling.
-package types
diff --git a/vendor/github.com/google/cel-go/common/types/double.go b/vendor/github.com/google/cel-go/common/types/double.go
deleted file mode 100644
index 027e78978..000000000
--- a/vendor/github.com/google/cel-go/common/types/double.go
+++ /dev/null
@@ -1,211 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "math"
- "reflect"
-
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- wrapperspb "google.golang.org/protobuf/types/known/wrapperspb"
-)
-
-// Double type that implements ref.Val, comparison, and mathematical
-// operations.
-type Double float64
-
-var (
- // doubleWrapperType reflected type for protobuf double wrapper type.
- doubleWrapperType = reflect.TypeOf(&wrapperspb.DoubleValue{})
-
- // floatWrapperType reflected type for protobuf float wrapper type.
- floatWrapperType = reflect.TypeOf(&wrapperspb.FloatValue{})
-)
-
-// Add implements traits.Adder.Add.
-func (d Double) Add(other ref.Val) ref.Val {
- otherDouble, ok := other.(Double)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- return d + otherDouble
-}
-
-// Compare implements traits.Comparer.Compare.
-func (d Double) Compare(other ref.Val) ref.Val {
- if math.IsNaN(float64(d)) {
- return NewErr("NaN values cannot be ordered")
- }
- switch ov := other.(type) {
- case Double:
- if math.IsNaN(float64(ov)) {
- return NewErr("NaN values cannot be ordered")
- }
- return compareDouble(d, ov)
- case Int:
- return compareDoubleInt(d, ov)
- case Uint:
- return compareDoubleUint(d, ov)
- default:
- return MaybeNoSuchOverloadErr(other)
- }
-}
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (d Double) ConvertToNative(typeDesc reflect.Type) (any, error) {
- switch typeDesc.Kind() {
- case reflect.Float32:
- v := float32(d)
- return reflect.ValueOf(v).Convert(typeDesc).Interface(), nil
- case reflect.Float64:
- v := float64(d)
- return reflect.ValueOf(v).Convert(typeDesc).Interface(), nil
- case reflect.Ptr:
- switch typeDesc {
- case anyValueType:
- // Primitives must be wrapped before being set on an Any field.
- return anypb.New(wrapperspb.Double(float64(d)))
- case doubleWrapperType:
- // Convert to a wrapperspb.DoubleValue
- return wrapperspb.Double(float64(d)), nil
- case floatWrapperType:
- // Convert to a wrapperspb.FloatValue (with truncation).
- return wrapperspb.Float(float32(d)), nil
- case jsonValueType:
- // Note, there are special cases for proto3 to json conversion that
- // expect the floating point value to be converted to a NaN,
- // Infinity, or -Infinity string values, but the jsonpb string
- // marshaling of the protobuf.Value will handle this conversion.
- return structpb.NewNumberValue(float64(d)), nil
- }
- switch typeDesc.Elem().Kind() {
- case reflect.Float32:
- v := float32(d)
- p := reflect.New(typeDesc.Elem())
- p.Elem().Set(reflect.ValueOf(v).Convert(typeDesc.Elem()))
- return p.Interface(), nil
- case reflect.Float64:
- v := float64(d)
- p := reflect.New(typeDesc.Elem())
- p.Elem().Set(reflect.ValueOf(v).Convert(typeDesc.Elem()))
- return p.Interface(), nil
- }
- case reflect.Interface:
- dv := d.Value()
- if reflect.TypeOf(dv).Implements(typeDesc) {
- return dv, nil
- }
- if reflect.TypeOf(d).Implements(typeDesc) {
- return d, nil
- }
- }
- return nil, fmt.Errorf("type conversion error from Double to '%v'", typeDesc)
-}
-
-// ConvertToType implements ref.Val.ConvertToType.
-func (d Double) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case IntType:
- i, err := doubleToInt64Checked(float64(d))
- if err != nil {
- return WrapErr(err)
- }
- return Int(i)
- case UintType:
- i, err := doubleToUint64Checked(float64(d))
- if err != nil {
- return WrapErr(err)
- }
- return Uint(i)
- case DoubleType:
- return d
- case StringType:
- return String(fmt.Sprintf("%g", float64(d)))
- case TypeType:
- return DoubleType
- }
- return NewErr("type conversion error from '%s' to '%s'", DoubleType, typeVal)
-}
-
-// Divide implements traits.Divider.Divide.
-func (d Double) Divide(other ref.Val) ref.Val {
- otherDouble, ok := other.(Double)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- return d / otherDouble
-}
-
-// Equal implements ref.Val.Equal.
-func (d Double) Equal(other ref.Val) ref.Val {
- if math.IsNaN(float64(d)) {
- return False
- }
- switch ov := other.(type) {
- case Double:
- if math.IsNaN(float64(ov)) {
- return False
- }
- return Bool(d == ov)
- case Int:
- return Bool(compareDoubleInt(d, ov) == 0)
- case Uint:
- return Bool(compareDoubleUint(d, ov) == 0)
- default:
- return False
- }
-}
-
-// IsZeroValue returns true if double value is 0.0
-func (d Double) IsZeroValue() bool {
- return float64(d) == 0.0
-}
-
-// Multiply implements traits.Multiplier.Multiply.
-func (d Double) Multiply(other ref.Val) ref.Val {
- otherDouble, ok := other.(Double)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- return d * otherDouble
-}
-
-// Negate implements traits.Negater.Negate.
-func (d Double) Negate() ref.Val {
- return -d
-}
-
-// Subtract implements traits.Subtractor.Subtract.
-func (d Double) Subtract(subtrahend ref.Val) ref.Val {
- subtraDouble, ok := subtrahend.(Double)
- if !ok {
- return MaybeNoSuchOverloadErr(subtrahend)
- }
- return d - subtraDouble
-}
-
-// Type implements ref.Val.Type.
-func (d Double) Type() ref.Type {
- return DoubleType
-}
-
-// Value implements ref.Val.Value.
-func (d Double) Value() any {
- return float64(d)
-}
diff --git a/vendor/github.com/google/cel-go/common/types/duration.go b/vendor/github.com/google/cel-go/common/types/duration.go
deleted file mode 100644
index 596e56d6b..000000000
--- a/vendor/github.com/google/cel-go/common/types/duration.go
+++ /dev/null
@@ -1,222 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
- "strconv"
- "time"
-
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- dpb "google.golang.org/protobuf/types/known/durationpb"
- structpb "google.golang.org/protobuf/types/known/structpb"
-)
-
-// Duration type that implements ref.Val and supports add, compare, negate,
-// and subtract operators. This type is also a receiver which means it can
-// participate in dispatch to receiver functions.
-type Duration struct {
- time.Duration
-}
-
-func durationOf(d time.Duration) Duration {
- return Duration{Duration: d}
-}
-
-var (
- durationValueType = reflect.TypeOf(&dpb.Duration{})
-
- durationZeroArgOverloads = map[string]func(ref.Val) ref.Val{
- overloads.TimeGetHours: DurationGetHours,
- overloads.TimeGetMinutes: DurationGetMinutes,
- overloads.TimeGetSeconds: DurationGetSeconds,
- overloads.TimeGetMilliseconds: DurationGetMilliseconds,
- }
-)
-
-// Add implements traits.Adder.Add.
-func (d Duration) Add(other ref.Val) ref.Val {
- switch other.Type() {
- case DurationType:
- dur2 := other.(Duration)
- val, err := addDurationChecked(d.Duration, dur2.Duration)
- if err != nil {
- return WrapErr(err)
- }
- return durationOf(val)
- case TimestampType:
- ts := other.(Timestamp).Time
- val, err := addTimeDurationChecked(ts, d.Duration)
- if err != nil {
- return WrapErr(err)
- }
- return timestampOf(val)
- }
- return MaybeNoSuchOverloadErr(other)
-}
-
-// Compare implements traits.Comparer.Compare.
-func (d Duration) Compare(other ref.Val) ref.Val {
- otherDur, ok := other.(Duration)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- d1 := d.Duration
- d2 := otherDur.Duration
- switch {
- case d1 < d2:
- return IntNegOne
- case d1 > d2:
- return IntOne
- default:
- return IntZero
- }
-}
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (d Duration) ConvertToNative(typeDesc reflect.Type) (any, error) {
- // If the duration is already assignable to the desired type return it.
- if reflect.TypeOf(d.Duration).AssignableTo(typeDesc) {
- return d.Duration, nil
- }
- if reflect.TypeOf(d).AssignableTo(typeDesc) {
- return d, nil
- }
- switch typeDesc {
- case anyValueType:
- // Pack the duration as a dpb.Duration into an Any value.
- return anypb.New(dpb.New(d.Duration))
- case durationValueType:
- // Unwrap the CEL value to its underlying proto value.
- return dpb.New(d.Duration), nil
- case jsonValueType:
- // CEL follows the proto3 to JSON conversion.
- // Note, using jsonpb would wrap the result in extra double quotes.
- v := d.ConvertToType(StringType)
- if IsError(v) {
- return nil, v.(*Err)
- }
- return structpb.NewStringValue(string(v.(String))), nil
- }
- return nil, fmt.Errorf("type conversion error from 'Duration' to '%v'", typeDesc)
-}
-
-// ConvertToType implements ref.Val.ConvertToType.
-func (d Duration) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case StringType:
- return String(strconv.FormatFloat(d.Seconds(), 'f', -1, 64) + "s")
- case IntType:
- return Int(d.Duration)
- case DurationType:
- return d
- case TypeType:
- return DurationType
- }
- return NewErr("type conversion error from '%s' to '%s'", DurationType, typeVal)
-}
-
-// Equal implements ref.Val.Equal.
-func (d Duration) Equal(other ref.Val) ref.Val {
- otherDur, ok := other.(Duration)
- return Bool(ok && d.Duration == otherDur.Duration)
-}
-
-// IsZeroValue returns true if the duration value is zero
-func (d Duration) IsZeroValue() bool {
- return d.Duration == 0
-}
-
-// Negate implements traits.Negater.Negate.
-func (d Duration) Negate() ref.Val {
- val, err := negateDurationChecked(d.Duration)
- if err != nil {
- return WrapErr(err)
- }
- return durationOf(val)
-}
-
-// Receive implements traits.Receiver.Receive.
-func (d Duration) Receive(function string, overload string, args []ref.Val) ref.Val {
- if len(args) == 0 {
- if f, found := durationZeroArgOverloads[function]; found {
- return f(d)
- }
- }
- return NoSuchOverloadErr()
-}
-
-// Subtract implements traits.Subtractor.Subtract.
-func (d Duration) Subtract(subtrahend ref.Val) ref.Val {
- subtraDur, ok := subtrahend.(Duration)
- if !ok {
- return MaybeNoSuchOverloadErr(subtrahend)
- }
- val, err := subtractDurationChecked(d.Duration, subtraDur.Duration)
- if err != nil {
- return WrapErr(err)
- }
- return durationOf(val)
-}
-
-// Type implements ref.Val.Type.
-func (d Duration) Type() ref.Type {
- return DurationType
-}
-
-// Value implements ref.Val.Value.
-func (d Duration) Value() any {
- return d.Duration
-}
-
-// DurationGetHours returns the duration in hours.
-func DurationGetHours(val ref.Val) ref.Val {
- dur, ok := val.(Duration)
- if !ok {
- return MaybeNoSuchOverloadErr(val)
- }
- return Int(dur.Hours())
-}
-
-// DurationGetMinutes returns duration in minutes.
-func DurationGetMinutes(val ref.Val) ref.Val {
- dur, ok := val.(Duration)
- if !ok {
- return MaybeNoSuchOverloadErr(val)
- }
- return Int(dur.Minutes())
-}
-
-// DurationGetSeconds returns duration in seconds.
-func DurationGetSeconds(val ref.Val) ref.Val {
- dur, ok := val.(Duration)
- if !ok {
- return MaybeNoSuchOverloadErr(val)
- }
- return Int(dur.Seconds())
-}
-
-// DurationGetMilliseconds returns duration in milliseconds.
-func DurationGetMilliseconds(val ref.Val) ref.Val {
- dur, ok := val.(Duration)
- if !ok {
- return MaybeNoSuchOverloadErr(val)
- }
- return Int(dur.Milliseconds())
-}
diff --git a/vendor/github.com/google/cel-go/common/types/err.go b/vendor/github.com/google/cel-go/common/types/err.go
deleted file mode 100644
index aa8f94b4f..000000000
--- a/vendor/github.com/google/cel-go/common/types/err.go
+++ /dev/null
@@ -1,146 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "errors"
- "fmt"
- "reflect"
-
- "github.com/google/cel-go/common/types/ref"
-)
-
-// Error interface which allows types types.Err values to be treated as error values.
-type Error interface {
- error
- ref.Val
-}
-
-// Err type which extends the built-in go error and implements ref.Val.
-type Err struct {
- error
-}
-
-var (
- // ErrType singleton.
- ErrType = NewOpaqueType("error")
-
- // errDivideByZero is an error indicating a division by zero of an integer value.
- errDivideByZero = errors.New("division by zero")
- // errModulusByZero is an error indicating a modulus by zero of an integer value.
- errModulusByZero = errors.New("modulus by zero")
- // errIntOverflow is an error representing integer overflow.
- errIntOverflow = errors.New("integer overflow")
- // errUintOverflow is an error representing unsigned integer overflow.
- errUintOverflow = errors.New("unsigned integer overflow")
- // errDurationOverflow is an error representing duration overflow.
- errDurationOverflow = errors.New("duration overflow")
- // errTimestampOverflow is an error representing timestamp overflow.
- errTimestampOverflow = errors.New("timestamp overflow")
- celErrTimestampOverflow = &Err{error: errTimestampOverflow}
-
- // celErrNoSuchOverload indicates that the call arguments did not match a supported method signature.
- celErrNoSuchOverload = NewErr("no such overload")
-)
-
-// NewErr creates a new Err described by the format string and args.
-// TODO: Audit the use of this function and standardize the error messages and codes.
-func NewErr(format string, args ...any) ref.Val {
- return &Err{fmt.Errorf(format, args...)}
-}
-
-// NoSuchOverloadErr returns a new types.Err instance with a no such overload message.
-func NoSuchOverloadErr() ref.Val {
- return celErrNoSuchOverload
-}
-
-// UnsupportedRefValConversionErr returns a types.NewErr instance with a no such conversion
-// message that indicates that the native value could not be converted to a CEL ref.Val.
-func UnsupportedRefValConversionErr(val any) ref.Val {
- return NewErr("unsupported conversion to ref.Val: (%T)%v", val, val)
-}
-
-// MaybeNoSuchOverloadErr returns the error or unknown if the input ref.Val is one of these types,
-// else a new no such overload error.
-func MaybeNoSuchOverloadErr(val ref.Val) ref.Val {
- return ValOrErr(val, "no such overload")
-}
-
-// ValOrErr either returns the existing error or creates a new one.
-// TODO: Audit the use of this function and standardize the error messages and codes.
-func ValOrErr(val ref.Val, format string, args ...any) ref.Val {
- if val == nil || !IsUnknownOrError(val) {
- return NewErr(format, args...)
- }
- return val
-}
-
-// WrapErr wraps an existing Go error value into a CEL Err value.
-func WrapErr(err error) ref.Val {
- return &Err{error: err}
-}
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (e *Err) ConvertToNative(typeDesc reflect.Type) (any, error) {
- return nil, e.error
-}
-
-// ConvertToType implements ref.Val.ConvertToType.
-func (e *Err) ConvertToType(typeVal ref.Type) ref.Val {
- // Errors are not convertible to other representations.
- return e
-}
-
-// Equal implements ref.Val.Equal.
-func (e *Err) Equal(other ref.Val) ref.Val {
- // An error cannot be equal to any other value, so it returns itself.
- return e
-}
-
-// String implements fmt.Stringer.
-func (e *Err) String() string {
- return e.error.Error()
-}
-
-// Type implements ref.Val.Type.
-func (e *Err) Type() ref.Type {
- return ErrType
-}
-
-// Value implements ref.Val.Value.
-func (e *Err) Value() any {
- return e.error
-}
-
-// Is implements errors.Is.
-func (e *Err) Is(target error) bool {
- return e.error.Error() == target.Error()
-}
-
-// Unwrap implements errors.Unwrap.
-func (e *Err) Unwrap() error {
- return e.error
-}
-
-// IsError returns whether the input element ref.Type or ref.Val is equal to
-// the ErrType singleton.
-func IsError(val ref.Val) bool {
- switch val.(type) {
- case *Err:
- return true
- default:
- return false
- }
-}
diff --git a/vendor/github.com/google/cel-go/common/types/int.go b/vendor/github.com/google/cel-go/common/types/int.go
deleted file mode 100644
index 940772aed..000000000
--- a/vendor/github.com/google/cel-go/common/types/int.go
+++ /dev/null
@@ -1,291 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "math"
- "reflect"
- "strconv"
- "time"
-
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- wrapperspb "google.golang.org/protobuf/types/known/wrapperspb"
-)
-
-// Int type that implements ref.Val as well as comparison and math operators.
-type Int int64
-
-// Int constants used for comparison results.
-const (
- // IntZero is the zero-value for Int
- IntZero = Int(0)
- IntOne = Int(1)
- IntNegOne = Int(-1)
-)
-
-var (
- // int32WrapperType reflected type for protobuf int32 wrapper type.
- int32WrapperType = reflect.TypeOf(&wrapperspb.Int32Value{})
-
- // int64WrapperType reflected type for protobuf int64 wrapper type.
- int64WrapperType = reflect.TypeOf(&wrapperspb.Int64Value{})
-)
-
-// Add implements traits.Adder.Add.
-func (i Int) Add(other ref.Val) ref.Val {
- otherInt, ok := other.(Int)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- val, err := addInt64Checked(int64(i), int64(otherInt))
- if err != nil {
- return WrapErr(err)
- }
- return Int(val)
-}
-
-// Compare implements traits.Comparer.Compare.
-func (i Int) Compare(other ref.Val) ref.Val {
- switch ov := other.(type) {
- case Double:
- if math.IsNaN(float64(ov)) {
- return NewErr("NaN values cannot be ordered")
- }
- return compareIntDouble(i, ov)
- case Int:
- return compareInt(i, ov)
- case Uint:
- return compareIntUint(i, ov)
- default:
- return MaybeNoSuchOverloadErr(other)
- }
-}
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (i Int) ConvertToNative(typeDesc reflect.Type) (any, error) {
- switch typeDesc.Kind() {
- case reflect.Int, reflect.Int32:
- // Enums are also mapped as int32 derivations.
- // Note, the code doesn't convert to the enum value directly since this is not known, but
- // the net effect with respect to proto-assignment is handled correctly by the reflection
- // Convert method.
- v, err := int64ToInt32Checked(int64(i))
- if err != nil {
- return nil, err
- }
- return reflect.ValueOf(v).Convert(typeDesc).Interface(), nil
- case reflect.Int64:
- return reflect.ValueOf(i).Convert(typeDesc).Interface(), nil
- case reflect.Ptr:
- switch typeDesc {
- case anyValueType:
- // Primitives must be wrapped before being set on an Any field.
- return anypb.New(wrapperspb.Int64(int64(i)))
- case int32WrapperType:
- // Convert the value to a wrapperspb.Int32Value, error on overflow.
- v, err := int64ToInt32Checked(int64(i))
- if err != nil {
- return nil, err
- }
- return wrapperspb.Int32(v), nil
- case int64WrapperType:
- // Convert the value to a wrapperspb.Int64Value.
- return wrapperspb.Int64(int64(i)), nil
- case jsonValueType:
- // The proto-to-JSON conversion rules would convert all 64-bit integer values to JSON
- // decimal strings. Because CEL ints might come from the automatic widening of 32-bit
- // values in protos, the JSON type is chosen dynamically based on the value.
- //
- // - Integers -2^53-1 < n < 2^53-1 are encoded as JSON numbers.
- // - Integers outside this range are encoded as JSON strings.
- //
- // The integer to float range represents the largest interval where such a conversion
- // can round-trip accurately. Thus, conversions from a 32-bit source can expect a JSON
- // number as with protobuf. Those consuming JSON from a 64-bit source must be able to
- // handle either a JSON number or a JSON decimal string. To handle these cases safely
- // the string values must be explicitly converted to int() within a CEL expression;
- // however, it is best to simply stay within the JSON number range when building JSON
- // objects in CEL.
- if i.isJSONSafe() {
- return structpb.NewNumberValue(float64(i)), nil
- }
- // Proto3 to JSON conversion requires string-formatted int64 values
- // since the conversion to floating point would result in truncation.
- return structpb.NewStringValue(strconv.FormatInt(int64(i), 10)), nil
- }
- switch typeDesc.Elem().Kind() {
- case reflect.Int32:
- // Convert the value to a wrapperspb.Int32Value, error on overflow.
- v, err := int64ToInt32Checked(int64(i))
- if err != nil {
- return nil, err
- }
- p := reflect.New(typeDesc.Elem())
- p.Elem().Set(reflect.ValueOf(v).Convert(typeDesc.Elem()))
- return p.Interface(), nil
- case reflect.Int64:
- v := int64(i)
- p := reflect.New(typeDesc.Elem())
- p.Elem().Set(reflect.ValueOf(v).Convert(typeDesc.Elem()))
- return p.Interface(), nil
- }
- case reflect.Interface:
- iv := i.Value()
- if reflect.TypeOf(iv).Implements(typeDesc) {
- return iv, nil
- }
- if reflect.TypeOf(i).Implements(typeDesc) {
- return i, nil
- }
- }
- return nil, fmt.Errorf("unsupported type conversion from 'int' to %v", typeDesc)
-}
-
-// ConvertToType implements ref.Val.ConvertToType.
-func (i Int) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case IntType:
- return i
- case UintType:
- u, err := int64ToUint64Checked(int64(i))
- if err != nil {
- return WrapErr(err)
- }
- return Uint(u)
- case DoubleType:
- return Double(i)
- case StringType:
- return String(fmt.Sprintf("%d", int64(i)))
- case TimestampType:
- // The maximum positive value that can be passed to time.Unix is math.MaxInt64 minus the number
- // of seconds between year 1 and year 1970. See comments on unixToInternal.
- if int64(i) < minUnixTime || int64(i) > maxUnixTime {
- return celErrTimestampOverflow
- }
- return timestampOf(time.Unix(int64(i), 0).UTC())
- case TypeType:
- return IntType
- }
- return NewErr("type conversion error from '%s' to '%s'", IntType, typeVal)
-}
-
-// Divide implements traits.Divider.Divide.
-func (i Int) Divide(other ref.Val) ref.Val {
- otherInt, ok := other.(Int)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- val, err := divideInt64Checked(int64(i), int64(otherInt))
- if err != nil {
- return WrapErr(err)
- }
- return Int(val)
-}
-
-// Equal implements ref.Val.Equal.
-func (i Int) Equal(other ref.Val) ref.Val {
- switch ov := other.(type) {
- case Double:
- if math.IsNaN(float64(ov)) {
- return False
- }
- return Bool(compareIntDouble(i, ov) == 0)
- case Int:
- return Bool(i == ov)
- case Uint:
- return Bool(compareIntUint(i, ov) == 0)
- default:
- return False
- }
-}
-
-// IsZeroValue returns true if integer is equal to 0
-func (i Int) IsZeroValue() bool {
- return i == IntZero
-}
-
-// Modulo implements traits.Modder.Modulo.
-func (i Int) Modulo(other ref.Val) ref.Val {
- otherInt, ok := other.(Int)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- val, err := moduloInt64Checked(int64(i), int64(otherInt))
- if err != nil {
- return WrapErr(err)
- }
- return Int(val)
-}
-
-// Multiply implements traits.Multiplier.Multiply.
-func (i Int) Multiply(other ref.Val) ref.Val {
- otherInt, ok := other.(Int)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- val, err := multiplyInt64Checked(int64(i), int64(otherInt))
- if err != nil {
- return WrapErr(err)
- }
- return Int(val)
-}
-
-// Negate implements traits.Negater.Negate.
-func (i Int) Negate() ref.Val {
- val, err := negateInt64Checked(int64(i))
- if err != nil {
- return WrapErr(err)
- }
- return Int(val)
-}
-
-// Subtract implements traits.Subtractor.Subtract.
-func (i Int) Subtract(subtrahend ref.Val) ref.Val {
- subtraInt, ok := subtrahend.(Int)
- if !ok {
- return MaybeNoSuchOverloadErr(subtrahend)
- }
- val, err := subtractInt64Checked(int64(i), int64(subtraInt))
- if err != nil {
- return WrapErr(err)
- }
- return Int(val)
-}
-
-// Type implements ref.Val.Type.
-func (i Int) Type() ref.Type {
- return IntType
-}
-
-// Value implements ref.Val.Value.
-func (i Int) Value() any {
- return int64(i)
-}
-
-// isJSONSafe indicates whether the int is safely representable as a floating point value in JSON.
-func (i Int) isJSONSafe() bool {
- return i >= minIntJSON && i <= maxIntJSON
-}
-
-const (
- // maxIntJSON is defined as the Number.MAX_SAFE_INTEGER value per EcmaScript 6.
- maxIntJSON = 1<<53 - 1
- // minIntJSON is defined as the Number.MIN_SAFE_INTEGER value per EcmaScript 6.
- minIntJSON = -maxIntJSON
-)
diff --git a/vendor/github.com/google/cel-go/common/types/iterator.go b/vendor/github.com/google/cel-go/common/types/iterator.go
deleted file mode 100644
index 98e9147b6..000000000
--- a/vendor/github.com/google/cel-go/common/types/iterator.go
+++ /dev/null
@@ -1,55 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
-
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-)
-
-var (
- // IteratorType singleton.
- IteratorType = NewObjectType("iterator", traits.IteratorType)
-)
-
-// baseIterator is the basis for list, map, and object iterators.
-//
-// An iterator in and of itself should not be a valid value for comparison, but must implement the
-// `ref.Val` methods in order to be well-supported within instruction arguments processed by the
-// interpreter.
-type baseIterator struct{}
-
-func (*baseIterator) ConvertToNative(typeDesc reflect.Type) (any, error) {
- return nil, fmt.Errorf("type conversion on iterators not supported")
-}
-
-func (*baseIterator) ConvertToType(typeVal ref.Type) ref.Val {
- return NewErr("no such overload")
-}
-
-func (*baseIterator) Equal(other ref.Val) ref.Val {
- return NewErr("no such overload")
-}
-
-func (*baseIterator) Type() ref.Type {
- return IteratorType
-}
-
-func (*baseIterator) Value() any {
- return nil
-}
diff --git a/vendor/github.com/google/cel-go/common/types/json_value.go b/vendor/github.com/google/cel-go/common/types/json_value.go
deleted file mode 100644
index 13a4efe7a..000000000
--- a/vendor/github.com/google/cel-go/common/types/json_value.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "reflect"
-
- structpb "google.golang.org/protobuf/types/known/structpb"
-)
-
-// JSON type constants representing the reflected types of protobuf JSON values.
-var (
- jsonValueType = reflect.TypeOf(&structpb.Value{})
- jsonListValueType = reflect.TypeOf(&structpb.ListValue{})
- jsonStructType = reflect.TypeOf(&structpb.Struct{})
- jsonNullType = reflect.TypeOf(structpb.NullValue_NULL_VALUE)
-)
diff --git a/vendor/github.com/google/cel-go/common/types/list.go b/vendor/github.com/google/cel-go/common/types/list.go
deleted file mode 100644
index d4932b4a9..000000000
--- a/vendor/github.com/google/cel-go/common/types/list.go
+++ /dev/null
@@ -1,523 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
- "strings"
-
- "google.golang.org/protobuf/proto"
- "google.golang.org/protobuf/reflect/protoreflect"
-
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
-)
-
-// NewDynamicList returns a traits.Lister with heterogeneous elements.
-// value should be an array of "native" types, i.e. any type that
-// NativeToValue() can convert to a ref.Val.
-func NewDynamicList(adapter Adapter, value any) traits.Lister {
- refValue := reflect.ValueOf(value)
- return &baseList{
- Adapter: adapter,
- value: value,
- size: refValue.Len(),
- get: func(i int) any {
- return refValue.Index(i).Interface()
- },
- }
-}
-
-// NewStringList returns a traits.Lister containing only strings.
-func NewStringList(adapter Adapter, elems []string) traits.Lister {
- return &baseList{
- Adapter: adapter,
- value: elems,
- size: len(elems),
- get: func(i int) any { return elems[i] },
- }
-}
-
-// NewRefValList returns a traits.Lister with ref.Val elements.
-//
-// This type specialization is used with list literals within CEL expressions.
-func NewRefValList(adapter Adapter, elems []ref.Val) traits.Lister {
- return &baseList{
- Adapter: adapter,
- value: elems,
- size: len(elems),
- get: func(i int) any { return elems[i] },
- }
-}
-
-// NewProtoList returns a traits.Lister based on a pb.List instance.
-func NewProtoList(adapter Adapter, list protoreflect.List) traits.Lister {
- return &baseList{
- Adapter: adapter,
- value: list,
- size: list.Len(),
- get: func(i int) any { return list.Get(i).Interface() },
- }
-}
-
-// NewJSONList returns a traits.Lister based on a structpb.ListValue instance.
-func NewJSONList(adapter Adapter, l *structpb.ListValue) traits.Lister {
- vals := l.GetValues()
- return &baseList{
- Adapter: adapter,
- value: l,
- size: len(vals),
- get: func(i int) any { return vals[i] },
- }
-}
-
-// NewMutableList creates a new mutable list whose internal state can be modified.
-func NewMutableList(adapter Adapter) traits.MutableLister {
- var mutableValues []ref.Val
- l := &mutableList{
- baseList: &baseList{
- Adapter: adapter,
- value: mutableValues,
- size: 0,
- },
- mutableValues: mutableValues,
- }
- l.get = func(i int) any {
- return l.mutableValues[i]
- }
- return l
-}
-
-// baseList points to a list containing elements of any type.
-// The `value` is an array of native values, and refValue is its reflection object.
-// The `Adapter` enables native type to CEL type conversions.
-type baseList struct {
- Adapter
- value any
-
- // size indicates the number of elements within the list.
- // Since objects are immutable the size of a list is static.
- size int
-
- // get returns a value at the specified integer index.
- // The index is guaranteed to be checked against the list index range.
- get func(int) any
-}
-
-// Add implements the traits.Adder interface method.
-func (l *baseList) Add(other ref.Val) ref.Val {
- otherList, ok := other.(traits.Lister)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- if l.Size() == IntZero {
- return other
- }
- if otherList.Size() == IntZero {
- return l
- }
- return &concatList{
- Adapter: l.Adapter,
- prevList: l,
- nextList: otherList}
-}
-
-// Contains implements the traits.Container interface method.
-func (l *baseList) Contains(elem ref.Val) ref.Val {
- for i := 0; i < l.size; i++ {
- val := l.NativeToValue(l.get(i))
- cmp := elem.Equal(val)
- b, ok := cmp.(Bool)
- if ok && b == True {
- return True
- }
- }
- return False
-}
-
-// ConvertToNative implements the ref.Val interface method.
-func (l *baseList) ConvertToNative(typeDesc reflect.Type) (any, error) {
- // If the underlying list value is assignable to the reflected type return it.
- if reflect.TypeOf(l.value).AssignableTo(typeDesc) {
- return l.value, nil
- }
- // If the list wrapper is assignable to the desired type return it.
- if reflect.TypeOf(l).AssignableTo(typeDesc) {
- return l, nil
- }
- // Attempt to convert the list to a set of well known protobuf types.
- switch typeDesc {
- case anyValueType:
- json, err := l.ConvertToNative(jsonListValueType)
- if err != nil {
- return nil, err
- }
- return anypb.New(json.(proto.Message))
- case jsonValueType, jsonListValueType:
- jsonValues, err :=
- l.ConvertToNative(reflect.TypeOf([]*structpb.Value{}))
- if err != nil {
- return nil, err
- }
- jsonList := &structpb.ListValue{Values: jsonValues.([]*structpb.Value)}
- if typeDesc == jsonListValueType {
- return jsonList, nil
- }
- return structpb.NewListValue(jsonList), nil
- }
- // Non-list conversion.
- if typeDesc.Kind() != reflect.Slice && typeDesc.Kind() != reflect.Array {
- return nil, fmt.Errorf("type conversion error from list to '%v'", typeDesc)
- }
-
- // List conversion.
- // Allow the element ConvertToNative() function to determine whether conversion is possible.
- otherElemType := typeDesc.Elem()
- elemCount := l.size
- nativeList := reflect.MakeSlice(typeDesc, elemCount, elemCount)
- for i := 0; i < elemCount; i++ {
- elem := l.NativeToValue(l.get(i))
- nativeElemVal, err := elem.ConvertToNative(otherElemType)
- if err != nil {
- return nil, err
- }
- nativeList.Index(i).Set(reflect.ValueOf(nativeElemVal))
- }
- return nativeList.Interface(), nil
-}
-
-// ConvertToType implements the ref.Val interface method.
-func (l *baseList) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case ListType:
- return l
- case TypeType:
- return ListType
- }
- return NewErr("type conversion error from '%s' to '%s'", ListType, typeVal)
-}
-
-// Equal implements the ref.Val interface method.
-func (l *baseList) Equal(other ref.Val) ref.Val {
- otherList, ok := other.(traits.Lister)
- if !ok {
- return False
- }
- if l.Size() != otherList.Size() {
- return False
- }
- for i := IntZero; i < l.Size().(Int); i++ {
- thisElem := l.Get(i)
- otherElem := otherList.Get(i)
- elemEq := Equal(thisElem, otherElem)
- if elemEq == False {
- return False
- }
- }
- return True
-}
-
-// Get implements the traits.Indexer interface method.
-func (l *baseList) Get(index ref.Val) ref.Val {
- ind, err := IndexOrError(index)
- if err != nil {
- return ValOrErr(index, err.Error())
- }
- if ind < 0 || ind >= l.size {
- return NewErr("index '%d' out of range in list size '%d'", ind, l.Size())
- }
- return l.NativeToValue(l.get(ind))
-}
-
-// IsZeroValue returns true if the list is empty.
-func (l *baseList) IsZeroValue() bool {
- return l.size == 0
-}
-
-// Iterator implements the traits.Iterable interface method.
-func (l *baseList) Iterator() traits.Iterator {
- return newListIterator(l)
-}
-
-// Size implements the traits.Sizer interface method.
-func (l *baseList) Size() ref.Val {
- return Int(l.size)
-}
-
-// Type implements the ref.Val interface method.
-func (l *baseList) Type() ref.Type {
- return ListType
-}
-
-// Value implements the ref.Val interface method.
-func (l *baseList) Value() any {
- return l.value
-}
-
-// String converts the list to a human-readable string form.
-func (l *baseList) String() string {
- var sb strings.Builder
- sb.WriteString("[")
- for i := 0; i < l.size; i++ {
- sb.WriteString(fmt.Sprintf("%v", l.get(i)))
- if i != l.size-1 {
- sb.WriteString(", ")
- }
- }
- sb.WriteString("]")
- return sb.String()
-}
-
-// mutableList aggregates values into its internal storage. For use with internal CEL variables only.
-type mutableList struct {
- *baseList
- mutableValues []ref.Val
-}
-
-// Add copies elements from the other list into the internal storage of the mutable list.
-// The ref.Val returned by Add is the receiver.
-func (l *mutableList) Add(other ref.Val) ref.Val {
- switch otherList := other.(type) {
- case *mutableList:
- l.mutableValues = append(l.mutableValues, otherList.mutableValues...)
- l.size += len(otherList.mutableValues)
- case traits.Lister:
- for i := IntZero; i < otherList.Size().(Int); i++ {
- l.size++
- l.mutableValues = append(l.mutableValues, otherList.Get(i))
- }
- default:
- return MaybeNoSuchOverloadErr(otherList)
- }
- return l
-}
-
-// ToImmutableList returns an immutable list based on the internal storage of the mutable list.
-func (l *mutableList) ToImmutableList() traits.Lister {
- // The reference to internal state is guaranteed to be safe as this call is only performed
- // when mutations have been completed.
- return NewRefValList(l.Adapter, l.mutableValues)
-}
-
-// concatList combines two list implementations together into a view.
-// The `Adapter` enables native type to CEL type conversions.
-type concatList struct {
- Adapter
- value any
- prevList traits.Lister
- nextList traits.Lister
-}
-
-// Add implements the traits.Adder interface method.
-func (l *concatList) Add(other ref.Val) ref.Val {
- otherList, ok := other.(traits.Lister)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- if l.Size() == IntZero {
- return other
- }
- if otherList.Size() == IntZero {
- return l
- }
- return &concatList{
- Adapter: l.Adapter,
- prevList: l,
- nextList: otherList}
-}
-
-// Contains implements the traits.Container interface method.
-func (l *concatList) Contains(elem ref.Val) ref.Val {
-	// The concat list relies on the IsUnknownOrError checks against the input element to be
- // performed by the `prevList` and/or `nextList`.
- prev := l.prevList.Contains(elem)
- // Short-circuit the return if the elem was found in the prev list.
- if prev == True {
- return prev
- }
- // Return if the elem was found in the next list.
- next := l.nextList.Contains(elem)
- if next == True {
- return next
- }
- // Handle the case where an error or unknown was encountered before checking next.
- if IsUnknownOrError(prev) {
- return prev
- }
- // Otherwise, rely on the next value as the representative result.
- return next
-}
-
-// ConvertToNative implements the ref.Val interface method.
-func (l *concatList) ConvertToNative(typeDesc reflect.Type) (any, error) {
- combined := NewDynamicList(l.Adapter, l.Value().([]any))
- return combined.ConvertToNative(typeDesc)
-}
-
-// ConvertToType implements the ref.Val interface method.
-func (l *concatList) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case ListType:
- return l
- case TypeType:
- return ListType
- }
- return NewErr("type conversion error from '%s' to '%s'", ListType, typeVal)
-}
-
-// Equal implements the ref.Val interface method.
-func (l *concatList) Equal(other ref.Val) ref.Val {
- otherList, ok := other.(traits.Lister)
- if !ok {
- return False
- }
- if l.Size() != otherList.Size() {
- return False
- }
- var maybeErr ref.Val
- for i := IntZero; i < l.Size().(Int); i++ {
- thisElem := l.Get(i)
- otherElem := otherList.Get(i)
- elemEq := Equal(thisElem, otherElem)
- if elemEq == False {
- return False
- }
- if maybeErr == nil && IsUnknownOrError(elemEq) {
- maybeErr = elemEq
- }
- }
- if maybeErr != nil {
- return maybeErr
- }
- return True
-}
-
-// Get implements the traits.Indexer interface method.
-func (l *concatList) Get(index ref.Val) ref.Val {
- ind, err := IndexOrError(index)
- if err != nil {
- return ValOrErr(index, err.Error())
- }
- i := Int(ind)
- if i < l.prevList.Size().(Int) {
- return l.prevList.Get(i)
- }
- offset := i - l.prevList.Size().(Int)
- return l.nextList.Get(offset)
-}
-
-// IsZeroValue returns true if the list is empty.
-func (l *concatList) IsZeroValue() bool {
- return l.Size().(Int) == 0
-}
-
-// Iterator implements the traits.Iterable interface method.
-func (l *concatList) Iterator() traits.Iterator {
- return newListIterator(l)
-}
-
-// Size implements the traits.Sizer interface method.
-func (l *concatList) Size() ref.Val {
- return l.prevList.Size().(Int).Add(l.nextList.Size())
-}
-
-// String converts the concatenated list to a human-readable string.
-func (l *concatList) String() string {
- var sb strings.Builder
- sb.WriteString("[")
- for i := Int(0); i < l.Size().(Int); i++ {
- sb.WriteString(fmt.Sprintf("%v", l.Get(i)))
- if i != l.Size().(Int)-1 {
- sb.WriteString(", ")
- }
- }
- sb.WriteString("]")
- return sb.String()
-}
-
-// Type implements the ref.Val interface method.
-func (l *concatList) Type() ref.Type {
- return ListType
-}
-
-// Value implements the ref.Val interface method.
-func (l *concatList) Value() any {
- if l.value == nil {
- merged := make([]any, l.Size().(Int))
- prevLen := l.prevList.Size().(Int)
- for i := Int(0); i < prevLen; i++ {
- merged[i] = l.prevList.Get(i).Value()
- }
- nextLen := l.nextList.Size().(Int)
- for j := Int(0); j < nextLen; j++ {
- merged[prevLen+j] = l.nextList.Get(j).Value()
- }
- l.value = merged
- }
- return l.value
-}
-
-func newListIterator(listValue traits.Lister) traits.Iterator {
- return &listIterator{
- listValue: listValue,
- len: listValue.Size().(Int),
- }
-}
-
-type listIterator struct {
- *baseIterator
- listValue traits.Lister
- cursor Int
- len Int
-}
-
-// HasNext implements the traits.Iterator interface method.
-func (it *listIterator) HasNext() ref.Val {
- return Bool(it.cursor < it.len)
-}
-
-// Next implements the traits.Iterator interface method.
-func (it *listIterator) Next() ref.Val {
- if it.HasNext() == True {
- index := it.cursor
- it.cursor++
- return it.listValue.Get(index)
- }
- return nil
-}
-
-// IndexOrError converts an input index value into either a lossless integer index or an error.
-func IndexOrError(index ref.Val) (int, error) {
- switch iv := index.(type) {
- case Int:
- return int(iv), nil
- case Double:
- if ik, ok := doubleToInt64Lossless(float64(iv)); ok {
- return int(ik), nil
- }
- return -1, fmt.Errorf("unsupported index value %v in list", index)
- case Uint:
- if ik, ok := uint64ToInt64Lossless(uint64(iv)); ok {
- return int(ik), nil
- }
- return -1, fmt.Errorf("unsupported index value %v in list", index)
- default:
- return -1, fmt.Errorf("unsupported index type '%s' in list", index.Type())
- }
-}
diff --git a/vendor/github.com/google/cel-go/common/types/map.go b/vendor/github.com/google/cel-go/common/types/map.go
deleted file mode 100644
index 739b7aab0..000000000
--- a/vendor/github.com/google/cel-go/common/types/map.go
+++ /dev/null
@@ -1,854 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
- "strings"
-
- "github.com/stoewer/go-strcase"
- "google.golang.org/protobuf/proto"
- "google.golang.org/protobuf/reflect/protoreflect"
-
- "github.com/google/cel-go/common/types/pb"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
-)
-
-// NewDynamicMap returns a traits.Mapper value with dynamic key, value pairs.
-func NewDynamicMap(adapter Adapter, value any) traits.Mapper {
- refValue := reflect.ValueOf(value)
- return &baseMap{
- Adapter: adapter,
- mapAccessor: newReflectMapAccessor(adapter, refValue),
- value: value,
- size: refValue.Len(),
- }
-}
-
-// NewJSONStruct creates a traits.Mapper implementation backed by a JSON struct that has been
-// encoded in protocol buffer form.
-//
-// The `adapter` argument provides type adaptation capabilities from proto to CEL.
-func NewJSONStruct(adapter Adapter, value *structpb.Struct) traits.Mapper {
- fields := value.GetFields()
- return &baseMap{
- Adapter: adapter,
- mapAccessor: newJSONStructAccessor(adapter, fields),
- value: value,
- size: len(fields),
- }
-}
-
-// NewRefValMap returns a specialized traits.Mapper with CEL valued keys and values.
-func NewRefValMap(adapter Adapter, value map[ref.Val]ref.Val) traits.Mapper {
- return &baseMap{
- Adapter: adapter,
- mapAccessor: newRefValMapAccessor(value),
- value: value,
- size: len(value),
- }
-}
-
-// NewStringInterfaceMap returns a specialized traits.Mapper with string keys and interface values.
-func NewStringInterfaceMap(adapter Adapter, value map[string]any) traits.Mapper {
- return &baseMap{
- Adapter: adapter,
- mapAccessor: newStringIfaceMapAccessor(adapter, value),
- value: value,
- size: len(value),
- }
-}
-
-// NewStringStringMap returns a specialized traits.Mapper with string keys and values.
-func NewStringStringMap(adapter Adapter, value map[string]string) traits.Mapper {
- return &baseMap{
- Adapter: adapter,
- mapAccessor: newStringMapAccessor(value),
- value: value,
- size: len(value),
- }
-}
-
-// NewProtoMap returns a specialized traits.Mapper for handling protobuf map values.
-func NewProtoMap(adapter Adapter, value *pb.Map) traits.Mapper {
- return &protoMap{
- Adapter: adapter,
- value: value,
- }
-}
-
-// mapAccessor is a private interface for finding values within a map and iterating over the keys.
-// This interface implements portions of the API surface area required by the traits.Mapper
-// interface.
-type mapAccessor interface {
- // Find returns a value, if one exists, for the input key.
- //
- // If the key is not found the function returns (nil, false).
- Find(ref.Val) (ref.Val, bool)
-
- // Iterator returns an Iterator over the map key set.
- Iterator() traits.Iterator
-}
-
-// baseMap is a reflection based map implementation designed to handle a variety of map-like types.
-//
-// Since CEL is side-effect free, the base map represents an immutable object.
-type baseMap struct {
- // TypeAdapter used to convert keys and values accessed within the map.
- Adapter
-
- // mapAccessor interface implementation used to find and iterate over map keys.
- mapAccessor
-
-	// value is the native Go value upon which the map type operates.
- value any
-
- // size is the number of entries in the map.
- size int
-}
-
-// Contains implements the traits.Container interface method.
-func (m *baseMap) Contains(index ref.Val) ref.Val {
- _, found := m.Find(index)
- return Bool(found)
-}
-
-// ConvertToNative implements the ref.Val interface method.
-func (m *baseMap) ConvertToNative(typeDesc reflect.Type) (any, error) {
- // If the map is already assignable to the desired type return it, e.g. interfaces and
- // maps with the same key value types.
- if reflect.TypeOf(m.value).AssignableTo(typeDesc) {
- return m.value, nil
- }
- if reflect.TypeOf(m).AssignableTo(typeDesc) {
- return m, nil
- }
- switch typeDesc {
- case anyValueType:
- json, err := m.ConvertToNative(jsonStructType)
- if err != nil {
- return nil, err
- }
- return anypb.New(json.(proto.Message))
- case jsonValueType, jsonStructType:
- jsonEntries, err :=
- m.ConvertToNative(reflect.TypeOf(map[string]*structpb.Value{}))
- if err != nil {
- return nil, err
- }
- jsonMap := &structpb.Struct{Fields: jsonEntries.(map[string]*structpb.Value)}
- if typeDesc == jsonStructType {
- return jsonMap, nil
- }
- return structpb.NewStructValue(jsonMap), nil
- }
-
- // Unwrap pointers, but track their use.
- isPtr := false
- if typeDesc.Kind() == reflect.Ptr {
- tk := typeDesc
- typeDesc = typeDesc.Elem()
- if typeDesc.Kind() == reflect.Ptr {
- return nil, fmt.Errorf("unsupported type conversion to '%v'", tk)
- }
- isPtr = true
- }
- switch typeDesc.Kind() {
- // Map conversion.
- case reflect.Map:
- otherKey := typeDesc.Key()
- otherElem := typeDesc.Elem()
- nativeMap := reflect.MakeMapWithSize(typeDesc, m.size)
- it := m.Iterator()
- for it.HasNext() == True {
- key := it.Next()
- refKeyValue, err := key.ConvertToNative(otherKey)
- if err != nil {
- return nil, err
- }
- refElemValue, err := m.Get(key).ConvertToNative(otherElem)
- if err != nil {
- return nil, err
- }
- nativeMap.SetMapIndex(reflect.ValueOf(refKeyValue), reflect.ValueOf(refElemValue))
- }
- return nativeMap.Interface(), nil
- case reflect.Struct:
- nativeStructPtr := reflect.New(typeDesc)
- nativeStruct := nativeStructPtr.Elem()
- it := m.Iterator()
- for it.HasNext() == True {
- key := it.Next()
- // Ensure the field name being referenced is exported.
- // Only exported (public) field names can be set by reflection, where the name
- // must be at least one character in length and start with an upper-case letter.
- fieldName := key.ConvertToType(StringType)
- if IsError(fieldName) {
- return nil, fieldName.(*Err)
- }
- name := string(fieldName.(String))
- name = strcase.UpperCamelCase(name)
- fieldRef := nativeStruct.FieldByName(name)
- if !fieldRef.IsValid() {
- return nil, fmt.Errorf("type conversion error, no such field '%s' in type '%v'", name, typeDesc)
- }
- fieldValue, err := m.Get(key).ConvertToNative(fieldRef.Type())
- if err != nil {
- return nil, err
- }
- fieldRef.Set(reflect.ValueOf(fieldValue))
- }
- if isPtr {
- return nativeStructPtr.Interface(), nil
- }
- return nativeStruct.Interface(), nil
- }
- return nil, fmt.Errorf("type conversion error from map to '%v'", typeDesc)
-}
-
-// ConvertToType implements the ref.Val interface method.
-func (m *baseMap) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case MapType:
- return m
- case TypeType:
- return MapType
- }
- return NewErr("type conversion error from '%s' to '%s'", MapType, typeVal)
-}
-
-// Equal implements the ref.Val interface method.
-func (m *baseMap) Equal(other ref.Val) ref.Val {
- otherMap, ok := other.(traits.Mapper)
- if !ok {
- return False
- }
- if m.Size() != otherMap.Size() {
- return False
- }
- it := m.Iterator()
- for it.HasNext() == True {
- key := it.Next()
- thisVal, _ := m.Find(key)
- otherVal, found := otherMap.Find(key)
- if !found {
- return False
- }
- valEq := Equal(thisVal, otherVal)
- if valEq == False {
- return False
- }
- }
- return True
-}
-
-// Get implements the traits.Indexer interface method.
-func (m *baseMap) Get(key ref.Val) ref.Val {
- v, found := m.Find(key)
- if !found {
- return ValOrErr(v, "no such key: %v", key)
- }
- return v
-}
-
-// IsZeroValue returns true if the map is empty.
-func (m *baseMap) IsZeroValue() bool {
- return m.size == 0
-}
-
-// Size implements the traits.Sizer interface method.
-func (m *baseMap) Size() ref.Val {
- return Int(m.size)
-}
-
-// String converts the map into a human-readable string.
-func (m *baseMap) String() string {
- var sb strings.Builder
- sb.WriteString("{")
- it := m.Iterator()
- i := 0
- for it.HasNext() == True {
- k := it.Next()
- v, _ := m.Find(k)
- sb.WriteString(fmt.Sprintf("%v: %v", k, v))
- if i != m.size-1 {
- sb.WriteString(", ")
- }
- i++
- }
- sb.WriteString("}")
- return sb.String()
-}
-
-// Type implements the ref.Val interface method.
-func (m *baseMap) Type() ref.Type {
- return MapType
-}
-
-// Value implements the ref.Val interface method.
-func (m *baseMap) Value() any {
- return m.value
-}
-
-func newJSONStructAccessor(adapter Adapter, st map[string]*structpb.Value) mapAccessor {
- return &jsonStructAccessor{
- Adapter: adapter,
- st: st,
- }
-}
-
-type jsonStructAccessor struct {
- Adapter
- st map[string]*structpb.Value
-}
-
-// Find searches the json struct field map for the input key value and returns (value, true) if
-// found.
-//
-// If the key is not found the function returns (nil, false).
-func (a *jsonStructAccessor) Find(key ref.Val) (ref.Val, bool) {
- strKey, ok := key.(String)
- if !ok {
- return nil, false
- }
- keyVal, found := a.st[string(strKey)]
- if !found {
- return nil, false
- }
- return a.NativeToValue(keyVal), true
-}
-
-// Iterator creates a new traits.Iterator from the set of JSON struct field names.
-func (a *jsonStructAccessor) Iterator() traits.Iterator {
- // Copy the keys to make their order stable.
- mapKeys := make([]string, len(a.st))
- i := 0
- for k := range a.st {
- mapKeys[i] = k
- i++
- }
- return &stringKeyIterator{
- mapKeys: mapKeys,
- len: len(mapKeys),
- }
-}
-
-func newReflectMapAccessor(adapter Adapter, value reflect.Value) mapAccessor {
- keyType := value.Type().Key()
- return &reflectMapAccessor{
- Adapter: adapter,
- refValue: value,
- keyType: keyType,
- }
-}
-
-type reflectMapAccessor struct {
- Adapter
- refValue reflect.Value
- keyType reflect.Type
-}
-
-// Find converts the input key to a native Golang type and then uses reflection to find the key,
-// returning (value, true) if present.
-//
-// If the key is not found the function returns (nil, false).
-func (m *reflectMapAccessor) Find(key ref.Val) (ref.Val, bool) {
- if m.refValue.Len() == 0 {
- return nil, false
- }
- if keyVal, found := m.findInternal(key); found {
- return keyVal, true
- }
- switch k := key.(type) {
- // Double is not a valid proto map key type, so check for the key as an int or uint.
- case Double:
- if ik, ok := doubleToInt64Lossless(float64(k)); ok {
- if keyVal, found := m.findInternal(Int(ik)); found {
- return keyVal, true
- }
- }
- if uk, ok := doubleToUint64Lossless(float64(k)); ok {
- return m.findInternal(Uint(uk))
- }
- // map keys of type double are not supported.
- case Int:
- if uk, ok := int64ToUint64Lossless(int64(k)); ok {
- return m.findInternal(Uint(uk))
- }
- case Uint:
- if ik, ok := uint64ToInt64Lossless(uint64(k)); ok {
- return m.findInternal(Int(ik))
- }
- }
- return nil, false
-}
-
-// findInternal attempts to convert the incoming key to the map's internal native type
-// and then returns the value, if found.
-func (m *reflectMapAccessor) findInternal(key ref.Val) (ref.Val, bool) {
- k, err := key.ConvertToNative(m.keyType)
- if err != nil {
- return nil, false
- }
- refKey := reflect.ValueOf(k)
- val := m.refValue.MapIndex(refKey)
- if val.IsValid() {
- return m.NativeToValue(val.Interface()), true
- }
- return nil, false
-}
-
-// Iterator creates a Golang reflection based traits.Iterator.
-func (m *reflectMapAccessor) Iterator() traits.Iterator {
- return &mapIterator{
- Adapter: m.Adapter,
- mapKeys: m.refValue.MapRange(),
- len: m.refValue.Len(),
- }
-}
-
-func newRefValMapAccessor(mapVal map[ref.Val]ref.Val) mapAccessor {
- return &refValMapAccessor{mapVal: mapVal}
-}
-
-type refValMapAccessor struct {
- mapVal map[ref.Val]ref.Val
-}
-
-// Find uses native map accesses to find the key, returning (value, true) if present.
-//
-// If the key is not found the function returns (nil, false).
-func (a *refValMapAccessor) Find(key ref.Val) (ref.Val, bool) {
- if len(a.mapVal) == 0 {
- return nil, false
- }
- if keyVal, found := a.mapVal[key]; found {
- return keyVal, true
- }
- switch k := key.(type) {
- case Double:
- if ik, ok := doubleToInt64Lossless(float64(k)); ok {
- if keyVal, found := a.mapVal[Int(ik)]; found {
- return keyVal, found
- }
- }
- if uk, ok := doubleToUint64Lossless(float64(k)); ok {
- keyVal, found := a.mapVal[Uint(uk)]
- return keyVal, found
- }
- // map keys of type double are not supported.
- case Int:
- if uk, ok := int64ToUint64Lossless(int64(k)); ok {
- keyVal, found := a.mapVal[Uint(uk)]
- return keyVal, found
- }
- case Uint:
- if ik, ok := uint64ToInt64Lossless(uint64(k)); ok {
- keyVal, found := a.mapVal[Int(ik)]
- return keyVal, found
- }
- }
- return nil, false
-}
-
-// Iterator produces a new traits.Iterator which iterates over the map keys via Golang reflection.
-func (a *refValMapAccessor) Iterator() traits.Iterator {
- return &mapIterator{
- Adapter: DefaultTypeAdapter,
- mapKeys: reflect.ValueOf(a.mapVal).MapRange(),
- len: len(a.mapVal),
- }
-}
-
-func newStringMapAccessor(strMap map[string]string) mapAccessor {
- return &stringMapAccessor{mapVal: strMap}
-}
-
-type stringMapAccessor struct {
- mapVal map[string]string
-}
-
-// Find uses native map accesses to find the key, returning (value, true) if present.
-//
-// If the key is not found the function returns (nil, false).
-func (a *stringMapAccessor) Find(key ref.Val) (ref.Val, bool) {
- strKey, ok := key.(String)
- if !ok {
- return nil, false
- }
- keyVal, found := a.mapVal[string(strKey)]
- if !found {
- return nil, false
- }
- return String(keyVal), true
-}
-
-// Iterator creates a new traits.Iterator from the string key set of the map.
-func (a *stringMapAccessor) Iterator() traits.Iterator {
- // Copy the keys to make their order stable.
- mapKeys := make([]string, len(a.mapVal))
- i := 0
- for k := range a.mapVal {
- mapKeys[i] = k
- i++
- }
- return &stringKeyIterator{
- mapKeys: mapKeys,
- len: len(mapKeys),
- }
-}
-
-func newStringIfaceMapAccessor(adapter Adapter, mapVal map[string]any) mapAccessor {
- return &stringIfaceMapAccessor{
- Adapter: adapter,
- mapVal: mapVal,
- }
-}
-
-type stringIfaceMapAccessor struct {
- Adapter
- mapVal map[string]any
-}
-
-// Find uses native map accesses to find the key, returning (value, true) if present.
-//
-// If the key is not found the function returns (nil, false).
-func (a *stringIfaceMapAccessor) Find(key ref.Val) (ref.Val, bool) {
- strKey, ok := key.(String)
- if !ok {
- return nil, false
- }
- keyVal, found := a.mapVal[string(strKey)]
- if !found {
- return nil, false
- }
- return a.NativeToValue(keyVal), true
-}
-
-// Iterator creates a new traits.Iterator from the string key set of the map.
-func (a *stringIfaceMapAccessor) Iterator() traits.Iterator {
- // Copy the keys to make their order stable.
- mapKeys := make([]string, len(a.mapVal))
- i := 0
- for k := range a.mapVal {
- mapKeys[i] = k
- i++
- }
- return &stringKeyIterator{
- mapKeys: mapKeys,
- len: len(mapKeys),
- }
-}
-
-// protoMap is a specialized, separate implementation of the traits.Mapper interfaces tailored to
-// accessing protoreflect.Map values.
-type protoMap struct {
- Adapter
- value *pb.Map
-}
-
-// Contains returns whether the map contains the given key.
-func (m *protoMap) Contains(key ref.Val) ref.Val {
- _, found := m.Find(key)
- return Bool(found)
-}
-
-// ConvertToNative implements the ref.Val interface method.
-//
-// Note, assignment to Golang struct types is not yet supported.
-func (m *protoMap) ConvertToNative(typeDesc reflect.Type) (any, error) {
- // If the map is already assignable to the desired type return it, e.g. interfaces and
- // maps with the same key value types.
- switch typeDesc {
- case anyValueType:
- json, err := m.ConvertToNative(jsonStructType)
- if err != nil {
- return nil, err
- }
- return anypb.New(json.(proto.Message))
- case jsonValueType, jsonStructType:
- jsonEntries, err :=
- m.ConvertToNative(reflect.TypeOf(map[string]*structpb.Value{}))
- if err != nil {
- return nil, err
- }
- jsonMap := &structpb.Struct{
- Fields: jsonEntries.(map[string]*structpb.Value)}
- if typeDesc == jsonStructType {
- return jsonMap, nil
- }
- return structpb.NewStructValue(jsonMap), nil
- }
- switch typeDesc.Kind() {
- case reflect.Struct, reflect.Ptr:
- if reflect.TypeOf(m.value).AssignableTo(typeDesc) {
- return m.value, nil
- }
- if reflect.TypeOf(m).AssignableTo(typeDesc) {
- return m, nil
- }
- }
- if typeDesc.Kind() != reflect.Map {
- return nil, fmt.Errorf("unsupported type conversion: %v to map", typeDesc)
- }
-
- keyType := m.value.KeyType.ReflectType()
- valType := m.value.ValueType.ReflectType()
- otherKeyType := typeDesc.Key()
- otherValType := typeDesc.Elem()
- mapVal := reflect.MakeMapWithSize(typeDesc, m.value.Len())
- var err error
- m.value.Range(func(key protoreflect.MapKey, val protoreflect.Value) bool {
- ntvKey := key.Interface()
- ntvVal := val.Interface()
- switch pv := ntvVal.(type) {
- case protoreflect.Message:
- ntvVal = pv.Interface()
- }
- if keyType == otherKeyType && valType == otherValType {
- mapVal.SetMapIndex(reflect.ValueOf(ntvKey), reflect.ValueOf(ntvVal))
- return true
- }
- celKey := m.NativeToValue(ntvKey)
- celVal := m.NativeToValue(ntvVal)
- ntvKey, err = celKey.ConvertToNative(otherKeyType)
- if err != nil {
- // early terminate the range loop.
- return false
- }
- ntvVal, err = celVal.ConvertToNative(otherValType)
- if err != nil {
- // early terminate the range loop.
- return false
- }
- mapVal.SetMapIndex(reflect.ValueOf(ntvKey), reflect.ValueOf(ntvVal))
- return true
- })
- if err != nil {
- return nil, err
- }
- return mapVal.Interface(), nil
-}
-
-// ConvertToType implements the ref.Val interface method.
-func (m *protoMap) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case MapType:
- return m
- case TypeType:
- return MapType
- }
- return NewErr("type conversion error from '%s' to '%s'", MapType, typeVal)
-}
-
-// Equal implements the ref.Val interface method.
-func (m *protoMap) Equal(other ref.Val) ref.Val {
- otherMap, ok := other.(traits.Mapper)
- if !ok {
- return False
- }
- if m.value.Map.Len() != int(otherMap.Size().(Int)) {
- return False
- }
- var retVal ref.Val = True
- m.value.Range(func(key protoreflect.MapKey, val protoreflect.Value) bool {
- keyVal := m.NativeToValue(key.Interface())
- valVal := m.NativeToValue(val)
- otherVal, found := otherMap.Find(keyVal)
- if !found {
- retVal = False
- return false
- }
- valEq := Equal(valVal, otherVal)
- if valEq != True {
- retVal = valEq
- return false
- }
- return true
- })
- return retVal
-}
-
-// Find returns whether the protoreflect.Map contains the input key.
-//
-// If the key is not found the function returns (nil, false).
-func (m *protoMap) Find(key ref.Val) (ref.Val, bool) {
- if keyVal, found := m.findInternal(key); found {
- return keyVal, true
- }
- switch k := key.(type) {
- // Double is not a valid proto map key type, so check for the key as an int or uint.
- case Double:
- if ik, ok := doubleToInt64Lossless(float64(k)); ok {
- if keyVal, found := m.findInternal(Int(ik)); found {
- return keyVal, true
- }
- }
- if uk, ok := doubleToUint64Lossless(float64(k)); ok {
- return m.findInternal(Uint(uk))
- }
- // map keys of type double are not supported.
- case Int:
- if uk, ok := int64ToUint64Lossless(int64(k)); ok {
- return m.findInternal(Uint(uk))
- }
- case Uint:
- if ik, ok := uint64ToInt64Lossless(uint64(k)); ok {
- return m.findInternal(Int(ik))
- }
- }
- return nil, false
-}
-
-// findInternal attempts to convert the incoming key to the map's internal native type
-// and then returns the value, if found.
-func (m *protoMap) findInternal(key ref.Val) (ref.Val, bool) {
- // Convert the input key to the expected protobuf key type.
- ntvKey, err := key.ConvertToNative(m.value.KeyType.ReflectType())
- if err != nil {
- return nil, false
- }
- // Use protoreflection to get the key value.
- val := m.value.Get(protoreflect.ValueOf(ntvKey).MapKey())
- if !val.IsValid() {
- return nil, false
- }
- // Perform nominal type unwrapping from the input value.
- switch v := val.Interface().(type) {
- case protoreflect.List, protoreflect.Map:
- // Maps do not support list or map values
- return nil, false
- default:
- return m.NativeToValue(v), true
- }
-}
-
-// Get implements the traits.Indexer interface method.
-func (m *protoMap) Get(key ref.Val) ref.Val {
- v, found := m.Find(key)
- if !found {
- return ValOrErr(v, "no such key: %v", key)
- }
- return v
-}
-
-// IsZeroValue returns true if the map is empty.
-func (m *protoMap) IsZeroValue() bool {
- return m.value.Len() == 0
-}
-
-// Iterator implements the traits.Iterable interface method.
-func (m *protoMap) Iterator() traits.Iterator {
- // Copy the keys to make their order stable.
- mapKeys := make([]protoreflect.MapKey, 0, m.value.Len())
- m.value.Range(func(k protoreflect.MapKey, v protoreflect.Value) bool {
- mapKeys = append(mapKeys, k)
- return true
- })
- return &protoMapIterator{
- Adapter: m.Adapter,
- mapKeys: mapKeys,
- len: m.value.Len(),
- }
-}
-
-// Size returns the number of entries in the protoreflect.Map.
-func (m *protoMap) Size() ref.Val {
- return Int(m.value.Len())
-}
-
-// Type implements the ref.Val interface method.
-func (m *protoMap) Type() ref.Type {
- return MapType
-}
-
-// Value implements the ref.Val interface method.
-func (m *protoMap) Value() any {
- return m.value
-}
-
-type mapIterator struct {
- *baseIterator
- Adapter
- mapKeys *reflect.MapIter
- cursor int
- len int
-}
-
-// HasNext implements the traits.Iterator interface method.
-func (it *mapIterator) HasNext() ref.Val {
- return Bool(it.cursor < it.len)
-}
-
-// Next implements the traits.Iterator interface method.
-func (it *mapIterator) Next() ref.Val {
- if it.HasNext() == True && it.mapKeys.Next() {
- it.cursor++
- refKey := it.mapKeys.Key()
- return it.NativeToValue(refKey.Interface())
- }
- return nil
-}
-
-type protoMapIterator struct {
- *baseIterator
- Adapter
- mapKeys []protoreflect.MapKey
- cursor int
- len int
-}
-
-// HasNext implements the traits.Iterator interface method.
-func (it *protoMapIterator) HasNext() ref.Val {
- return Bool(it.cursor < it.len)
-}
-
-// Next implements the traits.Iterator interface method.
-func (it *protoMapIterator) Next() ref.Val {
- if it.HasNext() == True {
- index := it.cursor
- it.cursor++
- refKey := it.mapKeys[index]
- return it.NativeToValue(refKey.Interface())
- }
- return nil
-}
-
-type stringKeyIterator struct {
- *baseIterator
- mapKeys []string
- cursor int
- len int
-}
-
-// HasNext implements the traits.Iterator interface method.
-func (it *stringKeyIterator) HasNext() ref.Val {
- return Bool(it.cursor < it.len)
-}
-
-// Next implements the traits.Iterator interface method.
-func (it *stringKeyIterator) Next() ref.Val {
- if it.HasNext() == True {
- index := it.cursor
- it.cursor++
- return String(it.mapKeys[index])
- }
- return nil
-}
diff --git a/vendor/github.com/google/cel-go/common/types/null.go b/vendor/github.com/google/cel-go/common/types/null.go
deleted file mode 100644
index 926ca3dc9..000000000
--- a/vendor/github.com/google/cel-go/common/types/null.go
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
-
- "google.golang.org/protobuf/proto"
-
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
-)
-
-// Null type implementation.
-type Null structpb.NullValue
-
-var (
- // NullValue singleton.
- NullValue = Null(structpb.NullValue_NULL_VALUE)
-
- // golang reflect type for Null values.
- nullReflectType = reflect.TypeOf(NullValue)
-)
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (n Null) ConvertToNative(typeDesc reflect.Type) (any, error) {
- switch typeDesc.Kind() {
- case reflect.Int32:
- switch typeDesc {
- case jsonNullType:
- return structpb.NullValue_NULL_VALUE, nil
- case nullReflectType:
- return n, nil
- }
- case reflect.Ptr:
- switch typeDesc {
- case anyValueType:
- // Convert to a JSON-null before packing to an Any field since the enum value for JSON
- // null cannot be packed directly.
- pb, err := n.ConvertToNative(jsonValueType)
- if err != nil {
- return nil, err
- }
- return anypb.New(pb.(proto.Message))
- case jsonValueType:
- return structpb.NewNullValue(), nil
- case boolWrapperType, byteWrapperType, doubleWrapperType, floatWrapperType,
- int32WrapperType, int64WrapperType, stringWrapperType, uint32WrapperType,
- uint64WrapperType:
- return nil, nil
- }
- case reflect.Interface:
- nv := n.Value()
- if reflect.TypeOf(nv).Implements(typeDesc) {
- return nv, nil
- }
- if reflect.TypeOf(n).Implements(typeDesc) {
- return n, nil
- }
- }
- // If the type conversion isn't supported return an error.
- return nil, fmt.Errorf("type conversion error from '%v' to '%v'", NullType, typeDesc)
-}
-
-// ConvertToType implements ref.Val.ConvertToType.
-func (n Null) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case StringType:
- return String("null")
- case NullType:
- return n
- case TypeType:
- return NullType
- }
- return NewErr("type conversion error from '%s' to '%s'", NullType, typeVal)
-}
-
-// Equal implements ref.Val.Equal.
-func (n Null) Equal(other ref.Val) ref.Val {
- return Bool(NullType == other.Type())
-}
-
-// IsZeroValue returns true as null always represents an absent value.
-func (n Null) IsZeroValue() bool {
- return true
-}
-
-// Type implements ref.Val.Type.
-func (n Null) Type() ref.Type {
- return NullType
-}
-
-// Value implements ref.Val.Value.
-func (n Null) Value() any {
- return structpb.NullValue_NULL_VALUE
-}
diff --git a/vendor/github.com/google/cel-go/common/types/object.go b/vendor/github.com/google/cel-go/common/types/object.go
deleted file mode 100644
index 8ba0af9fb..000000000
--- a/vendor/github.com/google/cel-go/common/types/object.go
+++ /dev/null
@@ -1,165 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
-
- "google.golang.org/protobuf/encoding/protojson"
- "google.golang.org/protobuf/proto"
-
- "github.com/google/cel-go/common/types/pb"
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
-)
-
-type protoObj struct {
- Adapter
- value proto.Message
- typeDesc *pb.TypeDescription
- typeValue ref.Val
-}
-
-// NewObject returns an object based on a proto.Message value which handles
-// conversion between protobuf type values and expression type values.
-// Objects support indexing and iteration.
-//
-// Note: the type value is pulled from the list of registered types within the
-// type provider. If the proto type is not registered within the type provider,
-// then this will result in an error within the type adapter / provider.
-func NewObject(adapter Adapter,
- typeDesc *pb.TypeDescription,
- typeValue ref.Val,
- value proto.Message) ref.Val {
- return &protoObj{
- Adapter: adapter,
- value: value,
- typeDesc: typeDesc,
- typeValue: typeValue}
-}
-
-func (o *protoObj) ConvertToNative(typeDesc reflect.Type) (any, error) {
- srcPB := o.value
- if reflect.TypeOf(srcPB).AssignableTo(typeDesc) {
- return srcPB, nil
- }
- if reflect.TypeOf(o).AssignableTo(typeDesc) {
- return o, nil
- }
- switch typeDesc {
- case anyValueType:
- _, isAny := srcPB.(*anypb.Any)
- if isAny {
- return srcPB, nil
- }
- return anypb.New(srcPB)
- case jsonValueType:
- // Marshal the proto to JSON first, and then rehydrate as protobuf.Value as there is no
- // support for direct conversion from proto.Message to protobuf.Value.
- bytes, err := protojson.Marshal(srcPB)
- if err != nil {
- return nil, err
- }
- json := &structpb.Value{}
- err = protojson.Unmarshal(bytes, json)
- if err != nil {
- return nil, err
- }
- return json, nil
- default:
- if typeDesc == o.typeDesc.ReflectType() {
- return o.value, nil
- }
- if typeDesc.Kind() == reflect.Ptr {
- val := reflect.New(typeDesc.Elem()).Interface()
- dstPB, ok := val.(proto.Message)
- if ok {
- err := pb.Merge(dstPB, srcPB)
- if err != nil {
- return nil, fmt.Errorf("type conversion error: %v", err)
- }
- return dstPB, nil
- }
- }
- }
- return nil, fmt.Errorf("type conversion error from '%T' to '%v'", o.value, typeDesc)
-}
-
-func (o *protoObj) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- default:
- if o.Type().TypeName() == typeVal.TypeName() {
- return o
- }
- case TypeType:
- return o.typeValue
- }
- return NewErr("type conversion error from '%s' to '%s'", o.typeDesc.Name(), typeVal)
-}
-
-func (o *protoObj) Equal(other ref.Val) ref.Val {
- otherPB, ok := other.Value().(proto.Message)
- return Bool(ok && pb.Equal(o.value, otherPB))
-}
-
-// IsSet tests whether a defined field is set to a non-default value.
-func (o *protoObj) IsSet(field ref.Val) ref.Val {
- protoFieldName, ok := field.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(field)
- }
- protoFieldStr := string(protoFieldName)
- fd, found := o.typeDesc.FieldByName(protoFieldStr)
- if !found {
- return NewErr("no such field '%s'", field)
- }
- if fd.IsSet(o.value) {
- return True
- }
- return False
-}
-
-// IsZeroValue returns true if the protobuf object is empty.
-func (o *protoObj) IsZeroValue() bool {
- return proto.Equal(o.value, o.typeDesc.Zero())
-}
-
-func (o *protoObj) Get(index ref.Val) ref.Val {
- protoFieldName, ok := index.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(index)
- }
- protoFieldStr := string(protoFieldName)
- fd, found := o.typeDesc.FieldByName(protoFieldStr)
- if !found {
- return NewErr("no such field '%s'", index)
- }
- fv, err := fd.GetFrom(o.value)
- if err != nil {
- return NewErr(err.Error())
- }
- return o.NativeToValue(fv)
-}
-
-func (o *protoObj) Type() ref.Type {
- return o.typeValue.(ref.Type)
-}
-
-func (o *protoObj) Value() any {
- return o.value
-}
diff --git a/vendor/github.com/google/cel-go/common/types/optional.go b/vendor/github.com/google/cel-go/common/types/optional.go
deleted file mode 100644
index a9f30aed0..000000000
--- a/vendor/github.com/google/cel-go/common/types/optional.go
+++ /dev/null
@@ -1,108 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "errors"
- "fmt"
- "reflect"
-
- "github.com/google/cel-go/common/types/ref"
-)
-
-var (
- // OptionalType indicates the runtime type of an optional value.
- OptionalType = NewOpaqueType("optional")
-
- // OptionalNone is a sentinel value which is used to indicate an empty optional value.
- OptionalNone = &Optional{}
-)
-
-// OptionalOf returns an optional value which wraps a concrete CEL value.
-func OptionalOf(value ref.Val) *Optional {
- return &Optional{value: value}
-}
-
-// Optional value which points to a value if non-empty.
-type Optional struct {
- value ref.Val
-}
-
-// HasValue returns true if the optional has a value.
-func (o *Optional) HasValue() bool {
- return o.value != nil
-}
-
-// GetValue returns the wrapped value contained in the optional.
-func (o *Optional) GetValue() ref.Val {
- if !o.HasValue() {
- return NewErr("optional.none() dereference")
- }
- return o.value
-}
-
-// ConvertToNative implements the ref.Val interface method.
-func (o *Optional) ConvertToNative(typeDesc reflect.Type) (any, error) {
- if !o.HasValue() {
- return nil, errors.New("optional.none() dereference")
- }
- return o.value.ConvertToNative(typeDesc)
-}
-
-// ConvertToType implements the ref.Val interface method.
-func (o *Optional) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case OptionalType:
- return o
- case TypeType:
- return OptionalType
- }
- return NewErr("type conversion error from '%s' to '%s'", OptionalType, typeVal)
-}
-
-// Equal determines whether the values contained by two optional values are equal.
-func (o *Optional) Equal(other ref.Val) ref.Val {
- otherOpt, isOpt := other.(*Optional)
- if !isOpt {
- return False
- }
- if !o.HasValue() {
- return Bool(!otherOpt.HasValue())
- }
- if !otherOpt.HasValue() {
- return False
- }
- return o.value.Equal(otherOpt.value)
-}
-
-func (o *Optional) String() string {
- if o.HasValue() {
- return fmt.Sprintf("optional(%v)", o.GetValue())
- }
- return "optional.none()"
-}
-
-// Type implements the ref.Val interface method.
-func (o *Optional) Type() ref.Type {
- return OptionalType
-}
-
-// Value returns the underlying 'Value()' of the wrapped value, if present.
-func (o *Optional) Value() any {
- if o.value == nil {
- return nil
- }
- return o.value.Value()
-}
diff --git a/vendor/github.com/google/cel-go/common/types/overflow.go b/vendor/github.com/google/cel-go/common/types/overflow.go
deleted file mode 100644
index c68a92182..000000000
--- a/vendor/github.com/google/cel-go/common/types/overflow.go
+++ /dev/null
@@ -1,389 +0,0 @@
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "math"
- "time"
-)
-
-var (
- doubleTwoTo64 = math.Ldexp(1.0, 64)
-)
-
-// addInt64Checked performs addition with overflow detection of two int64 values.
-//
-// If the operation fails the error return value will be non-nil.
-func addInt64Checked(x, y int64) (int64, error) {
- if (y > 0 && x > math.MaxInt64-y) || (y < 0 && x < math.MinInt64-y) {
- return 0, errIntOverflow
- }
- return x + y, nil
-}
-
-// subtractInt64Checked performs subtraction with overflow detection of two int64 values.
-//
-// If the operation fails the error return value will be non-nil.
-func subtractInt64Checked(x, y int64) (int64, error) {
- if (y < 0 && x > math.MaxInt64+y) || (y > 0 && x < math.MinInt64+y) {
- return 0, errIntOverflow
- }
- return x - y, nil
-}
-
-// negateInt64Checked performs negation with overflow detection of an int64.
-//
-// If the operation fails the error return value will be non-nil.
-func negateInt64Checked(x int64) (int64, error) {
-	// In two's complement, negating MinInt64 would result in a value of MaxInt64+1.
- if x == math.MinInt64 {
- return 0, errIntOverflow
- }
- return -x, nil
-}
-
-// multiplyInt64Checked performs multiplication with overflow detection of two int64 values.
-//
-// If the operation fails the error return value will be non-nil.
-func multiplyInt64Checked(x, y int64) (int64, error) {
- // Detecting multiplication overflow is more complicated than the others. The first two detect
- // attempting to negate MinInt64, which would result in MaxInt64+1. The other four detect normal
- // overflow conditions.
- if (x == -1 && y == math.MinInt64) || (y == -1 && x == math.MinInt64) ||
- // x is positive, y is positive
- (x > 0 && y > 0 && x > math.MaxInt64/y) ||
- // x is positive, y is negative
- (x > 0 && y < 0 && y < math.MinInt64/x) ||
- // x is negative, y is positive
- (x < 0 && y > 0 && x < math.MinInt64/y) ||
- // x is negative, y is negative
- (x < 0 && y < 0 && y < math.MaxInt64/x) {
- return 0, errIntOverflow
- }
- return x * y, nil
-}
-
-// divideInt64Checked performs division with overflow detection of two int64 values,
-// as well as a division by zero check.
-//
-// If the operation fails the error return value will be non-nil.
-func divideInt64Checked(x, y int64) (int64, error) {
- // Division by zero.
- if y == 0 {
- return 0, errDivideByZero
- }
-	// In two's complement, negating MinInt64 would result in a value of MaxInt64+1.
- if x == math.MinInt64 && y == -1 {
- return 0, errIntOverflow
- }
- return x / y, nil
-}
-
-// moduloInt64Checked performs modulo with overflow detection of two int64 values
-// as well as a modulus by zero check.
-//
-// If the operation fails the error return value will be non-nil.
-func moduloInt64Checked(x, y int64) (int64, error) {
- // Modulus by zero.
- if y == 0 {
- return 0, errModulusByZero
- }
-	// In two's complement, negating MinInt64 would result in a value of MaxInt64+1.
- if x == math.MinInt64 && y == -1 {
- return 0, errIntOverflow
- }
- return x % y, nil
-}
-
-// addUint64Checked performs addition with overflow detection of two uint64 values.
-//
-// If the operation fails due to overflow the error return value will be non-nil.
-func addUint64Checked(x, y uint64) (uint64, error) {
- if y > 0 && x > math.MaxUint64-y {
- return 0, errUintOverflow
- }
- return x + y, nil
-}
-
-// subtractUint64Checked performs subtraction with overflow detection of two uint64 values.
-//
-// If the operation fails due to overflow the error return value will be non-nil.
-func subtractUint64Checked(x, y uint64) (uint64, error) {
- if y > x {
- return 0, errUintOverflow
- }
- return x - y, nil
-}
-
-// multiplyUint64Checked performs multiplication with overflow detection of two uint64 values.
-//
-// If the operation fails due to overflow the error return value will be non-nil.
-func multiplyUint64Checked(x, y uint64) (uint64, error) {
- if y != 0 && x > math.MaxUint64/y {
- return 0, errUintOverflow
- }
- return x * y, nil
-}
-
-// divideUint64Checked performs division with a test for division by zero.
-//
-// If the operation fails the error return value will be non-nil.
-func divideUint64Checked(x, y uint64) (uint64, error) {
- if y == 0 {
- return 0, errDivideByZero
- }
- return x / y, nil
-}
-
-// moduloUint64Checked performs modulo with a test for modulus by zero.
-//
-// If the operation fails the error return value will be non-nil.
-func moduloUint64Checked(x, y uint64) (uint64, error) {
- if y == 0 {
- return 0, errModulusByZero
- }
- return x % y, nil
-}
-
-// addDurationChecked performs addition with overflow detection of two time.Durations.
-//
-// If the operation fails due to overflow the error return value will be non-nil.
-func addDurationChecked(x, y time.Duration) (time.Duration, error) {
- val, err := addInt64Checked(int64(x), int64(y))
- if err != nil {
- return time.Duration(0), err
- }
- return time.Duration(val), nil
-}
-
-// subtractDurationChecked performs subtraction with overflow detection of two time.Durations.
-//
-// If the operation fails due to overflow the error return value will be non-nil.
-func subtractDurationChecked(x, y time.Duration) (time.Duration, error) {
- val, err := subtractInt64Checked(int64(x), int64(y))
- if err != nil {
- return time.Duration(0), err
- }
- return time.Duration(val), nil
-}
-
-// negateDurationChecked performs negation with overflow detection of a time.Duration.
-//
-// If the operation fails due to overflow the error return value will be non-nil.
-func negateDurationChecked(x time.Duration) (time.Duration, error) {
- val, err := negateInt64Checked(int64(x))
- if err != nil {
- return time.Duration(0), err
- }
- return time.Duration(val), nil
-}
-
-// addTimeDurationChecked performs addition with overflow detection of a time.Time and time.Duration.
-//
-// If the operation fails due to overflow the error return value will be non-nil.
-func addTimeDurationChecked(x time.Time, y time.Duration) (time.Time, error) {
- // This is tricky. A time is represented as (int64, int32) where the first is seconds and the second
- // is nanoseconds. A duration is int64 representing nanoseconds. We cannot normalize time to int64
- // as it could potentially overflow. The only way to proceed is to break time and duration into
- // second and nanosecond components.
-
- // First we break time into its components by truncating and subtracting.
- sec1 := x.Truncate(time.Second).Unix() // Truncate to seconds.
- nsec1 := x.Sub(x.Truncate(time.Second)).Nanoseconds() // Get nanoseconds by truncating and subtracting.
-
- // Second we break duration into its components by dividing and modulo.
- sec2 := int64(y) / int64(time.Second) // Truncate to seconds.
- nsec2 := int64(y) % int64(time.Second) // Get remainder.
-
- // Add seconds first, detecting any overflow.
- sec, err := addInt64Checked(sec1, sec2)
- if err != nil {
- return time.Time{}, err
- }
- // Nanoseconds cannot overflow as time.Time normalizes them to [0, 999999999].
- nsec := nsec1 + nsec2
-
- // We need to normalize nanoseconds to be positive and carry extra nanoseconds to seconds.
- // Adapted from time.Unix(int64, int64).
- if nsec < 0 || nsec >= int64(time.Second) {
- // Add seconds.
- sec, err = addInt64Checked(sec, nsec/int64(time.Second))
- if err != nil {
- return time.Time{}, err
- }
-
- nsec -= (nsec / int64(time.Second)) * int64(time.Second)
- if nsec < 0 {
- // Subtract an extra second
- sec, err = addInt64Checked(sec, -1)
- if err != nil {
- return time.Time{}, err
- }
- nsec += int64(time.Second)
- }
- }
-
- // Check if the number of seconds from Unix epoch is within our acceptable range.
- if sec < minUnixTime || sec > maxUnixTime {
- return time.Time{}, errTimestampOverflow
- }
-
- // Return resulting time and propagate time zone.
- return time.Unix(sec, nsec).In(x.Location()), nil
-}
-
-// subtractTimeChecked performs subtraction with overflow detection of two time.Time.
-//
-// If the operation fails due to overflow the error return value will be non-nil.
-func subtractTimeChecked(x, y time.Time) (time.Duration, error) {
- // Similar to addTimeDurationChecked() above.
-
- // First we break time into its components by truncating and subtracting.
- sec1 := x.Truncate(time.Second).Unix() // Truncate to seconds.
- nsec1 := x.Sub(x.Truncate(time.Second)).Nanoseconds() // Get nanoseconds by truncating and subtracting.
-
- // Second we break the other time into its components by truncating and subtracting.
- sec2 := y.Truncate(time.Second).Unix() // Truncate to seconds.
- nsec2 := y.Sub(y.Truncate(time.Second)).Nanoseconds() // Get nanoseconds by truncating and subtracting.
-
- // Subtract seconds first, detecting any overflow.
- sec, err := subtractInt64Checked(sec1, sec2)
- if err != nil {
- return time.Duration(0), err
- }
-
- // Nanoseconds cannot overflow as time.Time normalizes them to [0, 999999999].
- nsec := nsec1 - nsec2
-
- // Scale seconds to nanoseconds detecting overflow.
- tsec, err := multiplyInt64Checked(sec, int64(time.Second))
- if err != nil {
- return time.Duration(0), err
- }
-
- // Lastly we need to add the two nanoseconds together.
- val, err := addInt64Checked(tsec, nsec)
- if err != nil {
- return time.Duration(0), err
- }
-
- return time.Duration(val), nil
-}
-
-// subtractTimeDurationChecked performs subtraction with overflow detection of a time.Time and
-// time.Duration.
-//
-// If the operation fails due to overflow the error return value will be non-nil.
-func subtractTimeDurationChecked(x time.Time, y time.Duration) (time.Time, error) {
- // The easiest way to implement this is to negate y and add them.
- // x - y = x + -y
- val, err := negateDurationChecked(y)
- if err != nil {
- return time.Time{}, err
- }
- return addTimeDurationChecked(x, val)
-}
-
-// doubleToInt64Checked converts a double to an int64 value.
-//
-// If the conversion fails due to overflow the error return value will be non-nil.
-func doubleToInt64Checked(v float64) (int64, error) {
- if math.IsInf(v, 0) || math.IsNaN(v) || v <= float64(math.MinInt64) || v >= float64(math.MaxInt64) {
- return 0, errIntOverflow
- }
- return int64(v), nil
-}
-
-// doubleToUint64Checked converts a double to a uint64 value.
-//
-// If the conversion fails due to overflow the error return value will be non-nil.
-func doubleToUint64Checked(v float64) (uint64, error) {
- if math.IsInf(v, 0) || math.IsNaN(v) || v < 0 || v >= doubleTwoTo64 {
- return 0, errUintOverflow
- }
- return uint64(v), nil
-}
-
-// int64ToUint64Checked converts an int64 to a uint64 value.
-//
-// If the conversion fails due to overflow the error return value will be non-nil.
-func int64ToUint64Checked(v int64) (uint64, error) {
- if v < 0 {
- return 0, errUintOverflow
- }
- return uint64(v), nil
-}
-
-// int64ToInt32Checked converts an int64 to an int32 value.
-//
-// If the conversion fails due to overflow the error return value will be non-nil.
-func int64ToInt32Checked(v int64) (int32, error) {
- if v < math.MinInt32 || v > math.MaxInt32 {
- return 0, errIntOverflow
- }
- return int32(v), nil
-}
-
-// uint64ToUint32Checked converts a uint64 to a uint32 value.
-//
-// If the conversion fails due to overflow the error return value will be non-nil.
-func uint64ToUint32Checked(v uint64) (uint32, error) {
- if v > math.MaxUint32 {
- return 0, errUintOverflow
- }
- return uint32(v), nil
-}
-
-// uint64ToInt64Checked converts a uint64 to an int64 value.
-//
-// If the conversion fails due to overflow the error return value will be non-nil.
-func uint64ToInt64Checked(v uint64) (int64, error) {
- if v > math.MaxInt64 {
- return 0, errIntOverflow
- }
- return int64(v), nil
-}
-
-func doubleToUint64Lossless(v float64) (uint64, bool) {
- u, err := doubleToUint64Checked(v)
- if err != nil {
- return 0, false
- }
- if float64(u) != v {
- return 0, false
- }
- return u, true
-}
-
-func doubleToInt64Lossless(v float64) (int64, bool) {
- i, err := doubleToInt64Checked(v)
- if err != nil {
- return 0, false
- }
- if float64(i) != v {
- return 0, false
- }
- return i, true
-}
-
-func int64ToUint64Lossless(v int64) (uint64, bool) {
- u, err := int64ToUint64Checked(v)
- return u, err == nil
-}
-
-func uint64ToInt64Lossless(v uint64) (int64, bool) {
- i, err := uint64ToInt64Checked(v)
- return i, err == nil
-}
diff --git a/vendor/github.com/google/cel-go/common/types/pb/BUILD.bazel b/vendor/github.com/google/cel-go/common/types/pb/BUILD.bazel
deleted file mode 100644
index e2b9d37b5..000000000
--- a/vendor/github.com/google/cel-go/common/types/pb/BUILD.bazel
+++ /dev/null
@@ -1,53 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "checked.go",
- "enum.go",
- "equal.go",
- "file.go",
- "pb.go",
- "type.go",
- ],
- importpath = "github.com/google/cel-go/common/types/pb",
- deps = [
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//encoding/protowire:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//reflect/protoreflect:go_default_library",
- "@org_golang_google_protobuf//reflect/protoregistry:go_default_library",
- "@org_golang_google_protobuf//types/dynamicpb:go_default_library",
- "@org_golang_google_protobuf//types/known/anypb:go_default_library",
- "@org_golang_google_protobuf//types/known/durationpb:go_default_library",
- "@org_golang_google_protobuf//types/known/emptypb:go_default_library",
- "@org_golang_google_protobuf//types/known/structpb:go_default_library",
- "@org_golang_google_protobuf//types/known/timestamppb:go_default_library",
- "@org_golang_google_protobuf//types/known/wrapperspb:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- size = "small",
- srcs = [
- "equal_test.go",
- "file_test.go",
- "pb_test.go",
- "type_test.go",
- ],
- embed = [":go_default_library"],
- deps = [
- "//checker/decls:go_default_library",
- "//test/proto2pb:test_all_types_go_proto",
- "//test/proto3pb:test_all_types_go_proto",
- "@org_golang_google_protobuf//reflect/protodesc:go_default_library",
- "@org_golang_google_protobuf//reflect/protoreflect:go_default_library",
- "@org_golang_google_protobuf//types/descriptorpb:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/types/pb/checked.go b/vendor/github.com/google/cel-go/common/types/pb/checked.go
deleted file mode 100644
index 312a6a072..000000000
--- a/vendor/github.com/google/cel-go/common/types/pb/checked.go
+++ /dev/null
@@ -1,93 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package pb
-
-import (
- "google.golang.org/protobuf/reflect/protoreflect"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
- emptypb "google.golang.org/protobuf/types/known/emptypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
-)
-
-var (
- // CheckedPrimitives map from proto field descriptor type to expr.Type.
- CheckedPrimitives = map[protoreflect.Kind]*exprpb.Type{
- protoreflect.BoolKind: checkedBool,
- protoreflect.BytesKind: checkedBytes,
- protoreflect.DoubleKind: checkedDouble,
- protoreflect.FloatKind: checkedDouble,
- protoreflect.Int32Kind: checkedInt,
- protoreflect.Int64Kind: checkedInt,
- protoreflect.Sint32Kind: checkedInt,
- protoreflect.Sint64Kind: checkedInt,
- protoreflect.Uint32Kind: checkedUint,
- protoreflect.Uint64Kind: checkedUint,
- protoreflect.Fixed32Kind: checkedUint,
- protoreflect.Fixed64Kind: checkedUint,
- protoreflect.Sfixed32Kind: checkedInt,
- protoreflect.Sfixed64Kind: checkedInt,
- protoreflect.StringKind: checkedString}
-
- // CheckedWellKnowns map from qualified proto type name to expr.Type for
- // well-known proto types.
- CheckedWellKnowns = map[string]*exprpb.Type{
- // Wrapper types.
- "google.protobuf.BoolValue": checkedWrap(checkedBool),
- "google.protobuf.BytesValue": checkedWrap(checkedBytes),
- "google.protobuf.DoubleValue": checkedWrap(checkedDouble),
- "google.protobuf.FloatValue": checkedWrap(checkedDouble),
- "google.protobuf.Int64Value": checkedWrap(checkedInt),
- "google.protobuf.Int32Value": checkedWrap(checkedInt),
- "google.protobuf.UInt64Value": checkedWrap(checkedUint),
- "google.protobuf.UInt32Value": checkedWrap(checkedUint),
- "google.protobuf.StringValue": checkedWrap(checkedString),
- // Well-known types.
- "google.protobuf.Any": checkedAny,
- "google.protobuf.Duration": checkedDuration,
- "google.protobuf.Timestamp": checkedTimestamp,
- // Json types.
- "google.protobuf.ListValue": checkedListDyn,
- "google.protobuf.NullValue": checkedNull,
- "google.protobuf.Struct": checkedMapStringDyn,
- "google.protobuf.Value": checkedDyn,
- }
-
- // common types
- checkedDyn = &exprpb.Type{TypeKind: &exprpb.Type_Dyn{Dyn: &emptypb.Empty{}}}
- // Wrapper and primitive types.
- checkedBool = checkedPrimitive(exprpb.Type_BOOL)
- checkedBytes = checkedPrimitive(exprpb.Type_BYTES)
- checkedDouble = checkedPrimitive(exprpb.Type_DOUBLE)
- checkedInt = checkedPrimitive(exprpb.Type_INT64)
- checkedString = checkedPrimitive(exprpb.Type_STRING)
- checkedUint = checkedPrimitive(exprpb.Type_UINT64)
- // Well-known type equivalents.
- checkedAny = checkedWellKnown(exprpb.Type_ANY)
- checkedDuration = checkedWellKnown(exprpb.Type_DURATION)
- checkedTimestamp = checkedWellKnown(exprpb.Type_TIMESTAMP)
- // Json-based type equivalents.
- checkedNull = &exprpb.Type{
- TypeKind: &exprpb.Type_Null{
- Null: structpb.NullValue_NULL_VALUE}}
- checkedListDyn = &exprpb.Type{
- TypeKind: &exprpb.Type_ListType_{
- ListType: &exprpb.Type_ListType{ElemType: checkedDyn}}}
- checkedMapStringDyn = &exprpb.Type{
- TypeKind: &exprpb.Type_MapType_{
- MapType: &exprpb.Type_MapType{
- KeyType: checkedString,
- ValueType: checkedDyn}}}
-)
diff --git a/vendor/github.com/google/cel-go/common/types/pb/enum.go b/vendor/github.com/google/cel-go/common/types/pb/enum.go
deleted file mode 100644
index 09a154630..000000000
--- a/vendor/github.com/google/cel-go/common/types/pb/enum.go
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package pb
-
-import (
- "google.golang.org/protobuf/reflect/protoreflect"
-)
-
-// newEnumValueDescription produces an enum value description with the fully qualified enum value
-// name and the enum value descriptor.
-func newEnumValueDescription(name string, desc protoreflect.EnumValueDescriptor) *EnumValueDescription {
- return &EnumValueDescription{
- enumValueName: name,
- desc: desc,
- }
-}
-
-// EnumValueDescription maps a fully-qualified enum value name to its numeric value.
-type EnumValueDescription struct {
- enumValueName string
- desc protoreflect.EnumValueDescriptor
-}
-
-// Name returns the fully-qualified identifier name for the enum value.
-func (ed *EnumValueDescription) Name() string {
- return ed.enumValueName
-}
-
-// Value returns the (numeric) value of the enum.
-func (ed *EnumValueDescription) Value() int32 {
- return int32(ed.desc.Number())
-}
diff --git a/vendor/github.com/google/cel-go/common/types/pb/equal.go b/vendor/github.com/google/cel-go/common/types/pb/equal.go
deleted file mode 100644
index 76893d85e..000000000
--- a/vendor/github.com/google/cel-go/common/types/pb/equal.go
+++ /dev/null
@@ -1,206 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package pb
-
-import (
- "bytes"
- "reflect"
-
- "google.golang.org/protobuf/encoding/protowire"
- "google.golang.org/protobuf/proto"
- "google.golang.org/protobuf/reflect/protoreflect"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
-)
-
-// Equal returns whether two proto.Message instances are equal using the following criteria:
-//
-// - Messages must share the same instance of the type descriptor
-// - Known set fields are compared using semantic equality
-// - Bytes are compared using bytes.Equal
-// - Scalar values are compared with operator ==
-// - List and map types are equal if they have the same length and all elements are equal
-// - Messages are equal if they share the same descriptor and all set fields are equal
-// - Unknown fields are compared using byte equality
-// - NaN values are not equal to each other
-// - google.protobuf.Any values are unpacked before comparison
-// - If the type descriptor for a protobuf.Any cannot be found, byte equality is used rather than
-// semantic equality.
-//
-// This method of proto equality mirrors the behavior of the C++ protobuf MessageDifferencer,
-// whereas the golang proto.Equal implementation mirrors the Java protobuf equals() method's
-// behavior, which needed to treat NaN values as equal due to Java semantics.
-func Equal(x, y proto.Message) bool {
- if x == nil || y == nil {
- return x == nil && y == nil
- }
- xRef := x.ProtoReflect()
- yRef := y.ProtoReflect()
- return equalMessage(xRef, yRef)
-}
-
-func equalMessage(mx, my protoreflect.Message) bool {
- // Note, the original proto.Equal upon which this implementation is based does not specifically handle the
- // case when both messages are invalid. It is assumed that the descriptors will be equal and that byte-wise
- // comparison will be used, though the semantics of validity are neither clear, nor promised within the
- // proto.Equal implementation.
- if mx.IsValid() != my.IsValid() || mx.Descriptor() != my.Descriptor() {
- return false
- }
-
- // This is an innovation on the default proto.Equal where protobuf.Any values are unpacked before comparison
- // as otherwise the Any values are compared by bytes rather than structurally.
- if isAny(mx) && isAny(my) {
- ax := mx.Interface().(*anypb.Any)
- ay := my.Interface().(*anypb.Any)
- // If the values are not the same type url, return false.
- if ax.GetTypeUrl() != ay.GetTypeUrl() {
- return false
- }
- // If the values are byte equal, then return true.
- if bytes.Equal(ax.GetValue(), ay.GetValue()) {
- return true
- }
- // Otherwise fall through to the semantic comparison of the any values.
- x, err := ax.UnmarshalNew()
- if err != nil {
- return false
- }
- y, err := ay.UnmarshalNew()
- if err != nil {
- return false
- }
- // Recursively compare the unwrapped messages to ensure nested Any values are unwrapped accordingly.
- return equalMessage(x.ProtoReflect(), y.ProtoReflect())
- }
-
- // Walk the set fields to determine field-wise equality
- nx := 0
- equal := true
- mx.Range(func(fd protoreflect.FieldDescriptor, vx protoreflect.Value) bool {
- nx++
- equal = my.Has(fd) && equalField(fd, vx, my.Get(fd))
- return equal
- })
- if !equal {
- return false
- }
- // Establish the count of set fields on message y
- ny := 0
- my.Range(func(protoreflect.FieldDescriptor, protoreflect.Value) bool {
- ny++
- return true
- })
- // If the number of set fields is not equal return false.
- if nx != ny {
- return false
- }
-
- return equalUnknown(mx.GetUnknown(), my.GetUnknown())
-}
-
-func equalField(fd protoreflect.FieldDescriptor, x, y protoreflect.Value) bool {
- switch {
- case fd.IsMap():
- return equalMap(fd, x.Map(), y.Map())
- case fd.IsList():
- return equalList(fd, x.List(), y.List())
- default:
- return equalValue(fd, x, y)
- }
-}
-
-func equalMap(fd protoreflect.FieldDescriptor, x, y protoreflect.Map) bool {
- if x.Len() != y.Len() {
- return false
- }
- equal := true
- x.Range(func(k protoreflect.MapKey, vx protoreflect.Value) bool {
- vy := y.Get(k)
- equal = y.Has(k) && equalValue(fd.MapValue(), vx, vy)
- return equal
- })
- return equal
-}
-
-func equalList(fd protoreflect.FieldDescriptor, x, y protoreflect.List) bool {
- if x.Len() != y.Len() {
- return false
- }
- for i := x.Len() - 1; i >= 0; i-- {
- if !equalValue(fd, x.Get(i), y.Get(i)) {
- return false
- }
- }
- return true
-}
-
-func equalValue(fd protoreflect.FieldDescriptor, x, y protoreflect.Value) bool {
- switch fd.Kind() {
- case protoreflect.BoolKind:
- return x.Bool() == y.Bool()
- case protoreflect.EnumKind:
- return x.Enum() == y.Enum()
- case protoreflect.Int32Kind, protoreflect.Sint32Kind,
- protoreflect.Int64Kind, protoreflect.Sint64Kind,
- protoreflect.Sfixed32Kind, protoreflect.Sfixed64Kind:
- return x.Int() == y.Int()
- case protoreflect.Uint32Kind, protoreflect.Uint64Kind,
- protoreflect.Fixed32Kind, protoreflect.Fixed64Kind:
- return x.Uint() == y.Uint()
- case protoreflect.FloatKind, protoreflect.DoubleKind:
- return x.Float() == y.Float()
- case protoreflect.StringKind:
- return x.String() == y.String()
- case protoreflect.BytesKind:
- return bytes.Equal(x.Bytes(), y.Bytes())
- case protoreflect.MessageKind, protoreflect.GroupKind:
- return equalMessage(x.Message(), y.Message())
- default:
- return x.Interface() == y.Interface()
- }
-}
-
-func equalUnknown(x, y protoreflect.RawFields) bool {
- lenX := len(x)
- lenY := len(y)
- if lenX != lenY {
- return false
- }
- if lenX == 0 {
- return true
- }
- if bytes.Equal([]byte(x), []byte(y)) {
- return true
- }
-
- mx := make(map[protoreflect.FieldNumber]protoreflect.RawFields)
- my := make(map[protoreflect.FieldNumber]protoreflect.RawFields)
- for len(x) > 0 {
- fnum, _, n := protowire.ConsumeField(x)
- mx[fnum] = append(mx[fnum], x[:n]...)
- x = x[n:]
- }
- for len(y) > 0 {
- fnum, _, n := protowire.ConsumeField(y)
- my[fnum] = append(my[fnum], y[:n]...)
- y = y[n:]
- }
- return reflect.DeepEqual(mx, my)
-}
-
-func isAny(m protoreflect.Message) bool {
- return string(m.Descriptor().FullName()) == "google.protobuf.Any"
-}
diff --git a/vendor/github.com/google/cel-go/common/types/pb/file.go b/vendor/github.com/google/cel-go/common/types/pb/file.go
deleted file mode 100644
index e323afb1d..000000000
--- a/vendor/github.com/google/cel-go/common/types/pb/file.go
+++ /dev/null
@@ -1,202 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package pb
-
-import (
- "fmt"
-
- "google.golang.org/protobuf/reflect/protoreflect"
-
- dynamicpb "google.golang.org/protobuf/types/dynamicpb"
-)
-
-// newFileDescription returns a FileDescription instance with a complete listing of all the message
-// types and enum values, as well as a map of extensions declared within any scope in the file.
-func newFileDescription(fileDesc protoreflect.FileDescriptor, pbdb *Db) (*FileDescription, extensionMap) {
- metadata := collectFileMetadata(fileDesc)
- enums := make(map[string]*EnumValueDescription)
- for name, enumVal := range metadata.enumValues {
- enums[name] = newEnumValueDescription(name, enumVal)
- }
- types := make(map[string]*TypeDescription)
- for name, msgType := range metadata.msgTypes {
- types[name] = newTypeDescription(name, msgType, pbdb.extensions)
- }
- fileExtMap := make(extensionMap)
- for typeName, extensions := range metadata.msgExtensionMap {
- messageExtMap, found := fileExtMap[typeName]
- if !found {
- messageExtMap = make(map[string]*FieldDescription)
- }
- for _, ext := range extensions {
- extDesc := dynamicpb.NewExtensionType(ext).TypeDescriptor()
- messageExtMap[string(ext.FullName())] = newFieldDescription(extDesc)
- }
- fileExtMap[typeName] = messageExtMap
- }
- return &FileDescription{
- name: fileDesc.Path(),
- types: types,
- enums: enums,
- }, fileExtMap
-}
-
-// FileDescription holds a map of all types and enum values declared within a proto file.
-type FileDescription struct {
- name string
- types map[string]*TypeDescription
- enums map[string]*EnumValueDescription
-}
-
-// Copy creates a copy of the FileDescription with updated Db references within its types.
-func (fd *FileDescription) Copy(pbdb *Db) *FileDescription {
- typesCopy := make(map[string]*TypeDescription, len(fd.types))
- for k, v := range fd.types {
- typesCopy[k] = v.Copy(pbdb)
- }
- return &FileDescription{
- name: fd.name,
- types: typesCopy,
- enums: fd.enums,
- }
-}
-
-// GetName returns the fully qualified file path for the file.
-func (fd *FileDescription) GetName() string {
- return fd.name
-}
-
-// GetEnumDescription returns an EnumValueDescription for a qualified enum value
-// name declared within the .proto file.
-func (fd *FileDescription) GetEnumDescription(enumName string) (*EnumValueDescription, bool) {
- ed, found := fd.enums[sanitizeProtoName(enumName)]
- return ed, found
-}
-
-// GetEnumNames returns the string names of all enum values in the file.
-func (fd *FileDescription) GetEnumNames() []string {
- enumNames := make([]string, len(fd.enums))
- i := 0
- for _, e := range fd.enums {
- enumNames[i] = e.Name()
- i++
- }
- return enumNames
-}
-
-// GetTypeDescription returns a TypeDescription for a qualified protobuf message type name
-// declared within the .proto file.
-func (fd *FileDescription) GetTypeDescription(typeName string) (*TypeDescription, bool) {
- td, found := fd.types[sanitizeProtoName(typeName)]
- return td, found
-}
-
-// GetTypeNames returns the list of all type names contained within the file.
-func (fd *FileDescription) GetTypeNames() []string {
- typeNames := make([]string, len(fd.types))
- i := 0
- for _, t := range fd.types {
- typeNames[i] = t.Name()
- i++
- }
- return typeNames
-}
-
-// sanitizeProtoName strips the leading '.' from the proto message name.
-func sanitizeProtoName(name string) string {
- if name != "" && name[0] == '.' {
- return name[1:]
- }
- return name
-}
-
-// fileMetadata is a flattened view of message types and enum values within a file descriptor.
-type fileMetadata struct {
- // msgTypes maps from fully-qualified message name to descriptor.
- msgTypes map[string]protoreflect.MessageDescriptor
- // enumValues maps from fully-qualified enum value to enum value descriptor.
- enumValues map[string]protoreflect.EnumValueDescriptor
- // msgExtensionMap maps from the protobuf message name being extended to a set of extensions
- // for the type.
- msgExtensionMap map[string][]protoreflect.ExtensionDescriptor
-
- // TODO: support enum type definitions for use in future type-check enhancements.
-}
-
-// collectFileMetadata traverses the proto file object graph to collect message types and enum
-// values and index them by their fully qualified names.
-func collectFileMetadata(fileDesc protoreflect.FileDescriptor) *fileMetadata {
- msgTypes := make(map[string]protoreflect.MessageDescriptor)
- enumValues := make(map[string]protoreflect.EnumValueDescriptor)
- msgExtensionMap := make(map[string][]protoreflect.ExtensionDescriptor)
- collectMsgTypes(fileDesc.Messages(), msgTypes, enumValues, msgExtensionMap)
- collectEnumValues(fileDesc.Enums(), enumValues)
- collectExtensions(fileDesc.Extensions(), msgExtensionMap)
- return &fileMetadata{
- msgTypes: msgTypes,
- enumValues: enumValues,
- msgExtensionMap: msgExtensionMap,
- }
-}
-
-// collectMsgTypes recursively collects messages, nested messages, and nested enums into a map of
-// fully qualified protobuf names to descriptors.
-func collectMsgTypes(msgTypes protoreflect.MessageDescriptors,
- msgTypeMap map[string]protoreflect.MessageDescriptor,
- enumValueMap map[string]protoreflect.EnumValueDescriptor,
- msgExtensionMap map[string][]protoreflect.ExtensionDescriptor) {
- for i := 0; i < msgTypes.Len(); i++ {
- msgType := msgTypes.Get(i)
- msgTypeMap[string(msgType.FullName())] = msgType
- nestedMsgTypes := msgType.Messages()
- if nestedMsgTypes.Len() != 0 {
- collectMsgTypes(nestedMsgTypes, msgTypeMap, enumValueMap, msgExtensionMap)
- }
- nestedEnumTypes := msgType.Enums()
- if nestedEnumTypes.Len() != 0 {
- collectEnumValues(nestedEnumTypes, enumValueMap)
- }
- nestedExtensions := msgType.Extensions()
- if nestedExtensions.Len() != 0 {
- collectExtensions(nestedExtensions, msgExtensionMap)
- }
- }
-}
-
-// collectEnumValues accumulates the enum values within an enum declaration.
-func collectEnumValues(enumTypes protoreflect.EnumDescriptors, enumValueMap map[string]protoreflect.EnumValueDescriptor) {
- for i := 0; i < enumTypes.Len(); i++ {
- enumType := enumTypes.Get(i)
- enumTypeValues := enumType.Values()
- for j := 0; j < enumTypeValues.Len(); j++ {
- enumValue := enumTypeValues.Get(j)
- enumValueName := fmt.Sprintf("%s.%s", string(enumType.FullName()), string(enumValue.Name()))
- enumValueMap[enumValueName] = enumValue
- }
- }
-}
-
-func collectExtensions(extensions protoreflect.ExtensionDescriptors, msgExtensionMap map[string][]protoreflect.ExtensionDescriptor) {
- for i := 0; i < extensions.Len(); i++ {
- ext := extensions.Get(i)
- extendsMsg := string(ext.ContainingMessage().FullName())
- msgExts, found := msgExtensionMap[extendsMsg]
- if !found {
- msgExts = []protoreflect.ExtensionDescriptor{}
- }
- msgExts = append(msgExts, ext)
- msgExtensionMap[extendsMsg] = msgExts
- }
-}
diff --git a/vendor/github.com/google/cel-go/common/types/pb/pb.go b/vendor/github.com/google/cel-go/common/types/pb/pb.go
deleted file mode 100644
index eadebcb04..000000000
--- a/vendor/github.com/google/cel-go/common/types/pb/pb.go
+++ /dev/null
@@ -1,258 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package pb reflects over protocol buffer descriptors to generate objects
-// that simplify type, enum, and field lookup.
-package pb
-
-import (
- "fmt"
-
- "google.golang.org/protobuf/proto"
- "google.golang.org/protobuf/reflect/protoreflect"
- "google.golang.org/protobuf/reflect/protoregistry"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- durpb "google.golang.org/protobuf/types/known/durationpb"
- emptypb "google.golang.org/protobuf/types/known/emptypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- tspb "google.golang.org/protobuf/types/known/timestamppb"
- wrapperspb "google.golang.org/protobuf/types/known/wrapperspb"
-)
-
-// Db maps from file / message / enum name to file description.
-//
-// Each Db is isolated from each other, and while information about protobuf descriptors may be
-// fetched from the global protobuf registry, no descriptors are added to this registry, else
-// the isolation guarantees of the Db object would be violated.
-type Db struct {
- revFileDescriptorMap map[string]*FileDescription
- // files contains the deduped set of FileDescriptions whose types are contained in the pb.Db.
- files []*FileDescription
- // extensions contains the mapping between a given type name, extension name and its FieldDescription
- extensions map[string]map[string]*FieldDescription
-}
-
-// extensionMap is a type alias to a map[typeName]map[extensionName]*FieldDescription
-type extensionMap = map[string]map[string]*FieldDescription
-
-var (
- // DefaultDb used at evaluation time or unless overridden at check time.
- DefaultDb = &Db{
- revFileDescriptorMap: make(map[string]*FileDescription),
- files: []*FileDescription{},
- extensions: make(extensionMap),
- }
-)
-
-// Merge will copy the source proto message into the destination, or error if the merge cannot be completed.
-//
-// Unlike proto.Merge, this method will fall back to proto.Marshal/Unmarshal if the two proto messages do not
-// share the same instance of their type descriptor.
-func Merge(dstPB, srcPB proto.Message) error {
- src, dst := srcPB.ProtoReflect(), dstPB.ProtoReflect()
- if src.Descriptor() == dst.Descriptor() {
- proto.Merge(dstPB, srcPB)
- return nil
- }
- if src.Descriptor().FullName() != dst.Descriptor().FullName() {
- return fmt.Errorf("pb.Merge() arguments must be the same type. got: %v, %v",
- dst.Descriptor().FullName(), src.Descriptor().FullName())
- }
- bytes, err := proto.Marshal(srcPB)
- if err != nil {
- return fmt.Errorf("pb.Merge(dstPB, srcPB) failed to marshal source proto: %v", err)
- }
- err = proto.Unmarshal(bytes, dstPB)
- if err != nil {
- return fmt.Errorf("pb.Merge(dstPB, srcPB) failed to unmarshal to dest proto: %v", err)
- }
- return nil
-}
-
-// NewDb creates a new `pb.Db` with an empty type name to file description map.
-func NewDb() *Db {
- pbdb := &Db{
- revFileDescriptorMap: make(map[string]*FileDescription),
- files: []*FileDescription{},
- extensions: make(extensionMap),
- }
- // The FileDescription objects in the default db contain lazily initialized TypeDescription
- // values which may point to the state contained in the DefaultDb irrespective of this shallow
- // copy; however, the type graph for a field is idempotently computed, and is guaranteed to
- // only be initialized once thanks to atomic values within the TypeDescription objects, so it
- // is safe to share these values across instances.
- for k, v := range DefaultDb.revFileDescriptorMap {
- pbdb.revFileDescriptorMap[k] = v
- }
- pbdb.files = append(pbdb.files, DefaultDb.files...)
- return pbdb
-}
-
-// Copy creates a copy of the current database with its own internal descriptor mapping.
-func (pbdb *Db) Copy() *Db {
- copy := NewDb()
- for _, fd := range pbdb.files {
- hasFile := false
- for _, fd2 := range copy.files {
- if fd2 == fd {
- hasFile = true
- }
- }
- if !hasFile {
- fd = fd.Copy(copy)
- copy.files = append(copy.files, fd)
- }
- for _, enumValName := range fd.GetEnumNames() {
- copy.revFileDescriptorMap[enumValName] = fd
- }
- for _, msgTypeName := range fd.GetTypeNames() {
- copy.revFileDescriptorMap[msgTypeName] = fd
- }
- copy.revFileDescriptorMap[fd.GetName()] = fd
- }
- for typeName, extFieldMap := range pbdb.extensions {
- copyExtFieldMap, found := copy.extensions[typeName]
- if !found {
- copyExtFieldMap = make(map[string]*FieldDescription, len(extFieldMap))
- }
- for extFieldName, fd := range extFieldMap {
- copyExtFieldMap[extFieldName] = fd
- }
- copy.extensions[typeName] = copyExtFieldMap
- }
- return copy
-}
-
-// FileDescriptions returns the set of file descriptions associated with this db.
-func (pbdb *Db) FileDescriptions() []*FileDescription {
- return pbdb.files
-}
-
-// RegisterDescriptor produces a `FileDescription` from a `FileDescriptor` and registers the
-// message and enum types into the `pb.Db`.
-func (pbdb *Db) RegisterDescriptor(fileDesc protoreflect.FileDescriptor) (*FileDescription, error) {
- fd, found := pbdb.revFileDescriptorMap[fileDesc.Path()]
- if found {
- return fd, nil
- }
- // Make sure to search the global registry to see if a protoreflect.FileDescriptor for
- // the file specified has been linked into the binary. If so, use the copy of the descriptor
- // from the global cache.
- //
- // Note: Proto reflection relies on descriptor values being object equal rather than object
- // equivalence. This choice means that a FieldDescriptor generated from a FileDescriptorProto
- // will be incompatible with the FieldDescriptor in the global registry and any message created
- // from that global registry.
- globalFD, err := protoregistry.GlobalFiles.FindFileByPath(fileDesc.Path())
- if err == nil {
- fileDesc = globalFD
- }
- var fileExtMap extensionMap
- fd, fileExtMap = newFileDescription(fileDesc, pbdb)
- for _, enumValName := range fd.GetEnumNames() {
- pbdb.revFileDescriptorMap[enumValName] = fd
- }
- for _, msgTypeName := range fd.GetTypeNames() {
- pbdb.revFileDescriptorMap[msgTypeName] = fd
- }
- pbdb.revFileDescriptorMap[fd.GetName()] = fd
-
- // Return the specific file descriptor registered.
- pbdb.files = append(pbdb.files, fd)
-
- // Index the protobuf message extensions from the file into the pbdb
- for typeName, extMap := range fileExtMap {
- typeExtMap, found := pbdb.extensions[typeName]
- if !found {
- pbdb.extensions[typeName] = extMap
- continue
- }
- for extName, field := range extMap {
- typeExtMap[extName] = field
- }
- }
- return fd, nil
-}
-
-// RegisterMessage produces a `FileDescription` from a `message` and registers the message and all
-// other definitions within the message file into the `pb.Db`.
-func (pbdb *Db) RegisterMessage(message proto.Message) (*FileDescription, error) {
- msgDesc := message.ProtoReflect().Descriptor()
- msgName := msgDesc.FullName()
- typeName := sanitizeProtoName(string(msgName))
- if fd, found := pbdb.revFileDescriptorMap[typeName]; found {
- return fd, nil
- }
- return pbdb.RegisterDescriptor(msgDesc.ParentFile())
-}
-
-// DescribeEnum takes a qualified enum name and returns an `EnumValueDescription` if it exists in the
-// `pb.Db`.
-func (pbdb *Db) DescribeEnum(enumName string) (*EnumValueDescription, bool) {
- enumName = sanitizeProtoName(enumName)
- if fd, found := pbdb.revFileDescriptorMap[enumName]; found {
- return fd.GetEnumDescription(enumName)
- }
- return nil, false
-}
-
-// DescribeType returns a `TypeDescription` for the `typeName` if it exists in the `pb.Db`.
-func (pbdb *Db) DescribeType(typeName string) (*TypeDescription, bool) {
- typeName = sanitizeProtoName(typeName)
- if fd, found := pbdb.revFileDescriptorMap[typeName]; found {
- return fd.GetTypeDescription(typeName)
- }
- return nil, false
-}
-
-// CollectFileDescriptorSet builds a file descriptor set associated with the file where the input
-// message is declared.
-func CollectFileDescriptorSet(message proto.Message) map[string]protoreflect.FileDescriptor {
- fdMap := map[string]protoreflect.FileDescriptor{}
- parentFile := message.ProtoReflect().Descriptor().ParentFile()
- fdMap[parentFile.Path()] = parentFile
- // Initialize list of dependencies
- deps := make([]protoreflect.FileImport, parentFile.Imports().Len())
- for i := 0; i < parentFile.Imports().Len(); i++ {
- deps[i] = parentFile.Imports().Get(i)
- }
- // Expand list for new dependencies
- for i := 0; i < len(deps); i++ {
- dep := deps[i]
- if _, found := fdMap[dep.Path()]; found {
- continue
- }
- fdMap[dep.Path()] = dep.FileDescriptor
- for j := 0; j < dep.FileDescriptor.Imports().Len(); j++ {
- deps = append(deps, dep.FileDescriptor.Imports().Get(j))
- }
- }
- return fdMap
-}
-
-func init() {
- // Describe well-known types to ensure they can always be resolved by the check and interpret
- // execution phases.
- //
- // The following subset of message types is enough to ensure that all well-known types can
- // be resolved at runtime, since describing the value results in describing the whole file
- // where the message is declared.
- DefaultDb.RegisterMessage(&anypb.Any{})
- DefaultDb.RegisterMessage(&durpb.Duration{})
- DefaultDb.RegisterMessage(&emptypb.Empty{})
- DefaultDb.RegisterMessage(&tspb.Timestamp{})
- DefaultDb.RegisterMessage(&structpb.Value{})
- DefaultDb.RegisterMessage(&wrapperspb.BoolValue{})
-}
diff --git a/vendor/github.com/google/cel-go/common/types/pb/type.go b/vendor/github.com/google/cel-go/common/types/pb/type.go
deleted file mode 100644
index 6cc95c276..000000000
--- a/vendor/github.com/google/cel-go/common/types/pb/type.go
+++ /dev/null
@@ -1,587 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package pb
-
-import (
- "fmt"
- "reflect"
-
- "google.golang.org/protobuf/proto"
- "google.golang.org/protobuf/reflect/protoreflect"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
- dynamicpb "google.golang.org/protobuf/types/dynamicpb"
- anypb "google.golang.org/protobuf/types/known/anypb"
- dpb "google.golang.org/protobuf/types/known/durationpb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- tpb "google.golang.org/protobuf/types/known/timestamppb"
- wrapperspb "google.golang.org/protobuf/types/known/wrapperspb"
-)
-
-// description is a private interface used to make it convenient to perform type unwrapping at
-// the TypeDescription or FieldDescription level.
-type description interface {
- // Zero returns an empty immutable protobuf message when the description is a protobuf message
- // type.
- Zero() proto.Message
-}
-
-// newTypeDescription produces a TypeDescription value for the fully-qualified proto type name
-// with a given descriptor.
-func newTypeDescription(typeName string, desc protoreflect.MessageDescriptor, extensions extensionMap) *TypeDescription {
- msgType := dynamicpb.NewMessageType(desc)
- msgZero := dynamicpb.NewMessage(desc)
- fieldMap := map[string]*FieldDescription{}
- fields := desc.Fields()
- for i := 0; i < fields.Len(); i++ {
- f := fields.Get(i)
- fieldMap[string(f.Name())] = newFieldDescription(f)
- }
- return &TypeDescription{
- typeName: typeName,
- desc: desc,
- msgType: msgType,
- fieldMap: fieldMap,
- extensions: extensions,
- reflectType: reflectTypeOf(msgZero),
- zeroMsg: zeroValueOf(msgZero),
- }
-}
-
-// TypeDescription is a collection of type metadata relevant to expression
-// checking and evaluation.
-type TypeDescription struct {
- typeName string
- desc protoreflect.MessageDescriptor
- msgType protoreflect.MessageType
- fieldMap map[string]*FieldDescription
- extensions extensionMap
- reflectType reflect.Type
- zeroMsg proto.Message
-}
-
-// Copy copies the type description with updated references to the Db.
-func (td *TypeDescription) Copy(pbdb *Db) *TypeDescription {
- return &TypeDescription{
- typeName: td.typeName,
- desc: td.desc,
- msgType: td.msgType,
- fieldMap: td.fieldMap,
- extensions: pbdb.extensions,
- reflectType: td.reflectType,
- zeroMsg: td.zeroMsg,
- }
-}
-
-// FieldMap returns a string field name to FieldDescription map.
-func (td *TypeDescription) FieldMap() map[string]*FieldDescription {
- return td.fieldMap
-}
-
-// FieldByName returns (FieldDescription, true) if the field name is declared within the type.
-func (td *TypeDescription) FieldByName(name string) (*FieldDescription, bool) {
- fd, found := td.fieldMap[name]
- if found {
- return fd, true
- }
- extFieldMap, found := td.extensions[td.typeName]
- if !found {
- return nil, false
- }
- fd, found = extFieldMap[name]
- return fd, found
-}
-
-// MaybeUnwrap accepts a proto message as input and unwraps it to a primitive CEL type if possible.
-//
-// This method returns the unwrapped value and 'true', else the original value and 'false'.
-func (td *TypeDescription) MaybeUnwrap(msg proto.Message) (any, bool, error) {
- return unwrap(td, msg)
-}
-
-// Name returns the fully-qualified name of the type.
-func (td *TypeDescription) Name() string {
- return string(td.desc.FullName())
-}
-
-// New returns a mutable proto message.
-func (td *TypeDescription) New() protoreflect.Message {
- return td.msgType.New()
-}
-
-// ReflectType returns the Golang reflect.Type for this type.
-func (td *TypeDescription) ReflectType() reflect.Type {
- return td.reflectType
-}
-
-// Zero returns the zero proto.Message value for this type.
-func (td *TypeDescription) Zero() proto.Message {
- return td.zeroMsg
-}
-
-// newFieldDescription creates a new field description from a protoreflect.FieldDescriptor.
-func newFieldDescription(fieldDesc protoreflect.FieldDescriptor) *FieldDescription {
- var reflectType reflect.Type
- var zeroMsg proto.Message
- switch fieldDesc.Kind() {
- case protoreflect.EnumKind:
- reflectType = reflectTypeOf(protoreflect.EnumNumber(0))
- case protoreflect.GroupKind, protoreflect.MessageKind:
- zeroMsg = dynamicpb.NewMessage(fieldDesc.Message())
- reflectType = reflectTypeOf(zeroMsg)
- default:
- reflectType = reflectTypeOf(fieldDesc.Default().Interface())
- if fieldDesc.IsList() {
- var elemValue protoreflect.Value
- if fieldDesc.IsExtension() {
- et := dynamicpb.NewExtensionType(fieldDesc)
- elemValue = et.New().List().NewElement()
- } else {
- parentMsgType := fieldDesc.ContainingMessage()
- parentMsg := dynamicpb.NewMessage(parentMsgType)
- listField := parentMsg.NewField(fieldDesc).List()
- elemValue = listField.NewElement()
- }
- elem := elemValue.Interface()
- switch elemType := elem.(type) {
- case protoreflect.Message:
- elem = elemType.Interface()
- }
- reflectType = reflectTypeOf(elem)
- }
- }
- // Ensure the list type is appropriately reflected as a Go-native list.
- if fieldDesc.IsList() {
- reflectType = reflect.SliceOf(reflectType)
- }
- var keyType, valType *FieldDescription
- if fieldDesc.IsMap() {
- keyType = newFieldDescription(fieldDesc.MapKey())
- valType = newFieldDescription(fieldDesc.MapValue())
- }
- return &FieldDescription{
- desc: fieldDesc,
- KeyType: keyType,
- ValueType: valType,
- reflectType: reflectType,
- zeroMsg: zeroValueOf(zeroMsg),
- }
-}
-
-// FieldDescription holds metadata related to fields declared within a type.
-type FieldDescription struct {
- // KeyType holds the key FieldDescription for map fields.
- KeyType *FieldDescription
- // ValueType holds the value FieldDescription for map fields.
- ValueType *FieldDescription
-
- desc protoreflect.FieldDescriptor
- reflectType reflect.Type
- zeroMsg proto.Message
-}
-
-// CheckedType returns the type-definition used at type-check time.
-func (fd *FieldDescription) CheckedType() *exprpb.Type {
- if fd.desc.IsMap() {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_MapType_{
- MapType: &exprpb.Type_MapType{
- KeyType: fd.KeyType.typeDefToType(),
- ValueType: fd.ValueType.typeDefToType(),
- },
- },
- }
- }
- if fd.desc.IsList() {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_ListType_{
- ListType: &exprpb.Type_ListType{
- ElemType: fd.typeDefToType()}}}
- }
- return fd.typeDefToType()
-}
-
-// Descriptor returns the protoreflect.FieldDescriptor for this type.
-func (fd *FieldDescription) Descriptor() protoreflect.FieldDescriptor {
- return fd.desc
-}
-
-// IsSet returns whether the field is set on the target value, per the proto presence conventions
-// of proto2 or proto3 accordingly.
-//
-// This function implements the FieldType.IsSet function contract which can be used to operate on
-// more than just protobuf field accesses; however, the target here must be a protobuf.Message.
-func (fd *FieldDescription) IsSet(target any) bool {
- switch v := target.(type) {
- case proto.Message:
- pbRef := v.ProtoReflect()
- pbDesc := pbRef.Descriptor()
- if pbDesc == fd.desc.ContainingMessage() {
- // When the target protobuf shares the same message descriptor instance as the field
- // descriptor, use the cached field descriptor value.
- return pbRef.Has(fd.desc)
- }
- // Otherwise, fallback to a dynamic lookup of the field descriptor from the target
- // instance as an attempt to use the cached field descriptor will result in a panic.
- return pbRef.Has(pbDesc.Fields().ByName(protoreflect.Name(fd.Name())))
- default:
- return false
- }
-}
-
-// GetFrom returns the value of the field on the proto generated struct.
-//
-// If the field is not set, the proto default value is returned instead.
-//
-// This function implements the FieldType.GetFrom function contract which can be used to operate
-// on more than just protobuf field accesses; however, the target here must be a protobuf.Message.
-func (fd *FieldDescription) GetFrom(target any) (any, error) {
- v, ok := target.(proto.Message)
- if !ok {
- return nil, fmt.Errorf("unsupported field selection target: (%T)%v", target, target)
- }
- pbRef := v.ProtoReflect()
- pbDesc := pbRef.Descriptor()
- var fieldVal any
- if pbDesc == fd.desc.ContainingMessage() {
- // When the target protobuf shares the same message descriptor instance as the field
- // descriptor, use the cached field descriptor value.
- fieldVal = pbRef.Get(fd.desc).Interface()
- } else {
- // Otherwise, fallback to a dynamic lookup of the field descriptor from the target
- // instance as an attempt to use the cached field descriptor will result in a panic.
- fieldVal = pbRef.Get(pbDesc.Fields().ByName(protoreflect.Name(fd.Name()))).Interface()
- }
- switch fv := fieldVal.(type) {
- // Fast-path return for primitive types.
- case bool, []byte, float32, float64, int32, int64, string, uint32, uint64, protoreflect.List:
- return fv, nil
- case protoreflect.EnumNumber:
- return int64(fv), nil
- case protoreflect.Map:
- // Return a wrapper around the protobuf-reflected Map types which carries additional
- // information about the key and value definitions of the map.
- return &Map{Map: fv, KeyType: fd.KeyType, ValueType: fd.ValueType}, nil
- case protoreflect.Message:
- // Make sure to unwrap well-known protobuf types before returning.
- unwrapped, _, err := fd.MaybeUnwrapDynamic(fv)
- return unwrapped, err
- default:
- return fv, nil
- }
-}
-
-// IsEnum returns true if the field type refers to an enum value.
-func (fd *FieldDescription) IsEnum() bool {
- return fd.ProtoKind() == protoreflect.EnumKind
-}
-
-// IsMap returns true if the field is of map type.
-func (fd *FieldDescription) IsMap() bool {
- return fd.desc.IsMap()
-}
-
-// IsMessage returns true if the field is of message type.
-func (fd *FieldDescription) IsMessage() bool {
- kind := fd.ProtoKind()
- return kind == protoreflect.MessageKind || kind == protoreflect.GroupKind
-}
-
-// IsOneof returns true if the field is declared within a oneof block.
-func (fd *FieldDescription) IsOneof() bool {
- return fd.desc.ContainingOneof() != nil
-}
-
-// IsList returns true if the field is a repeated value.
-//
-// This method will also return true for map values, so check whether the
-// field is also a map.
-func (fd *FieldDescription) IsList() bool {
- return fd.desc.IsList()
-}
-
-// MaybeUnwrapDynamic takes the reflected protoreflect.Message and determines whether the
-// value can be unwrapped to a more primitive CEL type.
-//
-// This function returns the unwrapped value and 'true' on success, or the original value
-// and 'false' otherwise.
-func (fd *FieldDescription) MaybeUnwrapDynamic(msg protoreflect.Message) (any, bool, error) {
- return unwrapDynamic(fd, msg)
-}
-
-// Name returns the CamelCase name of the field within the proto-based struct.
-func (fd *FieldDescription) Name() string {
- return string(fd.desc.Name())
-}
-
-// ProtoKind returns the protobuf reflected kind of the field.
-func (fd *FieldDescription) ProtoKind() protoreflect.Kind {
- return fd.desc.Kind()
-}
-
-// ReflectType returns the Golang reflect.Type for this field.
-func (fd *FieldDescription) ReflectType() reflect.Type {
- return fd.reflectType
-}
-
-// String returns the fully qualified name of the field within its type as well as whether the
-// field occurs within a oneof.
-func (fd *FieldDescription) String() string {
- return fmt.Sprintf("%v.%s `oneof=%t`", fd.desc.ContainingMessage().FullName(), fd.Name(), fd.IsOneof())
-}
-
-// Zero returns the zero value for the protobuf message represented by this field.
-//
-// If the field is not a proto.Message type, the zero value is nil.
-func (fd *FieldDescription) Zero() proto.Message {
- return fd.zeroMsg
-}
-
-func (fd *FieldDescription) typeDefToType() *exprpb.Type {
- if fd.IsMessage() {
- msgType := string(fd.desc.Message().FullName())
- if wk, found := CheckedWellKnowns[msgType]; found {
- return wk
- }
- return checkedMessageType(msgType)
- }
- if fd.IsEnum() {
- return checkedInt
- }
- return CheckedPrimitives[fd.ProtoKind()]
-}
-
-// Map wraps the protoreflect.Map object with a key and value FieldDescription for use in
-// retrieving individual elements within CEL value data types.
-type Map struct {
- protoreflect.Map
- KeyType *FieldDescription
- ValueType *FieldDescription
-}
-
-func checkedMessageType(name string) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_MessageType{MessageType: name}}
-}
-
-func checkedPrimitive(primitive exprpb.Type_PrimitiveType) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_Primitive{Primitive: primitive}}
-}
-
-func checkedWellKnown(wellKnown exprpb.Type_WellKnownType) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_WellKnown{WellKnown: wellKnown}}
-}
-
-func checkedWrap(t *exprpb.Type) *exprpb.Type {
- return &exprpb.Type{
- TypeKind: &exprpb.Type_Wrapper{Wrapper: t.GetPrimitive()}}
-}
-
-// unwrap unwraps the provided proto.Message value, potentially based on the description if the
-// input message is a *dynamicpb.Message which obscures the typing information from Go.
-//
-// Returns the unwrapped value and 'true' if unwrapped, otherwise the input value and 'false'.
-func unwrap(desc description, msg proto.Message) (any, bool, error) {
- switch v := msg.(type) {
- case *anypb.Any:
- dynMsg, err := v.UnmarshalNew()
- if err != nil {
- return v, false, err
- }
- return unwrapDynamic(desc, dynMsg.ProtoReflect())
- case *dynamicpb.Message:
- return unwrapDynamic(desc, v)
- case *dpb.Duration:
- return v.AsDuration(), true, nil
- case *tpb.Timestamp:
- return v.AsTime(), true, nil
- case *structpb.Value:
- switch v.GetKind().(type) {
- case *structpb.Value_BoolValue:
- return v.GetBoolValue(), true, nil
- case *structpb.Value_ListValue:
- return v.GetListValue(), true, nil
- case *structpb.Value_NullValue:
- return structpb.NullValue_NULL_VALUE, true, nil
- case *structpb.Value_NumberValue:
- return v.GetNumberValue(), true, nil
- case *structpb.Value_StringValue:
- return v.GetStringValue(), true, nil
- case *structpb.Value_StructValue:
- return v.GetStructValue(), true, nil
- default:
- return structpb.NullValue_NULL_VALUE, true, nil
- }
- case *wrapperspb.BoolValue:
- return v.GetValue(), true, nil
- case *wrapperspb.BytesValue:
- return v.GetValue(), true, nil
- case *wrapperspb.DoubleValue:
- return v.GetValue(), true, nil
- case *wrapperspb.FloatValue:
- return float64(v.GetValue()), true, nil
- case *wrapperspb.Int32Value:
- return int64(v.GetValue()), true, nil
- case *wrapperspb.Int64Value:
- return v.GetValue(), true, nil
- case *wrapperspb.StringValue:
- return v.GetValue(), true, nil
- case *wrapperspb.UInt32Value:
- return uint64(v.GetValue()), true, nil
- case *wrapperspb.UInt64Value:
- return v.GetValue(), true, nil
- }
- return msg, false, nil
-}
-
-// unwrapDynamic unwraps a reflected protobuf Message value.
-//
-// Returns the unwrapped value and 'true' if unwrapped, otherwise the input value and 'false'.
-func unwrapDynamic(desc description, refMsg protoreflect.Message) (any, bool, error) {
- msg := refMsg.Interface()
- if !refMsg.IsValid() {
- msg = desc.Zero()
- }
-	// In order to ensure that these wrapped types match the expectations of the CEL type system,
-	// the dynamicpb.Message must be merged with a protobuf instance of the well-known type value.
- typeName := string(refMsg.Descriptor().FullName())
- switch typeName {
- case "google.protobuf.Any":
- // Note, Any values require further unwrapping; however, this unwrapping may or may not
- // be to a well-known type. If the unwrapped value is a well-known type it will be further
- // unwrapped before being returned to the caller. Otherwise, the dynamic protobuf object
- // represented by the Any will be returned.
- unwrappedAny := &anypb.Any{}
- err := Merge(unwrappedAny, msg)
- if err != nil {
- return nil, false, fmt.Errorf("unwrap dynamic field failed: %v", err)
- }
- dynMsg, err := unwrappedAny.UnmarshalNew()
- if err != nil {
-			// Allow the error to move further up the stack as it should result in a type
- // conversion error if the caller does not recover it somehow.
- return nil, false, fmt.Errorf("unmarshal dynamic any failed: %v", err)
- }
- // Attempt to unwrap the dynamic type, otherwise return the dynamic message.
- unwrapped, nested, err := unwrapDynamic(desc, dynMsg.ProtoReflect())
- if err == nil && nested {
- return unwrapped, true, nil
- }
- return dynMsg, true, err
- case "google.protobuf.BoolValue",
- "google.protobuf.BytesValue",
- "google.protobuf.DoubleValue",
- "google.protobuf.FloatValue",
- "google.protobuf.Int32Value",
- "google.protobuf.Int64Value",
- "google.protobuf.StringValue",
- "google.protobuf.UInt32Value",
- "google.protobuf.UInt64Value":
- // The msg value is ignored when dealing with wrapper types as they have a null or value
- // behavior, rather than the standard zero value behavior of other proto message types.
- if !refMsg.IsValid() {
- return structpb.NullValue_NULL_VALUE, true, nil
- }
- valueField := refMsg.Descriptor().Fields().ByName("value")
- return refMsg.Get(valueField).Interface(), true, nil
- case "google.protobuf.Duration":
- unwrapped := &dpb.Duration{}
- err := Merge(unwrapped, msg)
- if err != nil {
- return nil, false, err
- }
- return unwrapped.AsDuration(), true, nil
- case "google.protobuf.ListValue":
- unwrapped := &structpb.ListValue{}
- err := Merge(unwrapped, msg)
- if err != nil {
- return nil, false, err
- }
- return unwrapped, true, nil
- case "google.protobuf.NullValue":
- return structpb.NullValue_NULL_VALUE, true, nil
- case "google.protobuf.Struct":
- unwrapped := &structpb.Struct{}
- err := Merge(unwrapped, msg)
- if err != nil {
- return nil, false, err
- }
- return unwrapped, true, nil
- case "google.protobuf.Timestamp":
- unwrapped := &tpb.Timestamp{}
- err := Merge(unwrapped, msg)
- if err != nil {
- return nil, false, err
- }
- return unwrapped.AsTime(), true, nil
- case "google.protobuf.Value":
- unwrapped := &structpb.Value{}
- err := Merge(unwrapped, msg)
- if err != nil {
- return nil, false, err
- }
- return unwrap(desc, unwrapped)
- }
- return msg, false, nil
-}
-
-// reflectTypeOf intercepts the reflect.Type call to ensure that dynamicpb.Message types preserve
-// well-known protobuf reflected types expected by the CEL type system.
-func reflectTypeOf(val any) reflect.Type {
- switch v := val.(type) {
- case proto.Message:
- return reflect.TypeOf(zeroValueOf(v))
- default:
- return reflect.TypeOf(v)
- }
-}
-
-// zeroValueOf will return the strongest possible proto.Message representing the default protobuf
-// message value of the input msg type.
-func zeroValueOf(msg proto.Message) proto.Message {
- if msg == nil {
- return nil
- }
- typeName := string(msg.ProtoReflect().Descriptor().FullName())
- zeroVal, found := zeroValueMap[typeName]
- if found {
- return zeroVal
- }
- return msg
-}
-
-var (
- jsonValueTypeURL = "types.googleapis.com/google.protobuf.Value"
-
- zeroValueMap = map[string]proto.Message{
- "google.protobuf.Any": &anypb.Any{TypeUrl: jsonValueTypeURL},
- "google.protobuf.Duration": &dpb.Duration{},
- "google.protobuf.ListValue": &structpb.ListValue{},
- "google.protobuf.Struct": &structpb.Struct{},
- "google.protobuf.Timestamp": &tpb.Timestamp{},
- "google.protobuf.Value": &structpb.Value{},
- "google.protobuf.BoolValue": wrapperspb.Bool(false),
- "google.protobuf.BytesValue": wrapperspb.Bytes([]byte{}),
- "google.protobuf.DoubleValue": wrapperspb.Double(0.0),
- "google.protobuf.FloatValue": wrapperspb.Float(0.0),
- "google.protobuf.Int32Value": wrapperspb.Int32(0),
- "google.protobuf.Int64Value": wrapperspb.Int64(0),
- "google.protobuf.StringValue": wrapperspb.String(""),
- "google.protobuf.UInt32Value": wrapperspb.UInt32(0),
- "google.protobuf.UInt64Value": wrapperspb.UInt64(0),
- }
-)
diff --git a/vendor/github.com/google/cel-go/common/types/provider.go b/vendor/github.com/google/cel-go/common/types/provider.go
deleted file mode 100644
index d301aa38a..000000000
--- a/vendor/github.com/google/cel-go/common/types/provider.go
+++ /dev/null
@@ -1,734 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
- "time"
-
- "google.golang.org/protobuf/proto"
- "google.golang.org/protobuf/reflect/protoreflect"
-
- "github.com/google/cel-go/common/types/pb"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
- anypb "google.golang.org/protobuf/types/known/anypb"
- dpb "google.golang.org/protobuf/types/known/durationpb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- tpb "google.golang.org/protobuf/types/known/timestamppb"
-)
-
-// Adapter converts native Go values of varying type and complexity to equivalent CEL values.
-type Adapter = ref.TypeAdapter
-
-// Provider specifies functions for creating new object instances and for resolving
-// enum values by name.
-type Provider interface {
- // EnumValue returns the numeric value of the given enum value name.
- EnumValue(enumName string) ref.Val
-
- // FindIdent takes a qualified identifier name and returns a ref.Val if one exists.
- FindIdent(identName string) (ref.Val, bool)
-
-	// FindStructType returns the Type given a qualified type name.
- //
- // For historical reasons, only struct types are expected to be returned through this
- // method, and the type values are expected to be wrapped in a TypeType instance using
- // TypeTypeWithParam().
- //
- // Returns false if not found.
- FindStructType(structType string) (*Type, bool)
-
-	// FindStructFieldNames returns the field names associated with the type, if the type
- // is found.
- FindStructFieldNames(structType string) ([]string, bool)
-
-	// FindStructFieldType returns the field type for a checked type value. Returns
- // false if the field could not be found.
- FindStructFieldType(structType, fieldName string) (*FieldType, bool)
-
- // NewValue creates a new type value from a qualified name and map of field
- // name to value.
- //
- // Note, for each value, the Val.ConvertToNative function will be invoked
- // to convert the Val to the field's native type. If an error occurs during
- // conversion, the NewValue will be a types.Err.
- NewValue(structType string, fields map[string]ref.Val) ref.Val
-}
-
-// FieldType represents a field's type value and whether that field supports presence detection.
-type FieldType struct {
- // Type of the field as a CEL native type value.
- Type *Type
-
- // IsSet indicates whether the field is set on an input object.
- IsSet ref.FieldTester
-
- // GetFrom retrieves the field value on the input object, if set.
- GetFrom ref.FieldGetter
-}
-
-// Registry provides type information for a set of registered types.
-type Registry struct {
- revTypeMap map[string]*Type
- pbdb *pb.Db
-}
-
-// NewRegistry accepts a list of proto message instances and returns a type
-// provider which can create new instances of the provided message or any
-// message that proto depends upon in its FileDescriptor.
-func NewRegistry(types ...proto.Message) (*Registry, error) {
- p := &Registry{
- revTypeMap: make(map[string]*Type),
- pbdb: pb.NewDb(),
- }
- err := p.RegisterType(
- BoolType,
- BytesType,
- DoubleType,
- DurationType,
- IntType,
- ListType,
- MapType,
- NullType,
- StringType,
- TimestampType,
- TypeType,
- UintType)
- if err != nil {
- return nil, err
- }
- // This block ensures that the well-known protobuf types are registered by default.
- for _, fd := range p.pbdb.FileDescriptions() {
- err = p.registerAllTypes(fd)
- if err != nil {
- return nil, err
- }
- }
- for _, msgType := range types {
- err = p.RegisterMessage(msgType)
- if err != nil {
- return nil, err
- }
- }
- return p, nil
-}
-
-// NewEmptyRegistry returns a registry which is completely unconfigured.
-func NewEmptyRegistry() *Registry {
- return &Registry{
- revTypeMap: make(map[string]*Type),
- pbdb: pb.NewDb(),
- }
-}
-
-// Copy copies the current state of the registry into its own memory space.
-func (p *Registry) Copy() *Registry {
- copy := &Registry{
- revTypeMap: make(map[string]*Type),
- pbdb: p.pbdb.Copy(),
- }
- for k, v := range p.revTypeMap {
- copy.revTypeMap[k] = v
- }
- return copy
-}
-
-// EnumValue returns the numeric value of the given enum value name.
-func (p *Registry) EnumValue(enumName string) ref.Val {
- enumVal, found := p.pbdb.DescribeEnum(enumName)
- if !found {
- return NewErr("unknown enum name '%s'", enumName)
- }
- return Int(enumVal.Value())
-}
-
-// FindFieldType returns the field type for a checked type value. Returns false if
-// the field could not be found.
-//
-// Deprecated: use FindStructFieldType
-func (p *Registry) FindFieldType(structType, fieldName string) (*ref.FieldType, bool) {
- msgType, found := p.pbdb.DescribeType(structType)
- if !found {
- return nil, false
- }
- field, found := msgType.FieldByName(fieldName)
- if !found {
- return nil, false
- }
- return &ref.FieldType{
- Type: field.CheckedType(),
- IsSet: field.IsSet,
- GetFrom: field.GetFrom}, true
-}
-
-// FindStructFieldNames returns the set of field names for the given struct type,
-// if the type exists in the registry.
-func (p *Registry) FindStructFieldNames(structType string) ([]string, bool) {
- msgType, found := p.pbdb.DescribeType(structType)
- if !found {
- return []string{}, false
- }
- fieldMap := msgType.FieldMap()
- fields := make([]string, len(fieldMap))
- idx := 0
- for f := range fieldMap {
- fields[idx] = f
- idx++
- }
- return fields, true
-}
-
-// FindStructFieldType returns the field type for a checked type value. Returns
-// false if the field could not be found.
-func (p *Registry) FindStructFieldType(structType, fieldName string) (*FieldType, bool) {
- msgType, found := p.pbdb.DescribeType(structType)
- if !found {
- return nil, false
- }
- field, found := msgType.FieldByName(fieldName)
- if !found {
- return nil, false
- }
- return &FieldType{
- Type: fieldDescToCELType(field),
- IsSet: field.IsSet,
- GetFrom: field.GetFrom}, true
-}
-
-// FindIdent takes a qualified identifier name and returns a ref.Val if one exists.
-func (p *Registry) FindIdent(identName string) (ref.Val, bool) {
- if t, found := p.revTypeMap[identName]; found {
- return t, true
- }
- if enumVal, found := p.pbdb.DescribeEnum(identName); found {
- return Int(enumVal.Value()), true
- }
- return nil, false
-}
-
-// FindType looks up the Type given a qualified typeName. Returns false if not found.
-//
-// Deprecated: use FindStructType
-func (p *Registry) FindType(structType string) (*exprpb.Type, bool) {
- if _, found := p.pbdb.DescribeType(structType); !found {
- return nil, false
- }
- if structType != "" && structType[0] == '.' {
- structType = structType[1:]
- }
- return &exprpb.Type{
- TypeKind: &exprpb.Type_Type{
- Type: &exprpb.Type{
- TypeKind: &exprpb.Type_MessageType{
- MessageType: structType}}}}, true
-}
-
-// FindStructType returns the Type given a qualified type name.
-//
-// For historical reasons, only struct types are expected to be returned through this
-// method, and the type values are expected to be wrapped in a TypeType instance using
-// TypeTypeWithParam().
-//
-// Returns false if not found.
-func (p *Registry) FindStructType(structType string) (*Type, bool) {
- if _, found := p.pbdb.DescribeType(structType); !found {
- return nil, false
- }
- if structType != "" && structType[0] == '.' {
- structType = structType[1:]
- }
- return NewTypeTypeWithParam(NewObjectType(structType)), true
-}
-
-// NewValue creates a new type value from a qualified name and map of field
-// name to value.
-//
-// Note, for each value, the Val.ConvertToNative function will be invoked
-// to convert the Val to the field's native type. If an error occurs during
-// conversion, the NewValue will be a types.Err.
-func (p *Registry) NewValue(structType string, fields map[string]ref.Val) ref.Val {
- td, found := p.pbdb.DescribeType(structType)
- if !found {
- return NewErr("unknown type '%s'", structType)
- }
- msg := td.New()
- fieldMap := td.FieldMap()
- for name, value := range fields {
- field, found := fieldMap[name]
- if !found {
- return NewErr("no such field: %s", name)
- }
- err := msgSetField(msg, field, value)
- if err != nil {
- return &Err{err}
- }
- }
- return p.NativeToValue(msg.Interface())
-}
-
-// RegisterDescriptor registers the contents of a protocol buffer `FileDescriptor`.
-func (p *Registry) RegisterDescriptor(fileDesc protoreflect.FileDescriptor) error {
- fd, err := p.pbdb.RegisterDescriptor(fileDesc)
- if err != nil {
- return err
- }
- return p.registerAllTypes(fd)
-}
-
-// RegisterMessage registers a protocol buffer message and its dependencies.
-func (p *Registry) RegisterMessage(message proto.Message) error {
- fd, err := p.pbdb.RegisterMessage(message)
- if err != nil {
- return err
- }
- return p.registerAllTypes(fd)
-}
-
-// RegisterType registers a type value with the provider which ensures the provider is aware of how to
-// map the type to an identifier.
-//
-// If the `ref.Type` value is a `*types.Type` it will be registered directly by its runtime type name.
-// If the `ref.Type` value is not a `*types.Type` instance, a `*types.Type` instance is created which
-// reflects the traits present on the input and the runtime type name. By default this foreign type is
-// treated as a types.StructKind. To avoid potential issues where the `ref.Type` value does not match the
-// generated `*types.Type` instance, consider always using the `*types.Type` to represent type extensions
-// to CEL, even when they're not based on protobuf types.
-func (p *Registry) RegisterType(types ...ref.Type) error {
- for _, t := range types {
- celType := maybeForeignType(t)
- existing, found := p.revTypeMap[t.TypeName()]
- if !found {
- p.revTypeMap[t.TypeName()] = celType
- continue
- }
- if !existing.IsEquivalentType(celType) {
- return fmt.Errorf("type registration conflict. found: %v, input: %v", existing, celType)
- }
- if existing.traitMask != celType.traitMask {
- return fmt.Errorf(
- "type registered with conflicting traits: %v with traits %v, input: %v",
- existing.TypeName(), existing.traitMask, celType.traitMask)
- }
- }
- return nil
-}
-
-// NativeToValue converts various "native" types to ref.Val with this specific implementation
-// providing support for custom proto-based types.
-//
-// This method should be the inverse of ref.Val.ConvertToNative.
-func (p *Registry) NativeToValue(value any) ref.Val {
- if val, found := nativeToValue(p, value); found {
- return val
- }
- switch v := value.(type) {
- case proto.Message:
- typeName := string(v.ProtoReflect().Descriptor().FullName())
- td, found := p.pbdb.DescribeType(typeName)
- if !found {
- return NewErr("unknown type: '%s'", typeName)
- }
- unwrapped, isUnwrapped, err := td.MaybeUnwrap(v)
- if err != nil {
- return UnsupportedRefValConversionErr(v)
- }
- if isUnwrapped {
- return p.NativeToValue(unwrapped)
- }
- typeVal, found := p.FindIdent(typeName)
- if !found {
- return NewErr("unknown type: '%s'", typeName)
- }
- return NewObject(p, td, typeVal, v)
- case *pb.Map:
- return NewProtoMap(p, v)
- case protoreflect.List:
- return NewProtoList(p, v)
- case protoreflect.Message:
- return p.NativeToValue(v.Interface())
- case protoreflect.Value:
- return p.NativeToValue(v.Interface())
- }
- return UnsupportedRefValConversionErr(value)
-}
-
-func (p *Registry) registerAllTypes(fd *pb.FileDescription) error {
- for _, typeName := range fd.GetTypeNames() {
- // skip well-known type names since they're automatically sanitized
- // during NewObjectType() calls.
- if _, found := checkedWellKnowns[typeName]; found {
- continue
- }
- err := p.RegisterType(NewObjectTypeValue(typeName))
- if err != nil {
- return err
- }
- }
- return nil
-}
-
-func fieldDescToCELType(field *pb.FieldDescription) *Type {
- if field.IsMap() {
- return NewMapType(
- singularFieldDescToCELType(field.KeyType),
- singularFieldDescToCELType(field.ValueType))
- }
- if field.IsList() {
- return NewListType(singularFieldDescToCELType(field))
- }
- return singularFieldDescToCELType(field)
-}
-
-func singularFieldDescToCELType(field *pb.FieldDescription) *Type {
- if field.IsMessage() {
- return NewObjectType(string(field.Descriptor().Message().FullName()))
- }
- if field.IsEnum() {
- return IntType
- }
- return ProtoCELPrimitives[field.ProtoKind()]
-}
-
-// defaultTypeAdapter converts go native types to CEL values.
-type defaultTypeAdapter struct{}
-
-var (
- // DefaultTypeAdapter adapts canonical CEL types from their equivalent Go values.
- DefaultTypeAdapter = &defaultTypeAdapter{}
-)
-
-// NativeToValue implements the ref.TypeAdapter interface.
-func (a *defaultTypeAdapter) NativeToValue(value any) ref.Val {
- if val, found := nativeToValue(a, value); found {
- return val
- }
- return UnsupportedRefValConversionErr(value)
-}
-
-// nativeToValue returns the converted (ref.Val, true) if a conversion is found,
-// otherwise (nil, false).
-func nativeToValue(a Adapter, value any) (ref.Val, bool) {
- switch v := value.(type) {
- case nil:
- return NullValue, true
- case *Bool:
- if v != nil {
- return *v, true
- }
- case *Bytes:
- if v != nil {
- return *v, true
- }
- case *Double:
- if v != nil {
- return *v, true
- }
- case *Int:
- if v != nil {
- return *v, true
- }
- case *String:
- if v != nil {
- return *v, true
- }
- case *Uint:
- if v != nil {
- return *v, true
- }
- case bool:
- return Bool(v), true
- case int:
- return Int(v), true
- case int32:
- return Int(v), true
- case int64:
- return Int(v), true
- case uint:
- return Uint(v), true
- case uint32:
- return Uint(v), true
- case uint64:
- return Uint(v), true
- case float32:
- return Double(v), true
- case float64:
- return Double(v), true
- case string:
- return String(v), true
- case *dpb.Duration:
- return Duration{Duration: v.AsDuration()}, true
- case time.Duration:
- return Duration{Duration: v}, true
- case *tpb.Timestamp:
- return Timestamp{Time: v.AsTime()}, true
- case time.Time:
- return Timestamp{Time: v}, true
- case *bool:
- if v != nil {
- return Bool(*v), true
- }
- case *float32:
- if v != nil {
- return Double(*v), true
- }
- case *float64:
- if v != nil {
- return Double(*v), true
- }
- case *int:
- if v != nil {
- return Int(*v), true
- }
- case *int32:
- if v != nil {
- return Int(*v), true
- }
- case *int64:
- if v != nil {
- return Int(*v), true
- }
- case *string:
- if v != nil {
- return String(*v), true
- }
- case *uint:
- if v != nil {
- return Uint(*v), true
- }
- case *uint32:
- if v != nil {
- return Uint(*v), true
- }
- case *uint64:
- if v != nil {
- return Uint(*v), true
- }
- case []byte:
- return Bytes(v), true
-	// specializations for common list types.
- case []string:
- return NewStringList(a, v), true
- case []ref.Val:
- return NewRefValList(a, v), true
- // specializations for common map types.
- case map[string]string:
- return NewStringStringMap(a, v), true
- case map[string]any:
- return NewStringInterfaceMap(a, v), true
- case map[ref.Val]ref.Val:
- return NewRefValMap(a, v), true
- // additional specializations may be added upon request / need.
- case *anypb.Any:
- if v == nil {
- return UnsupportedRefValConversionErr(v), true
- }
- unpackedAny, err := v.UnmarshalNew()
- if err != nil {
- return NewErr("anypb.UnmarshalNew() failed for type %q: %v", v.GetTypeUrl(), err), true
- }
- return a.NativeToValue(unpackedAny), true
- case *structpb.NullValue, structpb.NullValue:
- return NullValue, true
- case *structpb.ListValue:
- return NewJSONList(a, v), true
- case *structpb.Struct:
- return NewJSONStruct(a, v), true
- case ref.Val:
- return v, true
- case protoreflect.EnumNumber:
- return Int(v), true
- case proto.Message:
- if v == nil {
- return UnsupportedRefValConversionErr(v), true
- }
- typeName := string(v.ProtoReflect().Descriptor().FullName())
- td, found := pb.DefaultDb.DescribeType(typeName)
- if !found {
- return nil, false
- }
- val, unwrapped, err := td.MaybeUnwrap(v)
- if err != nil {
- return UnsupportedRefValConversionErr(v), true
- }
- if !unwrapped {
- return nil, false
- }
- return a.NativeToValue(val), true
- // Note: dynamicpb.Message implements the proto.Message _and_ protoreflect.Message interfaces
- // which means that this case must appear after handling a proto.Message type.
- case protoreflect.Message:
- return a.NativeToValue(v.Interface()), true
- default:
- refValue := reflect.ValueOf(v)
- if refValue.Kind() == reflect.Ptr {
- if refValue.IsNil() {
- return UnsupportedRefValConversionErr(v), true
- }
- refValue = refValue.Elem()
- }
- refKind := refValue.Kind()
- switch refKind {
- case reflect.Array, reflect.Slice:
- return NewDynamicList(a, v), true
- case reflect.Map:
- return NewDynamicMap(a, v), true
-		// type aliases of primitive types cannot be asserted as that type, but rather need to be
-		// converted to the corresponding primitive kind before being converted to a CEL representation.
- case reflect.Int32:
- intType := reflect.TypeOf(int32(0))
- return Int(refValue.Convert(intType).Interface().(int32)), true
- case reflect.Int64:
- intType := reflect.TypeOf(int64(0))
- return Int(refValue.Convert(intType).Interface().(int64)), true
- case reflect.Uint32:
- uintType := reflect.TypeOf(uint32(0))
- return Uint(refValue.Convert(uintType).Interface().(uint32)), true
- case reflect.Uint64:
- uintType := reflect.TypeOf(uint64(0))
- return Uint(refValue.Convert(uintType).Interface().(uint64)), true
- case reflect.Float32:
- doubleType := reflect.TypeOf(float32(0))
- return Double(refValue.Convert(doubleType).Interface().(float32)), true
- case reflect.Float64:
- doubleType := reflect.TypeOf(float64(0))
- return Double(refValue.Convert(doubleType).Interface().(float64)), true
- }
- }
- return nil, false
-}
-
-func msgSetField(target protoreflect.Message, field *pb.FieldDescription, val ref.Val) error {
- if field.IsList() {
- lv := target.NewField(field.Descriptor())
- list, ok := val.(traits.Lister)
- if !ok {
- return unsupportedTypeConversionError(field, val)
- }
- err := msgSetListField(lv.List(), field, list)
- if err != nil {
- return err
- }
- target.Set(field.Descriptor(), lv)
- return nil
- }
- if field.IsMap() {
- mv := target.NewField(field.Descriptor())
- mp, ok := val.(traits.Mapper)
- if !ok {
- return unsupportedTypeConversionError(field, val)
- }
- err := msgSetMapField(mv.Map(), field, mp)
- if err != nil {
- return err
- }
- target.Set(field.Descriptor(), mv)
- return nil
- }
- v, err := val.ConvertToNative(field.ReflectType())
- if err != nil {
- return fieldTypeConversionError(field, err)
- }
- if v == nil {
- return nil
- }
- switch pv := v.(type) {
- case proto.Message:
- v = pv.ProtoReflect()
- }
- target.Set(field.Descriptor(), protoreflect.ValueOf(v))
- return nil
-}
-
-func msgSetListField(target protoreflect.List, listField *pb.FieldDescription, listVal traits.Lister) error {
- elemReflectType := listField.ReflectType().Elem()
- for i := Int(0); i < listVal.Size().(Int); i++ {
- elem := listVal.Get(i)
- elemVal, err := elem.ConvertToNative(elemReflectType)
- if err != nil {
- return fieldTypeConversionError(listField, err)
- }
- if elemVal == nil {
- continue
- }
- switch ev := elemVal.(type) {
- case proto.Message:
- elemVal = ev.ProtoReflect()
- }
- target.Append(protoreflect.ValueOf(elemVal))
- }
- return nil
-}
-
-func msgSetMapField(target protoreflect.Map, mapField *pb.FieldDescription, mapVal traits.Mapper) error {
- targetKeyType := mapField.KeyType.ReflectType()
- targetValType := mapField.ValueType.ReflectType()
- it := mapVal.Iterator()
- for it.HasNext() == True {
- key := it.Next()
- val := mapVal.Get(key)
- k, err := key.ConvertToNative(targetKeyType)
- if err != nil {
- return fieldTypeConversionError(mapField, err)
- }
- v, err := val.ConvertToNative(targetValType)
- if err != nil {
- return fieldTypeConversionError(mapField, err)
- }
- if v == nil {
- continue
- }
- switch pv := v.(type) {
- case proto.Message:
- v = pv.ProtoReflect()
- }
- target.Set(protoreflect.ValueOf(k).MapKey(), protoreflect.ValueOf(v))
- }
- return nil
-}
-
-func unsupportedTypeConversionError(field *pb.FieldDescription, val ref.Val) error {
- msgName := field.Descriptor().ContainingMessage().FullName()
- return fmt.Errorf("unsupported field type for %v.%v: %v", msgName, field.Name(), val.Type())
-}
-
-func fieldTypeConversionError(field *pb.FieldDescription, err error) error {
- msgName := field.Descriptor().ContainingMessage().FullName()
- return fmt.Errorf("field type conversion error for %v.%v value type: %v", msgName, field.Name(), err)
-}
-
-var (
- // ProtoCELPrimitives provides a map from the protoreflect Kind to the equivalent CEL type.
- ProtoCELPrimitives = map[protoreflect.Kind]*Type{
- protoreflect.BoolKind: BoolType,
- protoreflect.BytesKind: BytesType,
- protoreflect.DoubleKind: DoubleType,
- protoreflect.FloatKind: DoubleType,
- protoreflect.Int32Kind: IntType,
- protoreflect.Int64Kind: IntType,
- protoreflect.Sint32Kind: IntType,
- protoreflect.Sint64Kind: IntType,
- protoreflect.Uint32Kind: UintType,
- protoreflect.Uint64Kind: UintType,
- protoreflect.Fixed32Kind: UintType,
- protoreflect.Fixed64Kind: UintType,
- protoreflect.Sfixed32Kind: IntType,
- protoreflect.Sfixed64Kind: IntType,
- protoreflect.StringKind: StringType,
- }
-)
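As a quick reference for the Registry API removed in this file, a minimal usage sketch (assuming the upstream cel-go packages remain importable; only calls visible in the removed code are used):

package main

import (
	"fmt"
	"time"

	"github.com/google/cel-go/common/types"
	durationpb "google.golang.org/protobuf/types/known/durationpb"
)

func main() {
	// NewRegistry seeds the provider with the CEL primitive types and the
	// protobuf well-known types; application messages may be passed as arguments.
	reg, err := types.NewRegistry()
	if err != nil {
		panic(err)
	}

	// NativeToValue adapts Go and protobuf values into CEL ref.Val instances,
	// unwrapping well-known types such as google.protobuf.Duration along the way.
	v := reg.NativeToValue(durationpb.New(90 * time.Second))
	fmt.Println(v.Type().TypeName(), v.Value()) // google.protobuf.Duration 1m30s
}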
diff --git a/vendor/github.com/google/cel-go/common/types/ref/BUILD.bazel b/vendor/github.com/google/cel-go/common/types/ref/BUILD.bazel
deleted file mode 100644
index 79330c332..000000000
--- a/vendor/github.com/google/cel-go/common/types/ref/BUILD.bazel
+++ /dev/null
@@ -1,20 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "provider.go",
- "reference.go",
- ],
- importpath = "github.com/google/cel-go/common/types/ref",
- deps = [
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//reflect/protoreflect:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/types/ref/provider.go b/vendor/github.com/google/cel-go/common/types/ref/provider.go
deleted file mode 100644
index b9820023d..000000000
--- a/vendor/github.com/google/cel-go/common/types/ref/provider.go
+++ /dev/null
@@ -1,102 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package ref
-
-import (
- "google.golang.org/protobuf/proto"
- "google.golang.org/protobuf/reflect/protoreflect"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-// TypeProvider specifies functions for creating new object instances and for
-// resolving enum values by name.
-//
-// Deprecated: use types.Provider
-type TypeProvider interface {
- // EnumValue returns the numeric value of the given enum value name.
- EnumValue(enumName string) Val
-
- // FindIdent takes a qualified identifier name and returns a Value if one exists.
- FindIdent(identName string) (Val, bool)
-
- // FindType looks up the Type given a qualified typeName. Returns false if not found.
- FindType(typeName string) (*exprpb.Type, bool)
-
-	// FindFieldType returns the field type for a checked type value. Returns false if
- // the field could not be found.
- FindFieldType(messageType, fieldName string) (*FieldType, bool)
-
- // NewValue creates a new type value from a qualified name and map of field name
- // to value.
- //
- // Note, for each value, the Val.ConvertToNative function will be invoked to convert
- // the Val to the field's native type. If an error occurs during conversion, the
- // NewValue will be a types.Err.
- NewValue(typeName string, fields map[string]Val) Val
-}
-
-// TypeAdapter converts native Go values of varying type and complexity to equivalent CEL values.
-//
-// Deprecated: use types.Adapter
-type TypeAdapter interface {
- // NativeToValue converts the input `value` to a CEL `ref.Val`.
- NativeToValue(value any) Val
-}
-
-// TypeRegistry allows third-parties to add custom types to CEL. Not all `TypeProvider`
-// implementations support type-customization, so these features are optional. However, a
-// `TypeRegistry` should be a `TypeProvider` and a `TypeAdapter` to ensure that types
-// which are registered can be converted to CEL representations.
-//
-// Deprecated: use types.Registry
-type TypeRegistry interface {
- TypeAdapter
- TypeProvider
-
- // RegisterDescriptor registers the contents of a protocol buffer `FileDescriptor`.
- RegisterDescriptor(fileDesc protoreflect.FileDescriptor) error
-
- // RegisterMessage registers a protocol buffer message and its dependencies.
- RegisterMessage(message proto.Message) error
-
- // RegisterType registers a type value with the provider which ensures the
- // provider is aware of how to map the type to an identifier.
- //
- // If a type is provided more than once with an alternative definition, the
- // call will result in an error.
- RegisterType(types ...Type) error
-}
-
-// FieldType represents a field's type value and whether that field supports
-// presence detection.
-//
-// Deprecated: use types.FieldType
-type FieldType struct {
- // Type of the field as a protobuf type value.
- Type *exprpb.Type
-
- // IsSet indicates whether the field is set on an input object.
- IsSet FieldTester
-
- // GetFrom retrieves the field value on the input object, if set.
- GetFrom FieldGetter
-}
-
-// FieldTester is used to test field presence on an input object.
-type FieldTester func(target any) bool
-
-// FieldGetter is used to get the field value from an input object, if set.
-type FieldGetter func(target any) (any, error)
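FieldTester and FieldGetter above are plain function types, so a custom (non-protobuf) provider can populate a ref.FieldType with closures. A hedged sketch, using a hypothetical Account struct that is not part of cel-go:

package main

import (
	"errors"
	"fmt"

	"github.com/google/cel-go/common/types/ref"
	exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)

// Account is a hypothetical application type used only for illustration.
type Account struct {
	Owner string
}

func ownerFieldType() *ref.FieldType {
	return &ref.FieldType{
		// Checked type of the field, expressed as a protobuf Type message.
		Type: &exprpb.Type{TypeKind: &exprpb.Type_Primitive{Primitive: exprpb.Type_STRING}},
		// IsSet reports presence; here a non-empty owner counts as set.
		IsSet: func(target any) bool {
			a, ok := target.(*Account)
			return ok && a.Owner != ""
		},
		// GetFrom retrieves the field value from the input object.
		GetFrom: func(target any) (any, error) {
			a, ok := target.(*Account)
			if !ok {
				return nil, errors.New("unsupported target type")
			}
			return a.Owner, nil
		},
	}
}

func main() {
	ft := ownerFieldType()
	val, _ := ft.GetFrom(&Account{Owner: "alice"})
	fmt.Println(ft.IsSet(&Account{Owner: "alice"}), val) // true alice
}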
diff --git a/vendor/github.com/google/cel-go/common/types/ref/reference.go b/vendor/github.com/google/cel-go/common/types/ref/reference.go
deleted file mode 100644
index e0d58145c..000000000
--- a/vendor/github.com/google/cel-go/common/types/ref/reference.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package ref contains the reference interfaces used throughout the types components.
-package ref
-
-import (
- "reflect"
-)
-
-// Type interface indicates the name of a given type.
-type Type interface {
- // HasTrait returns whether the type has a given trait associated with it.
- //
- // See common/types/traits/traits.go for a list of supported traits.
- HasTrait(trait int) bool
-
- // TypeName returns the qualified type name of the type.
- //
- // The type name is also used as the type's identifier name at type-check and interpretation time.
- TypeName() string
-}
-
-// Val interface defines the functions supported by all expression values.
-// Val implementations may specialize the behavior of the value through the addition of traits.
-type Val interface {
- // ConvertToNative converts the Value to a native Go struct according to the
- // reflected type description, or error if the conversion is not feasible.
- //
- // The ConvertToNative method is intended to be used to support conversion between CEL types
-	// and native types during object creation expressions or by clients who need to adapt the
- // returned CEL value into an equivalent Go value instance.
- //
- // When implementing or using ConvertToNative, the following guidelines apply:
- // - Use ConvertToNative when marshalling CEL evaluation results to native types.
- // - Do not use ConvertToNative within CEL extension functions.
- // - Document whether your implementation supports non-CEL field types, such as Go or Protobuf.
- ConvertToNative(typeDesc reflect.Type) (any, error)
-
- // ConvertToType supports type conversions between CEL value types supported by the expression language.
- ConvertToType(typeValue Type) Val
-
- // Equal returns true if the `other` value has the same type and content as the implementing struct.
- Equal(other Val) Val
-
- // Type returns the TypeValue of the value.
- Type() Type
-
- // Value returns the raw value of the instance which may not be directly compatible with the expression
- // language types.
- Value() any
-}
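A small sketch of the ConvertToNative guideline documented above (marshalling an evaluation result into a native Go type); the types.String literal merely stands in for a value produced by evaluation:

package main

import (
	"fmt"
	"reflect"

	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/common/types/ref"
)

func main() {
	// Any evaluation result satisfies ref.Val; a string literal is used here.
	var result ref.Val = types.String("hello")

	// ConvertToNative marshals the CEL value into the requested Go type.
	native, err := result.ConvertToNative(reflect.TypeOf(""))
	if err != nil {
		panic(err)
	}
	fmt.Printf("%T %v\n", native, native) // string hello
}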
diff --git a/vendor/github.com/google/cel-go/common/types/string.go b/vendor/github.com/google/cel-go/common/types/string.go
deleted file mode 100644
index 028e6824d..000000000
--- a/vendor/github.com/google/cel-go/common/types/string.go
+++ /dev/null
@@ -1,229 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
- "regexp"
- "strconv"
- "strings"
- "time"
-
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- wrapperspb "google.golang.org/protobuf/types/known/wrapperspb"
-)
-
-// String type implementation which supports addition, comparison, matching,
-// and size functions.
-type String string
-
-var (
- stringOneArgOverloads = map[string]func(ref.Val, ref.Val) ref.Val{
- overloads.Contains: StringContains,
- overloads.EndsWith: StringEndsWith,
- overloads.StartsWith: StringStartsWith,
- }
-
- stringWrapperType = reflect.TypeOf(&wrapperspb.StringValue{})
-)
-
-// Add implements traits.Adder.Add.
-func (s String) Add(other ref.Val) ref.Val {
- otherString, ok := other.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- return s + otherString
-}
-
-// Compare implements traits.Comparer.Compare.
-func (s String) Compare(other ref.Val) ref.Val {
- otherString, ok := other.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- return Int(strings.Compare(s.Value().(string), otherString.Value().(string)))
-}
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (s String) ConvertToNative(typeDesc reflect.Type) (any, error) {
- switch typeDesc.Kind() {
- case reflect.String:
- if reflect.TypeOf(s).AssignableTo(typeDesc) {
- return s, nil
- }
- return s.Value(), nil
- case reflect.Ptr:
- switch typeDesc {
- case anyValueType:
- // Primitives must be wrapped before being set on an Any field.
- return anypb.New(wrapperspb.String(string(s)))
- case jsonValueType:
- // Convert to a protobuf representation of a JSON String.
- return structpb.NewStringValue(string(s)), nil
- case stringWrapperType:
- // Convert to a wrapperspb.StringValue.
- return wrapperspb.String(string(s)), nil
- }
- if typeDesc.Elem().Kind() == reflect.String {
- p := s.Value().(string)
- return &p, nil
- }
- case reflect.Interface:
- sv := s.Value()
- if reflect.TypeOf(sv).Implements(typeDesc) {
- return sv, nil
- }
- if reflect.TypeOf(s).Implements(typeDesc) {
- return s, nil
- }
- }
- return nil, fmt.Errorf(
- "unsupported native conversion from string to '%v'", typeDesc)
-}
-
-// ConvertToType implements ref.Val.ConvertToType.
-func (s String) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case IntType:
- if n, err := strconv.ParseInt(s.Value().(string), 10, 64); err == nil {
- return Int(n)
- }
- case UintType:
- if n, err := strconv.ParseUint(s.Value().(string), 10, 64); err == nil {
- return Uint(n)
- }
- case DoubleType:
- if n, err := strconv.ParseFloat(s.Value().(string), 64); err == nil {
- return Double(n)
- }
- case BoolType:
- if b, err := strconv.ParseBool(s.Value().(string)); err == nil {
- return Bool(b)
- }
- case BytesType:
- return Bytes(s)
- case DurationType:
- if d, err := time.ParseDuration(s.Value().(string)); err == nil {
- return durationOf(d)
- }
- case TimestampType:
- if t, err := time.Parse(time.RFC3339, s.Value().(string)); err == nil {
- if t.Unix() < minUnixTime || t.Unix() > maxUnixTime {
- return celErrTimestampOverflow
- }
- return timestampOf(t)
- }
- case StringType:
- return s
- case TypeType:
- return StringType
- }
- return NewErr("type conversion error from '%s' to '%s'", StringType, typeVal)
-}
-
-// Equal implements ref.Val.Equal.
-func (s String) Equal(other ref.Val) ref.Val {
- otherString, ok := other.(String)
- return Bool(ok && s == otherString)
-}
-
-// IsZeroValue returns true if the string is empty.
-func (s String) IsZeroValue() bool {
- return len(s) == 0
-}
-
-// Match implements traits.Matcher.Match.
-func (s String) Match(pattern ref.Val) ref.Val {
- pat, ok := pattern.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(pattern)
- }
- matched, err := regexp.MatchString(pat.Value().(string), s.Value().(string))
- if err != nil {
- return &Err{err}
- }
- return Bool(matched)
-}
-
-// Receive implements traits.Receiver.Receive.
-func (s String) Receive(function string, overload string, args []ref.Val) ref.Val {
- switch len(args) {
- case 1:
- if f, found := stringOneArgOverloads[function]; found {
- return f(s, args[0])
- }
- }
- return NoSuchOverloadErr()
-}
-
-// Size implements traits.Sizer.Size.
-func (s String) Size() ref.Val {
- return Int(len([]rune(s.Value().(string))))
-}
-
-// Type implements ref.Val.Type.
-func (s String) Type() ref.Type {
- return StringType
-}
-
-// Value implements ref.Val.Value.
-func (s String) Value() any {
- return string(s)
-}
-
-// StringContains returns whether the string contains a substring.
-func StringContains(s, sub ref.Val) ref.Val {
- str, ok := s.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(s)
- }
- subStr, ok := sub.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(sub)
- }
- return Bool(strings.Contains(string(str), string(subStr)))
-}
-
-// StringEndsWith returns whether the target string ends with the input suffix.
-func StringEndsWith(s, suf ref.Val) ref.Val {
- str, ok := s.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(s)
- }
- sufStr, ok := suf.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(suf)
- }
- return Bool(strings.HasSuffix(string(str), string(sufStr)))
-}
-
-// StringStartsWith returns whether the target string starts with the input prefix.
-func StringStartsWith(s, pre ref.Val) ref.Val {
- str, ok := s.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(s)
- }
- preStr, ok := pre.(String)
- if !ok {
- return MaybeNoSuchOverloadErr(pre)
- }
- return Bool(strings.HasPrefix(string(str), string(preStr)))
-}
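For reference, the observable behavior of the String overloads removed above, shown as a hedged sketch that relies only on the exported helpers defined in that file:

package main

import (
	"fmt"

	"github.com/google/cel-go/common/types"
)

func main() {
	s := types.String("42")

	// ConvertToType parses the string according to the target CEL type.
	fmt.Println(s.ConvertToType(types.IntType))    // 42 as types.Int
	fmt.Println(s.ConvertToType(types.DoubleType)) // 42 as types.Double

	// The one-argument overloads back the CEL string functions.
	fmt.Println(types.StringContains(types.String("hello"), types.String("ell")))  // true
	fmt.Println(types.StringEndsWith(types.String("hello"), types.String("lo")))   // true
	fmt.Println(types.StringStartsWith(types.String("hello"), types.String("he"))) // true
}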
diff --git a/vendor/github.com/google/cel-go/common/types/timestamp.go b/vendor/github.com/google/cel-go/common/types/timestamp.go
deleted file mode 100644
index 33acdea8e..000000000
--- a/vendor/github.com/google/cel-go/common/types/timestamp.go
+++ /dev/null
@@ -1,311 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
- "strconv"
- "strings"
- "time"
-
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- tpb "google.golang.org/protobuf/types/known/timestamppb"
-)
-
-// Timestamp type implementation which supports add, compare, and subtract
-// operations. Timestamps are also capable of participating in dynamic
-// function dispatch to instance methods.
-type Timestamp struct {
- time.Time
-}
-
-func timestampOf(t time.Time) Timestamp {
- // Note that this function does not validate that time.Time is in our supported range.
- return Timestamp{Time: t}
-}
-
-const (
- // The number of seconds between year 1 and year 1970. This is borrowed from
- // https://golang.org/src/time/time.go.
- unixToInternal int64 = (1969*365 + 1969/4 - 1969/100 + 1969/400) * (60 * 60 * 24)
-
- // Number of seconds between `0001-01-01T00:00:00Z` and the Unix epoch.
- minUnixTime int64 = -62135596800
- // Number of seconds between `9999-12-31T23:59:59.999999999Z` and the Unix epoch.
- maxUnixTime int64 = 253402300799
-)
-
-// Add implements traits.Adder.Add.
-func (t Timestamp) Add(other ref.Val) ref.Val {
- switch other.Type() {
- case DurationType:
- return other.(Duration).Add(t)
- }
- return MaybeNoSuchOverloadErr(other)
-}
-
-// Compare implements traits.Comparer.Compare.
-func (t Timestamp) Compare(other ref.Val) ref.Val {
- if TimestampType != other.Type() {
- return MaybeNoSuchOverloadErr(other)
- }
- ts1 := t.Time
- ts2 := other.(Timestamp).Time
- switch {
- case ts1.Before(ts2):
- return IntNegOne
- case ts1.After(ts2):
- return IntOne
- default:
- return IntZero
- }
-}
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (t Timestamp) ConvertToNative(typeDesc reflect.Type) (any, error) {
-	// If the timestamp is already assignable to the desired type, return it.
- if reflect.TypeOf(t.Time).AssignableTo(typeDesc) {
- return t.Time, nil
- }
- if reflect.TypeOf(t).AssignableTo(typeDesc) {
- return t, nil
- }
- switch typeDesc {
- case anyValueType:
- // Pack the underlying time as a tpb.Timestamp into an Any value.
- return anypb.New(tpb.New(t.Time))
- case jsonValueType:
- // CEL follows the proto3 to JSON conversion which formats as an RFC 3339 encoded JSON
- // string.
- v := t.ConvertToType(StringType)
- if IsError(v) {
- return nil, v.(*Err)
- }
- return structpb.NewStringValue(string(v.(String))), nil
- case timestampValueType:
- // Unwrap the underlying tpb.Timestamp.
- return tpb.New(t.Time), nil
- }
- return nil, fmt.Errorf("type conversion error from 'Timestamp' to '%v'", typeDesc)
-}
-
-// ConvertToType implements ref.Val.ConvertToType.
-func (t Timestamp) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case StringType:
- return String(t.Format(time.RFC3339Nano))
- case IntType:
- // Return the Unix time in seconds since 1970
- return Int(t.Unix())
- case TimestampType:
- return t
- case TypeType:
- return TimestampType
- }
- return NewErr("type conversion error from '%s' to '%s'", TimestampType, typeVal)
-}
-
-// Equal implements ref.Val.Equal.
-func (t Timestamp) Equal(other ref.Val) ref.Val {
- otherTime, ok := other.(Timestamp)
- return Bool(ok && t.Time.Equal(otherTime.Time))
-}
-
-// IsZeroValue returns true if the timestamp is epoch 0.
-func (t Timestamp) IsZeroValue() bool {
- return t.IsZero()
-}
-
-// Receive implements traits.Receiver.Receive.
-func (t Timestamp) Receive(function string, overload string, args []ref.Val) ref.Val {
- switch len(args) {
- case 0:
- if f, found := timestampZeroArgOverloads[function]; found {
- return f(t.Time)
- }
- case 1:
- if f, found := timestampOneArgOverloads[function]; found {
- return f(t.Time, args[0])
- }
- }
- return NoSuchOverloadErr()
-}
-
-// Subtract implements traits.Subtractor.Subtract.
-func (t Timestamp) Subtract(subtrahend ref.Val) ref.Val {
- switch subtrahend.Type() {
- case DurationType:
- dur := subtrahend.(Duration)
- val, err := subtractTimeDurationChecked(t.Time, dur.Duration)
- if err != nil {
- return WrapErr(err)
- }
- return timestampOf(val)
- case TimestampType:
- t2 := subtrahend.(Timestamp).Time
- val, err := subtractTimeChecked(t.Time, t2)
- if err != nil {
- return WrapErr(err)
- }
- return durationOf(val)
- }
- return MaybeNoSuchOverloadErr(subtrahend)
-}
-
-// Type implements ref.Val.Type.
-func (t Timestamp) Type() ref.Type {
- return TimestampType
-}
-
-// Value implements ref.Val.Value.
-func (t Timestamp) Value() any {
- return t.Time
-}
-
-var (
- timestampValueType = reflect.TypeOf(&tpb.Timestamp{})
-
- timestampZeroArgOverloads = map[string]func(time.Time) ref.Val{
- overloads.TimeGetFullYear: timestampGetFullYear,
- overloads.TimeGetMonth: timestampGetMonth,
- overloads.TimeGetDayOfYear: timestampGetDayOfYear,
- overloads.TimeGetDate: timestampGetDayOfMonthOneBased,
- overloads.TimeGetDayOfMonth: timestampGetDayOfMonthZeroBased,
- overloads.TimeGetDayOfWeek: timestampGetDayOfWeek,
- overloads.TimeGetHours: timestampGetHours,
- overloads.TimeGetMinutes: timestampGetMinutes,
- overloads.TimeGetSeconds: timestampGetSeconds,
- overloads.TimeGetMilliseconds: timestampGetMilliseconds}
-
- timestampOneArgOverloads = map[string]func(time.Time, ref.Val) ref.Val{
- overloads.TimeGetFullYear: timestampGetFullYearWithTz,
- overloads.TimeGetMonth: timestampGetMonthWithTz,
- overloads.TimeGetDayOfYear: timestampGetDayOfYearWithTz,
- overloads.TimeGetDate: timestampGetDayOfMonthOneBasedWithTz,
- overloads.TimeGetDayOfMonth: timestampGetDayOfMonthZeroBasedWithTz,
- overloads.TimeGetDayOfWeek: timestampGetDayOfWeekWithTz,
- overloads.TimeGetHours: timestampGetHoursWithTz,
- overloads.TimeGetMinutes: timestampGetMinutesWithTz,
- overloads.TimeGetSeconds: timestampGetSecondsWithTz,
- overloads.TimeGetMilliseconds: timestampGetMillisecondsWithTz}
-)
-
-type timestampVisitor func(time.Time) ref.Val
-
-func timestampGetFullYear(t time.Time) ref.Val {
- return Int(t.Year())
-}
-func timestampGetMonth(t time.Time) ref.Val {
- // CEL spec indicates that the month should be 0-based, but the Time value
- // for Month() is 1-based.
- return Int(t.Month() - 1)
-}
-func timestampGetDayOfYear(t time.Time) ref.Val {
- return Int(t.YearDay() - 1)
-}
-func timestampGetDayOfMonthZeroBased(t time.Time) ref.Val {
- return Int(t.Day() - 1)
-}
-func timestampGetDayOfMonthOneBased(t time.Time) ref.Val {
- return Int(t.Day())
-}
-func timestampGetDayOfWeek(t time.Time) ref.Val {
- return Int(t.Weekday())
-}
-func timestampGetHours(t time.Time) ref.Val {
- return Int(t.Hour())
-}
-func timestampGetMinutes(t time.Time) ref.Val {
- return Int(t.Minute())
-}
-func timestampGetSeconds(t time.Time) ref.Val {
- return Int(t.Second())
-}
-func timestampGetMilliseconds(t time.Time) ref.Val {
- return Int(t.Nanosecond() / 1000000)
-}
-
-func timestampGetFullYearWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetFullYear)(t)
-}
-func timestampGetMonthWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetMonth)(t)
-}
-func timestampGetDayOfYearWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetDayOfYear)(t)
-}
-func timestampGetDayOfMonthZeroBasedWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetDayOfMonthZeroBased)(t)
-}
-func timestampGetDayOfMonthOneBasedWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetDayOfMonthOneBased)(t)
-}
-func timestampGetDayOfWeekWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetDayOfWeek)(t)
-}
-func timestampGetHoursWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetHours)(t)
-}
-func timestampGetMinutesWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetMinutes)(t)
-}
-func timestampGetSecondsWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetSeconds)(t)
-}
-func timestampGetMillisecondsWithTz(t time.Time, tz ref.Val) ref.Val {
- return timeZone(tz, timestampGetMilliseconds)(t)
-}
-
-func timeZone(tz ref.Val, visitor timestampVisitor) timestampVisitor {
- return func(t time.Time) ref.Val {
- if StringType != tz.Type() {
- return MaybeNoSuchOverloadErr(tz)
- }
- val := string(tz.(String))
- ind := strings.Index(val, ":")
- if ind == -1 {
- loc, err := time.LoadLocation(val)
- if err != nil {
- return WrapErr(err)
- }
- return visitor(t.In(loc))
- }
-
- // If the input is not the name of a timezone (for example, 'US/Central'), it should be a numerical offset from UTC
- // in the format ^(+|-)(0[0-9]|1[0-4]):[0-5][0-9]$. The numerical input is parsed in terms of hours and minutes.
- hr, err := strconv.Atoi(string(val[0:ind]))
- if err != nil {
- return WrapErr(err)
- }
- min, err := strconv.Atoi(string(val[ind+1:]))
- if err != nil {
- return WrapErr(err)
- }
- var offset int
- if string(val[0]) == "-" {
- offset = hr*60 - min
- } else {
- offset = hr*60 + min
- }
- secondsEastOfUTC := int((time.Duration(offset) * time.Minute).Seconds())
- timezone := time.FixedZone("", secondsEastOfUTC)
- return visitor(t.In(timezone))
- }
-}
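The timeZone helper above accepts either an IANA zone name or a "(+|-)HH:MM" offset. A hedged sketch of that behavior through the exported Receive method (the named-zone result assumes a local tz database is available):

package main

import (
	"fmt"
	"time"

	"github.com/google/cel-go/common/overloads"
	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/common/types/ref"
)

func main() {
	ts := types.Timestamp{Time: time.Date(2024, time.August, 9, 12, 14, 49, 0, time.UTC)}

	// With no argument the accessor operates in UTC.
	fmt.Println(ts.Receive(overloads.TimeGetHours, "", []ref.Val{})) // 12

	// A fixed "(+|-)HH:MM" offset shifts the result by hours and minutes.
	fmt.Println(ts.Receive(overloads.TimeGetHours, "", []ref.Val{types.String("-07:00")})) // 5

	// A named zone is resolved via time.LoadLocation (EDT in August, so UTC-4).
	fmt.Println(ts.Receive(overloads.TimeGetHours, "", []ref.Val{types.String("America/New_York")})) // 8
}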
diff --git a/vendor/github.com/google/cel-go/common/types/traits/BUILD.bazel b/vendor/github.com/google/cel-go/common/types/traits/BUILD.bazel
deleted file mode 100644
index b19eb8301..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/BUILD.bazel
+++ /dev/null
@@ -1,29 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "comparer.go",
- "container.go",
- "field_tester.go",
- "indexer.go",
- "iterator.go",
- "lister.go",
- "mapper.go",
- "matcher.go",
- "math.go",
- "receiver.go",
- "sizer.go",
- "traits.go",
- "zeroer.go",
- ],
- importpath = "github.com/google/cel-go/common/types/traits",
- deps = [
- "//common/types/ref:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/common/types/traits/comparer.go b/vendor/github.com/google/cel-go/common/types/traits/comparer.go
deleted file mode 100644
index b531d9ae2..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/comparer.go
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import (
- "github.com/google/cel-go/common/types/ref"
-)
-
-// Comparer interface for ordering comparisons between values in order to
-// support '<', '<=', '>=', '>' overloads.
-type Comparer interface {
- // Compare this value to the input other value, returning an Int:
- //
- // this < other -> Int(-1)
- // this == other -> Int(0)
- // this > other -> Int(1)
- //
- // If the comparison cannot be made or is not supported, an error should
- // be returned.
- Compare(other ref.Val) ref.Val
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/container.go b/vendor/github.com/google/cel-go/common/types/traits/container.go
deleted file mode 100644
index cf5c621ae..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/container.go
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import "github.com/google/cel-go/common/types/ref"
-
-// Container interface which permits containment tests such as 'a in b'.
-type Container interface {
- // Contains returns true if the value exists within the object.
- Contains(value ref.Val) ref.Val
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/field_tester.go b/vendor/github.com/google/cel-go/common/types/traits/field_tester.go
deleted file mode 100644
index 816a95652..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/field_tester.go
+++ /dev/null
@@ -1,30 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import (
- "github.com/google/cel-go/common/types/ref"
-)
-
-// FieldTester indicates if a defined field on an object type is set to a
-// non-default value.
-//
-// For use with the `has()` macro.
-type FieldTester interface {
- // IsSet returns true if the field is defined and set to a non-default
- // value. The method will return false if defined and not set, and an error
- // if the field is not defined.
- IsSet(field ref.Val) ref.Val
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/indexer.go b/vendor/github.com/google/cel-go/common/types/traits/indexer.go
deleted file mode 100644
index 662c6836c..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/indexer.go
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import (
- "github.com/google/cel-go/common/types/ref"
-)
-
-// Indexer permits random access of elements by index 'a[b()]'.
-type Indexer interface {
- // Get the value at the specified index or error.
- Get(index ref.Val) ref.Val
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/iterator.go b/vendor/github.com/google/cel-go/common/types/traits/iterator.go
deleted file mode 100644
index 42dd371aa..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/iterator.go
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import (
- "github.com/google/cel-go/common/types/ref"
-)
-
-// Iterable aggregate types permit traversal over their elements.
-type Iterable interface {
- // Iterator returns a new iterator view of the struct.
- Iterator() Iterator
-}
-
-// Iterator permits safe traversal over the contents of an aggregate type.
-type Iterator interface {
- ref.Val
-
- // HasNext returns true if there are unvisited elements in the Iterator.
- HasNext() ref.Val
-
- // Next returns the next element.
- Next() ref.Val
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/lister.go b/vendor/github.com/google/cel-go/common/types/traits/lister.go
deleted file mode 100644
index 5cf2593f3..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/lister.go
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import "github.com/google/cel-go/common/types/ref"
-
-// Lister interface which aggregates the traits of a list.
-type Lister interface {
- ref.Val
- Adder
- Container
- Indexer
- Iterable
- Sizer
-}
-
-// MutableLister interface which emits an immutable result after an intermediate computation.
-type MutableLister interface {
- Lister
- ToImmutableList() Lister
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/mapper.go b/vendor/github.com/google/cel-go/common/types/traits/mapper.go
deleted file mode 100644
index 2f7c919a8..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/mapper.go
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import "github.com/google/cel-go/common/types/ref"
-
-// Mapper interface which aggregates the traits of a map.
-type Mapper interface {
- ref.Val
- Container
- Indexer
- Iterable
- Sizer
-
- // Find returns a value, if one exists, for the input key.
- //
- // If the key is not found the function returns (nil, false).
- // If the input key is not valid for the map, or is Err or Unknown, the function returns
- // (Unknown|Err, false).
- Find(key ref.Val) (ref.Val, bool)
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/matcher.go b/vendor/github.com/google/cel-go/common/types/traits/matcher.go
deleted file mode 100644
index 085dc94ff..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/matcher.go
+++ /dev/null
@@ -1,23 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import "github.com/google/cel-go/common/types/ref"
-
-// Matcher interface for supporting 'matches()' overloads.
-type Matcher interface {
- // Match returns true if the pattern matches the current value.
- Match(pattern ref.Val) ref.Val
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/math.go b/vendor/github.com/google/cel-go/common/types/traits/math.go
deleted file mode 100644
index 86d5b9137..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/math.go
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import "github.com/google/cel-go/common/types/ref"
-
-// Adder interface to support '+' operator overloads.
-type Adder interface {
- // Add returns a combination of the current value and other value.
- //
- // If the other value is an unsupported type, an error is returned.
- Add(other ref.Val) ref.Val
-}
-
-// Divider interface to support '/' operator overloads.
-type Divider interface {
- // Divide returns the result of dividing the current value by the input
- // denominator.
- //
- // A denominator value of zero results in an error.
- Divide(denominator ref.Val) ref.Val
-}
-
-// Modder interface to support '%' operator overloads.
-type Modder interface {
- // Modulo returns the result of taking the modulus of the current value
- // by the denominator.
- //
- // A denominator value of zero results in an error.
- Modulo(denominator ref.Val) ref.Val
-}
-
-// Multiplier interface to support '*' operator overloads.
-type Multiplier interface {
- // Multiply returns the result of multiplying the current and input value.
- Multiply(other ref.Val) ref.Val
-}
-
-// Negater interface to support unary '-' and '!' operator overloads.
-type Negater interface {
- // Negate returns the complement of the current value.
- Negate() ref.Val
-}
-
-// Subtractor interface to support binary '-' operator overloads.
-type Subtractor interface {
- // Subtract returns the result of subtracting the input from the current
- // value.
- Subtract(subtrahend ref.Val) ref.Val
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/receiver.go b/vendor/github.com/google/cel-go/common/types/traits/receiver.go
deleted file mode 100644
index 8f41db45e..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/receiver.go
+++ /dev/null
@@ -1,24 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import "github.com/google/cel-go/common/types/ref"
-
-// Receiver interface for routing instance method calls within a value.
-type Receiver interface {
- // Receive accepts a function name, overload id, and arguments and returns
- // a value.
- Receive(function string, overload string, args []ref.Val) ref.Val
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/sizer.go b/vendor/github.com/google/cel-go/common/types/traits/sizer.go
deleted file mode 100644
index b80d25137..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/sizer.go
+++ /dev/null
@@ -1,25 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-import (
- "github.com/google/cel-go/common/types/ref"
-)
-
-// Sizer interface for supporting 'size()' overloads.
-type Sizer interface {
- // Size returns the number of elements or length of the value.
- Size() ref.Val
-}
diff --git a/vendor/github.com/google/cel-go/common/types/traits/traits.go b/vendor/github.com/google/cel-go/common/types/traits/traits.go
deleted file mode 100644
index 6da3e6a3e..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/traits.go
+++ /dev/null
@@ -1,64 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package traits defines interfaces that a type may implement to participate
-// in operator overloads and function dispatch.
-package traits
-
-const (
- // AdderType types provide a '+' operator overload.
- AdderType = 1 << iota
-
- // ComparerType types support ordering comparisons '<', '<=', '>', '>='.
- ComparerType
-
- // ContainerType types support 'in' operations.
- ContainerType
-
- // DividerType types support '/' operations.
- DividerType
-
- // FieldTesterType types support the detection of field value presence.
- FieldTesterType
-
- // IndexerType types support index access with dynamic values.
- IndexerType
-
- // IterableType types can be iterated over in comprehensions.
- IterableType
-
- // IteratorType types support iterator semantics.
- IteratorType
-
- // MatcherType types support pattern matching via 'matches' method.
- MatcherType
-
- // ModderType types support modulus operations '%'.
- ModderType
-
- // MultiplierType types support '*' operations.
- MultiplierType
-
- // NegatorType types support negation via either '!' or '-'.
- NegatorType
-
- // ReceiverType types support dynamic dispatch to instance methods.
- ReceiverType
-
- // SizerType types support the size() method.
- SizerType
-
- // SubtractorType types support '-' operations.
- SubtractorType
-)
diff --git a/vendor/github.com/google/cel-go/common/types/traits/zeroer.go b/vendor/github.com/google/cel-go/common/types/traits/zeroer.go
deleted file mode 100644
index 0b7c830a2..000000000
--- a/vendor/github.com/google/cel-go/common/types/traits/zeroer.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package traits
-
-// Zeroer interface for testing whether a CEL value is a zero value for its type.
-type Zeroer interface {
- // IsZeroValue indicates whether the object is the zero value for the type.
- IsZeroValue() bool
-}
diff --git a/vendor/github.com/google/cel-go/common/types/types.go b/vendor/github.com/google/cel-go/common/types/types.go
deleted file mode 100644
index 76624eefd..000000000
--- a/vendor/github.com/google/cel-go/common/types/types.go
+++ /dev/null
@@ -1,806 +0,0 @@
-// Copyright 2023 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "reflect"
- "strings"
-
- chkdecls "github.com/google/cel-go/checker/decls"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-
- exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
-)
-
-// Kind indicates a CEL type's kind which is used to differentiate quickly between simple
-// and complex types.
-type Kind uint
-
-const (
- // UnspecifiedKind is returned when the type is nil or its kind is not specified.
- UnspecifiedKind Kind = iota
-
- // DynKind represents a dynamic type. This kind only exists at type-check time.
- DynKind
-
- // AnyKind represents a google.protobuf.Any type. This kind only exists at type-check time.
- // Prefer DynKind to AnyKind as AnyKind has a specific meaning which is based on protobuf
- // well-known types.
- AnyKind
-
- // BoolKind represents a boolean type.
- BoolKind
-
- // BytesKind represents a bytes type.
- BytesKind
-
- // DoubleKind represents a double type.
- DoubleKind
-
- // DurationKind represents a CEL duration type.
- DurationKind
-
- // ErrorKind represents a CEL error type.
- ErrorKind
-
- // IntKind represents an integer type.
- IntKind
-
- // ListKind represents a list type.
- ListKind
-
- // MapKind represents a map type.
- MapKind
-
- // NullTypeKind represents a null type.
- NullTypeKind
-
- // OpaqueKind represents an abstract type which has no accessible fields.
- OpaqueKind
-
- // StringKind represents a string type.
- StringKind
-
- // StructKind represents a structured object with typed fields.
- StructKind
-
- // TimestampKind represents a CEL time type.
- TimestampKind
-
- // TypeKind represents the CEL type.
- TypeKind
-
- // TypeParamKind represents a parameterized type whose type name will be resolved at type-check time, if possible.
- TypeParamKind
-
- // UintKind represents a uint type.
- UintKind
-
- // UnknownKind represents an unknown value type.
- UnknownKind
-)
-
-var (
- // AnyType represents the google.protobuf.Any type.
- AnyType = &Type{
- kind: AnyKind,
- runtimeTypeName: "google.protobuf.Any",
- traitMask: traits.FieldTesterType |
- traits.IndexerType,
- }
- // BoolType represents the bool type.
- BoolType = &Type{
- kind: BoolKind,
- runtimeTypeName: "bool",
- traitMask: traits.ComparerType |
- traits.NegatorType,
- }
- // BytesType represents the bytes type.
- BytesType = &Type{
- kind: BytesKind,
- runtimeTypeName: "bytes",
- traitMask: traits.AdderType |
- traits.ComparerType |
- traits.SizerType,
- }
- // DoubleType represents the double type.
- DoubleType = &Type{
- kind: DoubleKind,
- runtimeTypeName: "double",
- traitMask: traits.AdderType |
- traits.ComparerType |
- traits.DividerType |
- traits.MultiplierType |
- traits.NegatorType |
- traits.SubtractorType,
- }
- // DurationType represents the CEL duration type.
- DurationType = &Type{
- kind: DurationKind,
- runtimeTypeName: "google.protobuf.Duration",
- traitMask: traits.AdderType |
- traits.ComparerType |
- traits.NegatorType |
- traits.ReceiverType |
- traits.SubtractorType,
- }
- // DynType represents a dynamic CEL type whose type will be determined at runtime from context.
- DynType = &Type{
- kind: DynKind,
- runtimeTypeName: "dyn",
- }
- // ErrorType represents a CEL error value.
- ErrorType = &Type{
- kind: ErrorKind,
- runtimeTypeName: "error",
- }
- // IntType represents the int type.
- IntType = &Type{
- kind: IntKind,
- runtimeTypeName: "int",
- traitMask: traits.AdderType |
- traits.ComparerType |
- traits.DividerType |
- traits.ModderType |
- traits.MultiplierType |
- traits.NegatorType |
- traits.SubtractorType,
- }
- // ListType represents the runtime list type.
- ListType = NewListType(nil)
- // MapType represents the runtime map type.
- MapType = NewMapType(nil, nil)
- // NullType represents the type of a null value.
- NullType = &Type{
- kind: NullTypeKind,
- runtimeTypeName: "null_type",
- }
- // StringType represents the string type.
- StringType = &Type{
- kind: StringKind,
- runtimeTypeName: "string",
- traitMask: traits.AdderType |
- traits.ComparerType |
- traits.MatcherType |
- traits.ReceiverType |
- traits.SizerType,
- }
- // TimestampType represents the time type.
- TimestampType = &Type{
- kind: TimestampKind,
- runtimeTypeName: "google.protobuf.Timestamp",
- traitMask: traits.AdderType |
- traits.ComparerType |
- traits.ReceiverType |
- traits.SubtractorType,
- }
- // TypeType represents a CEL type.
- TypeType = &Type{
- kind: TypeKind,
- runtimeTypeName: "type",
- }
- // UintType represents a uint type.
- UintType = &Type{
- kind: UintKind,
- runtimeTypeName: "uint",
- traitMask: traits.AdderType |
- traits.ComparerType |
- traits.DividerType |
- traits.ModderType |
- traits.MultiplierType |
- traits.SubtractorType,
- }
- // UnknownType represents an unknown value type.
- UnknownType = &Type{
- kind: UnknownKind,
- runtimeTypeName: "unknown",
- }
-)
-
-var _ ref.Type = &Type{}
-var _ ref.Val = &Type{}
-
-// Type holds a reference to a runtime type with an optional type-checked set of type parameters.
-type Type struct {
- // kind indicates general category of the type.
- kind Kind
-
- // parameters holds the optional type-checked set of type Parameters that are used during static analysis.
- parameters []*Type
-
- // runtimeTypeName indicates the runtime type name of the type.
- runtimeTypeName string
-
- // isAssignableType function determines whether one type is assignable to this type.
- // A nil value for the isAssignableType function falls back to equality of kind, runtimeType, and parameters.
- isAssignableType func(other *Type) bool
-
- // isAssignableRuntimeType function determines whether the runtime type (with erasure) is assignable to this type.
- // A nil value for the isAssignableRuntimeType function falls back to the equality of the type or type name.
- isAssignableRuntimeType func(other ref.Val) bool
-
- // traitMask is a mask of flags which indicate the capabilities of the type.
- traitMask int
-}
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (t *Type) ConvertToNative(typeDesc reflect.Type) (any, error) {
- return nil, fmt.Errorf("type conversion not supported for 'type'")
-}
-
-// ConvertToType implements ref.Val.ConvertToType.
-func (t *Type) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case TypeType:
- return TypeType
- case StringType:
- return String(t.TypeName())
- }
- return NewErr("type conversion error from '%s' to '%s'", TypeType, typeVal)
-}
-
-// Equal indicates whether two types have the same runtime type name.
-//
-// The name Equal is a bit of a misnomer, but for historical reasons, this is the
-// runtime behavior. For a more accurate definition see IsType().
-func (t *Type) Equal(other ref.Val) ref.Val {
- otherType, ok := other.(ref.Type)
- return Bool(ok && t.TypeName() == otherType.TypeName())
-}
-
-// HasTrait implements the ref.Type interface method.
-func (t *Type) HasTrait(trait int) bool {
- return trait&t.traitMask == trait
-}
-
-// IsExactType indicates whether the two types are exactly the same. This check also verifies type parameter type names.
-func (t *Type) IsExactType(other *Type) bool {
- return t.isTypeInternal(other, true)
-}
-
-// IsEquivalentType indicates whether two types are equivalent. This check ignores type parameter type names.
-func (t *Type) IsEquivalentType(other *Type) bool {
- return t.isTypeInternal(other, false)
-}
-
-// Kind indicates general category of the type.
-func (t *Type) Kind() Kind {
- if t == nil {
- return UnspecifiedKind
- }
- return t.kind
-}
-
-// isTypeInternal checks whether the two types are equivalent or exactly the same based on the checkTypeParamName flag.
-func (t *Type) isTypeInternal(other *Type, checkTypeParamName bool) bool {
- if t == nil {
- return false
- }
- if t == other {
- return true
- }
- if t.Kind() != other.Kind() || len(t.Parameters()) != len(other.Parameters()) {
- return false
- }
- if (checkTypeParamName || t.Kind() != TypeParamKind) && t.TypeName() != other.TypeName() {
- return false
- }
- for i, p := range t.Parameters() {
- if !p.isTypeInternal(other.Parameters()[i], checkTypeParamName) {
- return false
- }
- }
- return true
-}
-
-// IsAssignableType determines whether the current type is type-check assignable from the input fromType.
-func (t *Type) IsAssignableType(fromType *Type) bool {
- if t == nil {
- return false
- }
- if t.isAssignableType != nil {
- return t.isAssignableType(fromType)
- }
- return t.defaultIsAssignableType(fromType)
-}
-
-// IsAssignableRuntimeType determines whether the current type is runtime assignable from the input runtimeType.
-//
-// At runtime, parameterized types are erased and so a function which type-checks to support a map(string, string)
-// will have a runtime assignable type of a map.
-func (t *Type) IsAssignableRuntimeType(val ref.Val) bool {
- if t == nil {
- return false
- }
- if t.isAssignableRuntimeType != nil {
- return t.isAssignableRuntimeType(val)
- }
- return t.defaultIsAssignableRuntimeType(val)
-}
-
-// Parameters returns the list of type parameters if set.
-//
-// For ListKind, Parameters()[0] represents the list element type
-// For MapKind, Parameters()[0] represents the map key type, and Parameters()[1] represents the map
-// value type.
-func (t *Type) Parameters() []*Type {
- if t == nil {
- return emptyParams
- }
- return t.parameters
-}
-
-// DeclaredTypeName indicates the fully qualified and parameterized type-check type name.
-func (t *Type) DeclaredTypeName() string {
- // if the type itself is neither null, nor dyn, but is assignable to null, then it's a wrapper type.
- if t.Kind() != NullTypeKind && !t.isDyn() && t.IsAssignableType(NullType) {
- return fmt.Sprintf("wrapper(%s)", t.TypeName())
- }
- return t.TypeName()
-}
-
-// Type implements the ref.Val interface method.
-func (t *Type) Type() ref.Type {
- return TypeType
-}
-
-// Value implements the ref.Val interface method.
-func (t *Type) Value() any {
- return t.TypeName()
-}
-
-// TypeName returns the type-erased fully qualified runtime type name.
-//
-// TypeName implements the ref.Type interface method.
-func (t *Type) TypeName() string {
- if t == nil {
- return ""
- }
- return t.runtimeTypeName
-}
-
-// String returns a human-readable definition of the type name.
-func (t *Type) String() string {
- if len(t.Parameters()) == 0 {
- return t.DeclaredTypeName()
- }
- params := make([]string, len(t.Parameters()))
- for i, p := range t.Parameters() {
- params[i] = p.String()
- }
- return fmt.Sprintf("%s(%s)", t.DeclaredTypeName(), strings.Join(params, ", "))
-}
-
-// isDyn indicates whether the type is dynamic in any way.
-func (t *Type) isDyn() bool {
- k := t.Kind()
- return k == DynKind || k == AnyKind || k == TypeParamKind
-}
-
-// defaultIsAssignableType provides the standard definition of what it means for one type to be
-// assignable to another, where any of the following may return a true result:
-//
-// - The from types are the same instance
-// - The target type is dynamic
-// - The fromType has the same kind and type name as the target type, and all parameters of the
-//   target type are IsAssignableType() from the parameters of the fromType.
-func (t *Type) defaultIsAssignableType(fromType *Type) bool {
- if t == fromType || t.isDyn() {
- return true
- }
- if t.Kind() != fromType.Kind() ||
- t.TypeName() != fromType.TypeName() ||
- len(t.Parameters()) != len(fromType.Parameters()) {
- return false
- }
- for i, tp := range t.Parameters() {
- fp := fromType.Parameters()[i]
- if !tp.IsAssignableType(fp) {
- return false
- }
- }
- return true
-}
-
-// defaultIsAssignableRuntimeType inspects the type and, in the case of lists and maps, the key and element types
-// to determine whether a ref.Val is assignable to the declared type for a function signature.
-func (t *Type) defaultIsAssignableRuntimeType(val ref.Val) bool {
- valType := val.Type()
- // If the current type and value type don't agree, then return false.
- if !(t.isDyn() || t.TypeName() == valType.TypeName()) {
- return false
- }
- switch t.Kind() {
- case ListKind:
- elemType := t.Parameters()[0]
- l := val.(traits.Lister)
- if l.Size() == IntZero {
- return true
- }
- it := l.Iterator()
- elemVal := it.Next()
- return elemType.IsAssignableRuntimeType(elemVal)
- case MapKind:
- keyType := t.Parameters()[0]
- elemType := t.Parameters()[1]
- m := val.(traits.Mapper)
- if m.Size() == IntZero {
- return true
- }
- it := m.Iterator()
- keyVal := it.Next()
- elemVal := m.Get(keyVal)
- return keyType.IsAssignableRuntimeType(keyVal) && elemType.IsAssignableRuntimeType(elemVal)
- }
- return true
-}
-
-// NewListType creates an instance of a list type value with the provided element type.
-func NewListType(elemType *Type) *Type {
- return &Type{
- kind: ListKind,
- parameters: []*Type{elemType},
- runtimeTypeName: "list",
- traitMask: traits.AdderType |
- traits.ContainerType |
- traits.IndexerType |
- traits.IterableType |
- traits.SizerType,
- }
-}
-
-// NewMapType creates an instance of a map type value with the provided key and value types.
-func NewMapType(keyType, valueType *Type) *Type {
- return &Type{
- kind: MapKind,
- parameters: []*Type{keyType, valueType},
- runtimeTypeName: "map",
- traitMask: traits.ContainerType |
- traits.IndexerType |
- traits.IterableType |
- traits.SizerType,
- }
-}
-
-// NewNullableType creates an instance of a nullable type with the provided wrapped type.
-//
-// Note: only primitive types are supported as wrapped types.
-func NewNullableType(wrapped *Type) *Type {
- return &Type{
- kind: wrapped.Kind(),
- parameters: wrapped.Parameters(),
- runtimeTypeName: wrapped.TypeName(),
- traitMask: wrapped.traitMask,
- isAssignableType: func(other *Type) bool {
- return NullType.IsAssignableType(other) || wrapped.IsAssignableType(other)
- },
- isAssignableRuntimeType: func(other ref.Val) bool {
- return NullType.IsAssignableRuntimeType(other) || wrapped.IsAssignableRuntimeType(other)
- },
- }
-}
-
-// NewOptionalType creates an abstract parameterized type instance corresponding to CEL's notion of optional.
-func NewOptionalType(param *Type) *Type {
- return NewOpaqueType("optional", param)
-}
-
-// NewOpaqueType creates an abstract parameterized type with a given name.
-func NewOpaqueType(name string, params ...*Type) *Type {
- return &Type{
- kind: OpaqueKind,
- parameters: params,
- runtimeTypeName: name,
- }
-}
-
-// NewObjectType creates a type reference to an externally defined type, e.g. a protobuf message type.
-//
-// An object type is assumed to support field presence testing and field indexing. Additionally, the
-// type may also indicate additional traits through the use of the optional traits vararg argument.
-func NewObjectType(typeName string, traits ...int) *Type {
- // Function sanitizes object types on the fly
- if wkt, found := checkedWellKnowns[typeName]; found {
- return wkt
- }
- traitMask := 0
- for _, trait := range traits {
- traitMask |= trait
- }
- return &Type{
- kind: StructKind,
- parameters: emptyParams,
- runtimeTypeName: typeName,
- traitMask: structTypeTraitMask | traitMask,
- }
-}
-
-// NewObjectTypeValue creates a type reference to an externally defined type.
-//
-// Deprecated: use cel.ObjectType(typeName)
-func NewObjectTypeValue(typeName string) *Type {
- return NewObjectType(typeName)
-}
-
-// NewTypeValue creates an opaque type which has a set of optional type traits as defined in
-// the common/types/traits package.
-//
-// Deprecated: use cel.ObjectType(typeName, traits)
-func NewTypeValue(typeName string, traits ...int) *Type {
- traitMask := 0
- for _, trait := range traits {
- traitMask |= trait
- }
- return &Type{
- kind: StructKind,
- parameters: emptyParams,
- runtimeTypeName: typeName,
- traitMask: traitMask,
- }
-}
-
-// NewTypeParamType creates a parameterized type instance.
-func NewTypeParamType(paramName string) *Type {
- return &Type{
- kind: TypeParamKind,
- runtimeTypeName: paramName,
- }
-}
-
-// NewTypeTypeWithParam creates a type with a type parameter.
-// Used for type-checking purposes, but equivalent to TypeType otherwise.
-func NewTypeTypeWithParam(param *Type) *Type {
- return &Type{
- kind: TypeKind,
- runtimeTypeName: "type",
- parameters: []*Type{param},
- }
-}
-
-// TypeToExprType converts a CEL-native type representation to a protobuf CEL Type representation.
-func TypeToExprType(t *Type) (*exprpb.Type, error) {
- switch t.Kind() {
- case AnyKind:
- return chkdecls.Any, nil
- case BoolKind:
- return maybeWrapper(t, chkdecls.Bool), nil
- case BytesKind:
- return maybeWrapper(t, chkdecls.Bytes), nil
- case DoubleKind:
- return maybeWrapper(t, chkdecls.Double), nil
- case DurationKind:
- return chkdecls.Duration, nil
- case DynKind:
- return chkdecls.Dyn, nil
- case ErrorKind:
- return chkdecls.Error, nil
- case IntKind:
- return maybeWrapper(t, chkdecls.Int), nil
- case ListKind:
- if len(t.Parameters()) != 1 {
- return nil, fmt.Errorf("invalid list, got %d parameters, wanted one", len(t.Parameters()))
- }
- et, err := TypeToExprType(t.Parameters()[0])
- if err != nil {
- return nil, err
- }
- return chkdecls.NewListType(et), nil
- case MapKind:
- if len(t.Parameters()) != 2 {
- return nil, fmt.Errorf("invalid map, got %d parameters, wanted two", len(t.Parameters()))
- }
- kt, err := TypeToExprType(t.Parameters()[0])
- if err != nil {
- return nil, err
- }
- vt, err := TypeToExprType(t.Parameters()[1])
- if err != nil {
- return nil, err
- }
- return chkdecls.NewMapType(kt, vt), nil
- case NullTypeKind:
- return chkdecls.Null, nil
- case OpaqueKind:
- params := make([]*exprpb.Type, len(t.Parameters()))
- for i, p := range t.Parameters() {
- pt, err := TypeToExprType(p)
- if err != nil {
- return nil, err
- }
- params[i] = pt
- }
- return chkdecls.NewAbstractType(t.TypeName(), params...), nil
- case StringKind:
- return maybeWrapper(t, chkdecls.String), nil
- case StructKind:
- return chkdecls.NewObjectType(t.TypeName()), nil
- case TimestampKind:
- return chkdecls.Timestamp, nil
- case TypeParamKind:
- return chkdecls.NewTypeParamType(t.TypeName()), nil
- case TypeKind:
- if len(t.Parameters()) == 1 {
- p, err := TypeToExprType(t.Parameters()[0])
- if err != nil {
- return nil, err
- }
- return chkdecls.NewTypeType(p), nil
- }
- return chkdecls.NewTypeType(nil), nil
- case UintKind:
- return maybeWrapper(t, chkdecls.Uint), nil
- }
- return nil, fmt.Errorf("missing type conversion to proto: %v", t)
-}
-
-// ExprTypeToType converts a protobuf CEL type representation to a CEL-native type representation.
-func ExprTypeToType(t *exprpb.Type) (*Type, error) {
- switch t.GetTypeKind().(type) {
- case *exprpb.Type_Dyn:
- return DynType, nil
- case *exprpb.Type_AbstractType_:
- paramTypes := make([]*Type, len(t.GetAbstractType().GetParameterTypes()))
- for i, p := range t.GetAbstractType().GetParameterTypes() {
- pt, err := ExprTypeToType(p)
- if err != nil {
- return nil, err
- }
- paramTypes[i] = pt
- }
- return NewOpaqueType(t.GetAbstractType().GetName(), paramTypes...), nil
- case *exprpb.Type_ListType_:
- et, err := ExprTypeToType(t.GetListType().GetElemType())
- if err != nil {
- return nil, err
- }
- return NewListType(et), nil
- case *exprpb.Type_MapType_:
- kt, err := ExprTypeToType(t.GetMapType().GetKeyType())
- if err != nil {
- return nil, err
- }
- vt, err := ExprTypeToType(t.GetMapType().GetValueType())
- if err != nil {
- return nil, err
- }
- return NewMapType(kt, vt), nil
- case *exprpb.Type_MessageType:
- return NewObjectType(t.GetMessageType()), nil
- case *exprpb.Type_Null:
- return NullType, nil
- case *exprpb.Type_Primitive:
- switch t.GetPrimitive() {
- case exprpb.Type_BOOL:
- return BoolType, nil
- case exprpb.Type_BYTES:
- return BytesType, nil
- case exprpb.Type_DOUBLE:
- return DoubleType, nil
- case exprpb.Type_INT64:
- return IntType, nil
- case exprpb.Type_STRING:
- return StringType, nil
- case exprpb.Type_UINT64:
- return UintType, nil
- default:
- return nil, fmt.Errorf("unsupported primitive type: %v", t)
- }
- case *exprpb.Type_TypeParam:
- return NewTypeParamType(t.GetTypeParam()), nil
- case *exprpb.Type_Type:
- if t.GetType().GetTypeKind() != nil {
- p, err := ExprTypeToType(t.GetType())
- if err != nil {
- return nil, err
- }
- return NewTypeTypeWithParam(p), nil
- }
- return TypeType, nil
- case *exprpb.Type_WellKnown:
- switch t.GetWellKnown() {
- case exprpb.Type_ANY:
- return AnyType, nil
- case exprpb.Type_DURATION:
- return DurationType, nil
- case exprpb.Type_TIMESTAMP:
- return TimestampType, nil
- default:
- return nil, fmt.Errorf("unsupported well-known type: %v", t)
- }
- case *exprpb.Type_Wrapper:
- t, err := ExprTypeToType(&exprpb.Type{TypeKind: &exprpb.Type_Primitive{Primitive: t.GetWrapper()}})
- if err != nil {
- return nil, err
- }
- return NewNullableType(t), nil
- case *exprpb.Type_Error:
- return ErrorType, nil
- default:
- return nil, fmt.Errorf("unsupported type: %v", t)
- }
-}
-
-func maybeWrapper(t *Type, pbType *exprpb.Type) *exprpb.Type {
- if t.IsAssignableType(NullType) {
- return chkdecls.NewWrapperType(pbType)
- }
- return pbType
-}
-
-func maybeForeignType(t ref.Type) *Type {
- if celType, ok := t.(*Type); ok {
- return celType
- }
- // Inspect the incoming type to determine its traits. The assumption will be that the incoming
- // type does not have any field values; however, if the trait mask indicates that field testing
- // and indexing are supported, the foreign type is marked as a struct.
- traitMask := 0
- for _, trait := range allTraits {
- if t.HasTrait(trait) {
- traitMask |= trait
- }
- }
- // Treat the value like a struct. If it has no fields, it is harmless to denote the type
- // as such since it basically becomes an opaque type by convention.
- return NewObjectType(t.TypeName(), traitMask)
-}
-
-var (
- checkedWellKnowns = map[string]*Type{
- // Wrapper types.
- "google.protobuf.BoolValue": NewNullableType(BoolType),
- "google.protobuf.BytesValue": NewNullableType(BytesType),
- "google.protobuf.DoubleValue": NewNullableType(DoubleType),
- "google.protobuf.FloatValue": NewNullableType(DoubleType),
- "google.protobuf.Int64Value": NewNullableType(IntType),
- "google.protobuf.Int32Value": NewNullableType(IntType),
- "google.protobuf.UInt64Value": NewNullableType(UintType),
- "google.protobuf.UInt32Value": NewNullableType(UintType),
- "google.protobuf.StringValue": NewNullableType(StringType),
- // Well-known types.
- "google.protobuf.Any": AnyType,
- "google.protobuf.Duration": DurationType,
- "google.protobuf.Timestamp": TimestampType,
- // Json types.
- "google.protobuf.ListValue": NewListType(DynType),
- "google.protobuf.NullValue": NullType,
- "google.protobuf.Struct": NewMapType(StringType, DynType),
- "google.protobuf.Value": DynType,
- }
-
- emptyParams = []*Type{}
-
- allTraits = []int{
- traits.AdderType,
- traits.ComparerType,
- traits.ContainerType,
- traits.DividerType,
- traits.FieldTesterType,
- traits.IndexerType,
- traits.IterableType,
- traits.IteratorType,
- traits.MatcherType,
- traits.ModderType,
- traits.MultiplierType,
- traits.NegatorType,
- traits.ReceiverType,
- traits.SizerType,
- traits.SubtractorType,
- }
-
- structTypeTraitMask = traits.FieldTesterType | traits.IndexerType
-)
diff --git a/vendor/github.com/google/cel-go/common/types/uint.go b/vendor/github.com/google/cel-go/common/types/uint.go
deleted file mode 100644
index 3257f9ade..000000000
--- a/vendor/github.com/google/cel-go/common/types/uint.go
+++ /dev/null
@@ -1,244 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "math"
- "reflect"
- "strconv"
-
- "github.com/google/cel-go/common/types/ref"
-
- anypb "google.golang.org/protobuf/types/known/anypb"
- structpb "google.golang.org/protobuf/types/known/structpb"
- wrapperspb "google.golang.org/protobuf/types/known/wrapperspb"
-)
-
-// Uint type implementation which supports comparison and math operators.
-type Uint uint64
-
-var (
- uint32WrapperType = reflect.TypeOf(&wrapperspb.UInt32Value{})
-
- uint64WrapperType = reflect.TypeOf(&wrapperspb.UInt64Value{})
-)
-
-// Uint constants
-const (
- uintZero = Uint(0)
-)
-
-// Add implements traits.Adder.Add.
-func (i Uint) Add(other ref.Val) ref.Val {
- otherUint, ok := other.(Uint)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- val, err := addUint64Checked(uint64(i), uint64(otherUint))
- if err != nil {
- return WrapErr(err)
- }
- return Uint(val)
-}
-
-// Compare implements traits.Comparer.Compare.
-func (i Uint) Compare(other ref.Val) ref.Val {
- switch ov := other.(type) {
- case Double:
- if math.IsNaN(float64(ov)) {
- return NewErr("NaN values cannot be ordered")
- }
- return compareUintDouble(i, ov)
- case Int:
- return compareUintInt(i, ov)
- case Uint:
- return compareUint(i, ov)
- default:
- return MaybeNoSuchOverloadErr(other)
- }
-}
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (i Uint) ConvertToNative(typeDesc reflect.Type) (any, error) {
- switch typeDesc.Kind() {
- case reflect.Uint, reflect.Uint32:
- v, err := uint64ToUint32Checked(uint64(i))
- if err != nil {
- return 0, err
- }
- return reflect.ValueOf(v).Convert(typeDesc).Interface(), nil
- case reflect.Uint64:
- return reflect.ValueOf(i).Convert(typeDesc).Interface(), nil
- case reflect.Ptr:
- switch typeDesc {
- case anyValueType:
- // Primitives must be wrapped before being set on an Any field.
- return anypb.New(wrapperspb.UInt64(uint64(i)))
- case jsonValueType:
- // JSON can accurately represent 32-bit uints as floating point values.
- if i.isJSONSafe() {
- return structpb.NewNumberValue(float64(i)), nil
- }
- // Proto3 to JSON conversion requires string-formatted uint64 values
- // since the conversion to floating point would result in truncation.
- return structpb.NewStringValue(strconv.FormatUint(uint64(i), 10)), nil
- case uint32WrapperType:
- // Convert the value to a wrapperspb.UInt32Value, error on overflow.
- v, err := uint64ToUint32Checked(uint64(i))
- if err != nil {
- return 0, err
- }
- return wrapperspb.UInt32(v), nil
- case uint64WrapperType:
- // Convert the value to a wrapperspb.UInt64Value.
- return wrapperspb.UInt64(uint64(i)), nil
- }
- switch typeDesc.Elem().Kind() {
- case reflect.Uint32:
- v, err := uint64ToUint32Checked(uint64(i))
- if err != nil {
- return 0, err
- }
- p := reflect.New(typeDesc.Elem())
- p.Elem().Set(reflect.ValueOf(v).Convert(typeDesc.Elem()))
- return p.Interface(), nil
- case reflect.Uint64:
- v := uint64(i)
- p := reflect.New(typeDesc.Elem())
- p.Elem().Set(reflect.ValueOf(v).Convert(typeDesc.Elem()))
- return p.Interface(), nil
- }
- case reflect.Interface:
- iv := i.Value()
- if reflect.TypeOf(iv).Implements(typeDesc) {
- return iv, nil
- }
- if reflect.TypeOf(i).Implements(typeDesc) {
- return i, nil
- }
- }
- return nil, fmt.Errorf("unsupported type conversion from 'uint' to %v", typeDesc)
-}
-
-// ConvertToType implements ref.Val.ConvertToType.
-func (i Uint) ConvertToType(typeVal ref.Type) ref.Val {
- switch typeVal {
- case IntType:
- v, err := uint64ToInt64Checked(uint64(i))
- if err != nil {
- return WrapErr(err)
- }
- return Int(v)
- case UintType:
- return i
- case DoubleType:
- return Double(i)
- case StringType:
- return String(fmt.Sprintf("%d", uint64(i)))
- case TypeType:
- return UintType
- }
- return NewErr("type conversion error from '%s' to '%s'", UintType, typeVal)
-}
-
-// Divide implements traits.Divider.Divide.
-func (i Uint) Divide(other ref.Val) ref.Val {
- otherUint, ok := other.(Uint)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- div, err := divideUint64Checked(uint64(i), uint64(otherUint))
- if err != nil {
- return WrapErr(err)
- }
- return Uint(div)
-}
-
-// Equal implements ref.Val.Equal.
-func (i Uint) Equal(other ref.Val) ref.Val {
- switch ov := other.(type) {
- case Double:
- if math.IsNaN(float64(ov)) {
- return False
- }
- return Bool(compareUintDouble(i, ov) == 0)
- case Int:
- return Bool(compareUintInt(i, ov) == 0)
- case Uint:
- return Bool(i == ov)
- default:
- return False
- }
-}
-
-// IsZeroValue returns true if the uint is zero.
-func (i Uint) IsZeroValue() bool {
- return i == 0
-}
-
-// Modulo implements traits.Modder.Modulo.
-func (i Uint) Modulo(other ref.Val) ref.Val {
- otherUint, ok := other.(Uint)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- mod, err := moduloUint64Checked(uint64(i), uint64(otherUint))
- if err != nil {
- return WrapErr(err)
- }
- return Uint(mod)
-}
-
-// Multiply implements traits.Multiplier.Multiply.
-func (i Uint) Multiply(other ref.Val) ref.Val {
- otherUint, ok := other.(Uint)
- if !ok {
- return MaybeNoSuchOverloadErr(other)
- }
- val, err := multiplyUint64Checked(uint64(i), uint64(otherUint))
- if err != nil {
- return WrapErr(err)
- }
- return Uint(val)
-}
-
-// Subtract implements traits.Subtractor.Subtract.
-func (i Uint) Subtract(subtrahend ref.Val) ref.Val {
- subtraUint, ok := subtrahend.(Uint)
- if !ok {
- return MaybeNoSuchOverloadErr(subtrahend)
- }
- val, err := subtractUint64Checked(uint64(i), uint64(subtraUint))
- if err != nil {
- return WrapErr(err)
- }
- return Uint(val)
-}
-
-// Type implements ref.Val.Type.
-func (i Uint) Type() ref.Type {
- return UintType
-}
-
-// Value implements ref.Val.Value.
-func (i Uint) Value() any {
- return uint64(i)
-}
-
-// isJSONSafe indicates whether the uint is safely representable as a floating point value in JSON.
-func (i Uint) isJSONSafe() bool {
- return i <= maxIntJSON
-}
diff --git a/vendor/github.com/google/cel-go/common/types/unknown.go b/vendor/github.com/google/cel-go/common/types/unknown.go
deleted file mode 100644
index 9dd2b2579..000000000
--- a/vendor/github.com/google/cel-go/common/types/unknown.go
+++ /dev/null
@@ -1,326 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "fmt"
- "math"
- "reflect"
- "sort"
- "strings"
- "unicode"
-
- "github.com/google/cel-go/common/types/ref"
-)
-
-var (
- unspecifiedAttribute = &AttributeTrail{qualifierPath: []any{}}
-)
-
-// NewAttributeTrail creates a new simple attribute from a variable name.
-func NewAttributeTrail(variable string) *AttributeTrail {
- if variable == "" {
- return unspecifiedAttribute
- }
- return &AttributeTrail{variable: variable}
-}
-
-// AttributeTrail specifies a variable with an optional qualifier path. An attribute value is expected to
-// correspond to an AbsoluteAttribute, meaning a field selection which starts with a top-level variable.
-//
-// The qualifier path elements adhere to the AttributeQualifier type constraint.
-type AttributeTrail struct {
- variable string
- qualifierPath []any
-}
-
-// Equal returns whether two attribute values have the same variable name and qualifier paths.
-func (a *AttributeTrail) Equal(other *AttributeTrail) bool {
- if a.Variable() != other.Variable() || len(a.QualifierPath()) != len(other.QualifierPath()) {
- return false
- }
- for i, q := range a.QualifierPath() {
- qual := other.QualifierPath()[i]
- if !qualifiersEqual(q, qual) {
- return false
- }
- }
- return true
-}
-
-func qualifiersEqual(a, b any) bool {
- if a == b {
- return true
- }
- switch numA := a.(type) {
- case int64:
- numB, ok := b.(uint64)
- if !ok {
- return false
- }
- return intUintEqual(numA, numB)
- case uint64:
- numB, ok := b.(int64)
- if !ok {
- return false
- }
- return intUintEqual(numB, numA)
- default:
- return false
- }
-}
-
-func intUintEqual(i int64, u uint64) bool {
- if i < 0 || u > math.MaxInt64 {
- return false
- }
- return i == int64(u)
-}
-
-// Variable returns the variable name associated with the attribute.
-func (a *AttributeTrail) Variable() string {
- return a.variable
-}
-
-// QualifierPath returns the optional set of qualifying fields or indices applied to the variable.
-func (a *AttributeTrail) QualifierPath() []any {
- return a.qualifierPath
-}
-
-// String returns the string representation of the Attribute.
-func (a *AttributeTrail) String() string {
- if a.variable == "" {
- return ""
- }
- var str strings.Builder
- str.WriteString(a.variable)
- for _, q := range a.qualifierPath {
- switch q := q.(type) {
- case bool, int64:
- str.WriteString(fmt.Sprintf("[%v]", q))
- case uint64:
- str.WriteString(fmt.Sprintf("[%vu]", q))
- case string:
- if isIdentifierCharacter(q) {
- str.WriteString(fmt.Sprintf(".%v", q))
- } else {
- str.WriteString(fmt.Sprintf("[%q]", q))
- }
- }
- }
- return str.String()
-}
-
-func isIdentifierCharacter(str string) bool {
- for _, c := range str {
- if unicode.IsLetter(c) || unicode.IsDigit(c) || string(c) == "_" {
- continue
- }
- return false
- }
- return true
-}
-
-// AttributeQualifier constrains the possible types which may be used to qualify an attribute.
-type AttributeQualifier interface {
- bool | int64 | uint64 | string
-}
-
-// QualifyAttribute qualifies an attribute using a valid AttributeQualifier type.
-func QualifyAttribute[T AttributeQualifier](attr *AttributeTrail, qualifier T) *AttributeTrail {
- attr.qualifierPath = append(attr.qualifierPath, qualifier)
- return attr
-}
-
-// Unknown type which collects expression ids which caused the current value to become unknown.
-type Unknown struct {
- attributeTrails map[int64][]*AttributeTrail
-}
-
-// NewUnknown creates a new unknown at a given expression id for an attribute.
-//
-// If the attribute is nil, the attribute value will be the `unspecifiedAttribute`.
-func NewUnknown(id int64, attr *AttributeTrail) *Unknown {
- if attr == nil {
- attr = unspecifiedAttribute
- }
- return &Unknown{
- attributeTrails: map[int64][]*AttributeTrail{id: {attr}},
- }
-}
-
-// IDs returns the set of unknown expression ids contained by this value.
-//
-// Numeric identifiers are guaranteed to be in sorted order.
-func (u *Unknown) IDs() []int64 {
- ids := make(int64Slice, len(u.attributeTrails))
- i := 0
- for id := range u.attributeTrails {
- ids[i] = id
- i++
- }
- ids.Sort()
- return ids
-}
-
-// GetAttributeTrails returns the missing attribute trails, if present, for a given expression id.
-func (u *Unknown) GetAttributeTrails(id int64) ([]*AttributeTrail, bool) {
- trails, found := u.attributeTrails[id]
- return trails, found
-}
-
-// Contains returns true if the input unknown is a subset of the current unknown.
-func (u *Unknown) Contains(other *Unknown) bool {
- for id, otherTrails := range other.attributeTrails {
- trails, found := u.attributeTrails[id]
- if !found || len(otherTrails) != len(trails) {
- return false
- }
- for _, ot := range otherTrails {
- found := false
- for _, t := range trails {
- if t.Equal(ot) {
- found = true
- break
- }
- }
- if !found {
- return false
- }
- }
- }
- return true
-}
-
-// ConvertToNative implements ref.Val.ConvertToNative.
-func (u *Unknown) ConvertToNative(typeDesc reflect.Type) (any, error) {
- return u.Value(), nil
-}
-
-// ConvertToType is an identity function since unknown values cannot be modified.
-func (u *Unknown) ConvertToType(typeVal ref.Type) ref.Val {
- return u
-}
-
-// Equal is an identity function since unknown values cannot be modified.
-func (u *Unknown) Equal(other ref.Val) ref.Val {
- return u
-}
-
-// String implements the Stringer interface
-func (u *Unknown) String() string {
- var str strings.Builder
- for id, attrs := range u.attributeTrails {
- if str.Len() != 0 {
- str.WriteString(", ")
- }
- if len(attrs) == 1 {
- str.WriteString(fmt.Sprintf("%v (%d)", attrs[0], id))
- } else {
- str.WriteString(fmt.Sprintf("%v (%d)", attrs, id))
- }
- }
- return str.String()
-}
-
-// Type implements ref.Val.Type.
-func (u *Unknown) Type() ref.Type {
- return UnknownType
-}
-
-// Value implements ref.Val.Value.
-func (u *Unknown) Value() any {
- return u
-}
-
-// IsUnknown returns whether the element ref.Val is an instance of *types.Unknown.
-func IsUnknown(val ref.Val) bool {
- switch val.(type) {
- case *Unknown:
- return true
- default:
- return false
- }
-}
-
-// MaybeMergeUnknowns determines whether an input value and another, possibly nil, unknown will produce
-// an unknown result.
-//
-// If the input `val` is another Unknown, then the result will be the merge of the `val` and the input
-// `unk`. If the `val` is not unknown, then the result will depend on whether the input `unk` is nil.
-// If both values are non-nil and unknown, then the return value will be a merge of both unknowns.
-func MaybeMergeUnknowns(val ref.Val, unk *Unknown) (*Unknown, bool) {
- src, isUnk := val.(*Unknown)
- if !isUnk {
- if unk != nil {
- return unk, true
- }
- return unk, false
- }
- return MergeUnknowns(src, unk), true
-}
-
-// MergeUnknowns combines two unknown values into a new unknown value.
-func MergeUnknowns(unk1, unk2 *Unknown) *Unknown {
- if unk1 == nil {
- return unk2
- }
- if unk2 == nil {
- return unk1
- }
- out := &Unknown{
- attributeTrails: make(map[int64][]*AttributeTrail, len(unk1.attributeTrails)+len(unk2.attributeTrails)),
- }
- for id, ats := range unk1.attributeTrails {
- out.attributeTrails[id] = ats
- }
- for id, ats := range unk2.attributeTrails {
- existing, found := out.attributeTrails[id]
- if !found {
- out.attributeTrails[id] = ats
- continue
- }
-
- for _, at := range ats {
- found := false
- for _, et := range existing {
- if at.Equal(et) {
- found = true
- break
- }
- }
- if !found {
- existing = append(existing, at)
- }
- }
- out.attributeTrails[id] = existing
- }
- return out
-}
-
-// int64Slice is an implementation of the sort.Interface
-type int64Slice []int64
-
-// Len returns the number of elements in the slice.
-func (x int64Slice) Len() int { return len(x) }
-
-// Less indicates whether the value at index i is less than the value at index j.
-func (x int64Slice) Less(i, j int) bool { return x[i] < x[j] }
-
-// Swap swaps the values at indices i and j in place.
-func (x int64Slice) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
-
-// Sort is a convenience method: x.Sort() calls Sort(x).
-func (x int64Slice) Sort() { sort.Sort(x) }
diff --git a/vendor/github.com/google/cel-go/common/types/util.go b/vendor/github.com/google/cel-go/common/types/util.go
deleted file mode 100644
index 71662eee3..000000000
--- a/vendor/github.com/google/cel-go/common/types/util.go
+++ /dev/null
@@ -1,48 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package types
-
-import (
- "github.com/google/cel-go/common/types/ref"
-)
-
-// IsUnknownOrError returns whether the input element ref.Val is an ErrType or UnknownType.
-func IsUnknownOrError(val ref.Val) bool {
- switch val.(type) {
- case *Unknown, *Err:
- return true
- }
- return false
-}
-
-// IsPrimitiveType returns whether the input element ref.Val is a primitive type.
-// Note, primitive types do not include well-known types such as Duration and Timestamp.
-func IsPrimitiveType(val ref.Val) bool {
- switch val.Type() {
- case BoolType, BytesType, DoubleType, IntType, StringType, UintType:
- return true
- }
- return false
-}
-
-// Equal returns whether the two ref.Val values are heterogeneously equivalent.
-func Equal(lhs ref.Val, rhs ref.Val) ref.Val {
- lNull := lhs == NullValue
- rNull := rhs == NullValue
- if lNull || rNull {
- return Bool(lNull == rNull)
- }
- return lhs.Equal(rhs)
-}
diff --git a/vendor/github.com/google/cel-go/interpreter/BUILD.bazel b/vendor/github.com/google/cel-go/interpreter/BUILD.bazel
deleted file mode 100644
index 220e23d47..000000000
--- a/vendor/github.com/google/cel-go/interpreter/BUILD.bazel
+++ /dev/null
@@ -1,74 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "activation.go",
- "attribute_patterns.go",
- "attributes.go",
- "decorators.go",
- "dispatcher.go",
- "evalstate.go",
- "interpretable.go",
- "interpreter.go",
- "optimizations.go",
- "planner.go",
- "prune.go",
- "runtimecost.go",
- ],
- importpath = "github.com/google/cel-go/interpreter",
- deps = [
- "//common:go_default_library",
- "//common/ast:go_default_library",
- "//common/containers:go_default_library",
- "//common/functions:go_default_library",
- "//common/operators:go_default_library",
- "//common/overloads:go_default_library",
- "//common/types:go_default_library",
- "//common/types/ref:go_default_library",
- "//common/types/traits:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//types/known/durationpb:go_default_library",
- "@org_golang_google_protobuf//types/known/structpb:go_default_library",
- "@org_golang_google_protobuf//types/known/timestamppb:go_default_library",
- "@org_golang_google_protobuf//types/known/wrapperspb:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- srcs = [
- "activation_test.go",
- "attribute_patterns_test.go",
- "attributes_test.go",
- "interpreter_test.go",
- "prune_test.go",
- "runtimecost_test.go",
- ],
- embed = [
- ":go_default_library",
- ],
- deps = [
- "//checker:go_default_library",
- "//common/containers:go_default_library",
- "//common/debug:go_default_library",
- "//common/decls:go_default_library",
- "//common/functions:go_default_library",
- "//common/operators:go_default_library",
- "//common/stdlib:go_default_library",
- "//common/types:go_default_library",
- "//parser:go_default_library",
- "//test:go_default_library",
- "//test/proto2pb:go_default_library",
- "//test/proto3pb:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//types/known/anypb:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/interpreter/activation.go b/vendor/github.com/google/cel-go/interpreter/activation.go
deleted file mode 100644
index a80264451..000000000
--- a/vendor/github.com/google/cel-go/interpreter/activation.go
+++ /dev/null
@@ -1,201 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "errors"
- "fmt"
- "sync"
-
- "github.com/google/cel-go/common/types/ref"
-)
-
-// Activation is used to resolve identifiers by name and references by id.
-//
-// An Activation is the primary mechanism by which a caller supplies input into a CEL program.
-type Activation interface {
- // ResolveName returns a value from the activation by qualified name, or false if the name
- // could not be found.
- ResolveName(name string) (any, bool)
-
- // Parent returns the parent of the current activation, may be nil.
- // If non-nil, the parent will be searched during resolve calls.
- Parent() Activation
-}
-
-// EmptyActivation returns a variable-free activation.
-func EmptyActivation() Activation {
- return emptyActivation{}
-}
-
-// emptyActivation is a variable-free activation.
-type emptyActivation struct{}
-
-func (emptyActivation) ResolveName(string) (any, bool) { return nil, false }
-func (emptyActivation) Parent() Activation { return nil }
-
-// NewActivation returns an activation based on a map-based binding where the map keys are
-// expected to be qualified names used with ResolveName calls.
-//
-// The input `bindings` may either be of type `Activation` or `map[string]any`.
-//
-// Lazy bindings may be supplied within the map-based input in either of the following forms:
-// - func() any
-// - func() ref.Val
-//
-// The output of the lazy binding will overwrite the variable reference in the internal map.
-//
-// Values which are not represented as ref.Val types on input may be adapted to a ref.Val using
-// the types.Adapter configured in the environment.
-func NewActivation(bindings any) (Activation, error) {
- if bindings == nil {
- return nil, errors.New("bindings must be non-nil")
- }
- a, isActivation := bindings.(Activation)
- if isActivation {
- return a, nil
- }
- m, isMap := bindings.(map[string]any)
- if !isMap {
- return nil, fmt.Errorf(
- "activation input must be an activation or map[string]interface: got %T",
- bindings)
- }
- return &mapActivation{bindings: m}, nil
-}
-
-// mapActivation implements Activation over a map of named values.
-//
-// Named bindings may lazily supply values by providing a function which accepts no arguments and
-// produces an interface value.
-type mapActivation struct {
- bindings map[string]any
-}
-
-// Parent implements the Activation interface method.
-func (a *mapActivation) Parent() Activation {
- return nil
-}
-
-// ResolveName implements the Activation interface method.
-func (a *mapActivation) ResolveName(name string) (any, bool) {
- obj, found := a.bindings[name]
- if !found {
- return nil, false
- }
- fn, isLazy := obj.(func() ref.Val)
- if isLazy {
- obj = fn()
- a.bindings[name] = obj
- }
- fnRaw, isLazy := obj.(func() any)
- if isLazy {
- obj = fnRaw()
- a.bindings[name] = obj
- }
- return obj, found
-}
-
-// hierarchicalActivation implements Activation and contains a parent and
-// child activation.
-type hierarchicalActivation struct {
- parent Activation
- child Activation
-}
-
-// Parent implements the Activation interface method.
-func (a *hierarchicalActivation) Parent() Activation {
- return a.parent
-}
-
-// ResolveName implements the Activation interface method.
-func (a *hierarchicalActivation) ResolveName(name string) (any, bool) {
- if object, found := a.child.ResolveName(name); found {
- return object, found
- }
- return a.parent.ResolveName(name)
-}
-
-// NewHierarchicalActivation takes two activations and produces a new one which prioritizes
-// resolution in the child first and parent(s) second.
-func NewHierarchicalActivation(parent Activation, child Activation) Activation {
- return &hierarchicalActivation{parent, child}
-}
-
-// NewPartialActivation returns an Activation which contains a list of AttributePattern values
-// representing field and index operations that should result in a 'types.Unknown' result.
-//
-// The `bindings` value may be any value type supported by the interpreter.NewActivation call,
-// but is typically either an existing Activation or map[string]any.
-func NewPartialActivation(bindings any,
- unknowns ...*AttributePattern) (PartialActivation, error) {
- a, err := NewActivation(bindings)
- if err != nil {
- return nil, err
- }
- return &partActivation{Activation: a, unknowns: unknowns}, nil
-}
-
-// PartialActivation extends the Activation interface with a set of UnknownAttributePatterns.
-type PartialActivation interface {
- Activation
-
- // UnknownAttributePaths returns a set of AttributePattern values which match Attribute
- // expressions for data accesses whose values are not yet known.
- UnknownAttributePatterns() []*AttributePattern
-}
-
-// partActivation is the default implementations of the PartialActivation interface.
-type partActivation struct {
- Activation
- unknowns []*AttributePattern
-}
-
-// UnknownAttributePatterns implements the PartialActivation interface method.
-func (a *partActivation) UnknownAttributePatterns() []*AttributePattern {
- return a.unknowns
-}
-
-// varActivation represents a single mutable variable binding.
-//
-// This activation type should only be used within folds as the fold loop controls the object
-// life-cycle.
-type varActivation struct {
- parent Activation
- name string
- val ref.Val
-}
-
-// Parent implements the Activation interface method.
-func (v *varActivation) Parent() Activation {
- return v.parent
-}
-
-// ResolveName implements the Activation interface method.
-func (v *varActivation) ResolveName(name string) (any, bool) {
- if name == v.name {
- return v.val, true
- }
- return v.parent.ResolveName(name)
-}
-
-var (
- // pool of var activations to reduce allocations during folds.
- varActivationPool = &sync.Pool{
- New: func() any {
- return &varActivation{}
- },
- }
-)
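
For reference, a short usage sketch of the activation helpers deleted above, assuming the cel-go interpreter and common/types packages at their usual import paths (variable names and values are illustrative). It shows a map-based activation, a lazy func() ref.Val binding that is evaluated once and cached back into the map, and child-before-parent resolution through NewHierarchicalActivation:

package main

import (
	"fmt"
	"time"

	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/common/types/ref"
	"github.com/google/cel-go/interpreter"
)

func main() {
	parent, _ := interpreter.NewActivation(map[string]any{"region": "emea"})
	child, _ := interpreter.NewActivation(map[string]any{
		// Lazy binding: invoked on the first ResolveName("now"); the result
		// overwrites the function in the underlying map.
		"now": func() ref.Val {
			return types.DefaultTypeAdapter.NativeToValue(time.Now())
		},
	})
	vars := interpreter.NewHierarchicalActivation(parent, child)

	region, found := vars.ResolveName("region") // falls back to the parent
	fmt.Println(region, found)                  // emea true
	now, found := vars.ResolveName("now") // resolved from the child, then cached
	fmt.Println(now, found)
}
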
diff --git a/vendor/github.com/google/cel-go/interpreter/attribute_patterns.go b/vendor/github.com/google/cel-go/interpreter/attribute_patterns.go
deleted file mode 100644
index 1fbaaf17e..000000000
--- a/vendor/github.com/google/cel-go/interpreter/attribute_patterns.go
+++ /dev/null
@@ -1,399 +0,0 @@
-// Copyright 2020 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "fmt"
-
- "github.com/google/cel-go/common/containers"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-// AttributePattern represents a top-level variable with an optional set of qualifier patterns.
-//
-// When using a CEL expression within a container, e.g. a package or namespace, the variable name
-// in the pattern must match the qualified name produced during the variable namespace resolution.
-// For example, if variable `c` appears in an expression whose container is `a.b`, the variable
-// name supplied to the pattern must be `a.b.c`
-//
-// The qualifier patterns for attribute matching must be one of the following:
-//
-// - valid map key type: string, int, uint, bool
-// - wildcard (*)
-//
-// Examples:
-//
-// 1. ns.myvar["complex-value"]
-// 2. ns.myvar["complex-value"][0]
-// 3. ns.myvar["complex-value"].*.name
-//
-// The first example is simple: match an attribute where the variable is 'ns.myvar' with a
-// field access on 'complex-value'. The second example expands the match to indicate that only
-// a specific index `0` should match. And lastly, the third example matches any indexed access
-// that later selects the 'name' field.
-type AttributePattern struct {
- variable string
- qualifierPatterns []*AttributeQualifierPattern
-}
-
-// NewAttributePattern produces a new mutable AttributePattern based on a variable name.
-func NewAttributePattern(variable string) *AttributePattern {
- return &AttributePattern{
- variable: variable,
- qualifierPatterns: []*AttributeQualifierPattern{},
- }
-}
-
-// QualString adds a string qualifier pattern to the AttributePattern. The string may be a valid
-// identifier, or string map key including empty string.
-func (apat *AttributePattern) QualString(pattern string) *AttributePattern {
- apat.qualifierPatterns = append(apat.qualifierPatterns,
- &AttributeQualifierPattern{value: pattern})
- return apat
-}
-
-// QualInt adds an int qualifier pattern to the AttributePattern. The index may be either a map or
-// list index.
-func (apat *AttributePattern) QualInt(pattern int64) *AttributePattern {
- apat.qualifierPatterns = append(apat.qualifierPatterns,
- &AttributeQualifierPattern{value: pattern})
- return apat
-}
-
-// QualUint adds an uint qualifier pattern for a map index operation to the AttributePattern.
-func (apat *AttributePattern) QualUint(pattern uint64) *AttributePattern {
- apat.qualifierPatterns = append(apat.qualifierPatterns,
- &AttributeQualifierPattern{value: pattern})
- return apat
-}
-
-// QualBool adds a bool qualifier pattern for a map index operation to the AttributePattern.
-func (apat *AttributePattern) QualBool(pattern bool) *AttributePattern {
- apat.qualifierPatterns = append(apat.qualifierPatterns,
- &AttributeQualifierPattern{value: pattern})
- return apat
-}
-
-// Wildcard adds a special sentinel qualifier pattern that will match any single qualifier.
-func (apat *AttributePattern) Wildcard() *AttributePattern {
- apat.qualifierPatterns = append(apat.qualifierPatterns,
- &AttributeQualifierPattern{wildcard: true})
- return apat
-}
-
-// VariableMatches returns true if the fully qualified variable matches the AttributePattern
-// fully qualified variable name.
-func (apat *AttributePattern) VariableMatches(variable string) bool {
- return apat.variable == variable
-}
-
-// QualifierPatterns returns the set of AttributeQualifierPattern values on the AttributePattern.
-func (apat *AttributePattern) QualifierPatterns() []*AttributeQualifierPattern {
- return apat.qualifierPatterns
-}
-
-// AttributeQualifierPattern holds a wildcard or valued qualifier pattern.
-type AttributeQualifierPattern struct {
- wildcard bool
- value any
-}
-
-// Matches returns true if the qualifier pattern is a wildcard, or the Qualifier implements the
-// qualifierValueEquator interface and its QualifierValueEquals returns true for the qualifier pattern.
-func (qpat *AttributeQualifierPattern) Matches(q Qualifier) bool {
- if qpat.wildcard {
- return true
- }
- qve, ok := q.(qualifierValueEquator)
- return ok && qve.QualifierValueEquals(qpat.value)
-}
-
-// qualifierValueEquator defines an interface for determining if an input value, of valid map key
-// type, is equal to the value held in the Qualifier. This interface is used by the
-// AttributeQualifierPattern to determine pattern matches for non-wildcard qualifier patterns.
-//
-// Note: Attribute values are also Qualifier values; however, Attributes are resolved before
-// qualification happens. This is an implementation detail, but one relevant to why the Attribute
-// types do not surface in the list of implementations.
-//
-// See: partialAttributeFactory.matchesUnknownPatterns for more details on how this interface is
-// used.
-type qualifierValueEquator interface {
- // QualifierValueEquals returns true if the input value is equal to the value held in the
- // Qualifier.
- QualifierValueEquals(value any) bool
-}
-
-// QualifierValueEquals implementation for boolean qualifiers.
-func (q *boolQualifier) QualifierValueEquals(value any) bool {
- bval, ok := value.(bool)
- return ok && q.value == bval
-}
-
-// QualifierValueEquals implementation for field qualifiers.
-func (q *fieldQualifier) QualifierValueEquals(value any) bool {
- sval, ok := value.(string)
- return ok && q.Name == sval
-}
-
-// QualifierValueEquals implementation for string qualifiers.
-func (q *stringQualifier) QualifierValueEquals(value any) bool {
- sval, ok := value.(string)
- return ok && q.value == sval
-}
-
-// QualifierValueEquals implementation for int qualifiers.
-func (q *intQualifier) QualifierValueEquals(value any) bool {
- return numericValueEquals(value, q.celValue)
-}
-
-// QualifierValueEquals implementation for uint qualifiers.
-func (q *uintQualifier) QualifierValueEquals(value any) bool {
- return numericValueEquals(value, q.celValue)
-}
-
-// QualifierValueEquals implementation for double qualifiers.
-func (q *doubleQualifier) QualifierValueEquals(value any) bool {
- return numericValueEquals(value, q.celValue)
-}
-
-// numericValueEquals uses CEL equality to determine whether two number values are equal.
-func numericValueEquals(value any, celValue ref.Val) bool {
- val := types.DefaultTypeAdapter.NativeToValue(value)
- return celValue.Equal(val) == types.True
-}
-
-// NewPartialAttributeFactory returns an AttributeFactory implementation capable of performing
-// AttributePattern matches with PartialActivation inputs.
-func NewPartialAttributeFactory(container *containers.Container,
- adapter types.Adapter,
- provider types.Provider) AttributeFactory {
- fac := NewAttributeFactory(container, adapter, provider)
- return &partialAttributeFactory{
- AttributeFactory: fac,
- container: container,
- adapter: adapter,
- provider: provider,
- }
-}
-
-type partialAttributeFactory struct {
- AttributeFactory
- container *containers.Container
- adapter types.Adapter
- provider types.Provider
-}
-
-// AbsoluteAttribute implementation of the AttributeFactory interface which wraps the
-// NamespacedAttribute resolution in an internal attributeMatcher object to dynamically match
-// unknown patterns from PartialActivation inputs if given.
-func (fac *partialAttributeFactory) AbsoluteAttribute(id int64, names ...string) NamespacedAttribute {
- attr := fac.AttributeFactory.AbsoluteAttribute(id, names...)
- return &attributeMatcher{fac: fac, NamespacedAttribute: attr}
-}
-
-// MaybeAttribute implementation of the AttributeFactory interface which ensures that the set of
-// 'maybe' NamespacedAttribute values are produced using the partialAttributeFactory rather than
-// the base AttributeFactory implementation.
-func (fac *partialAttributeFactory) MaybeAttribute(id int64, name string) Attribute {
- return &maybeAttribute{
- id: id,
- attrs: []NamespacedAttribute{
- fac.AbsoluteAttribute(id, fac.container.ResolveCandidateNames(name)...),
- },
- adapter: fac.adapter,
- provider: fac.provider,
- fac: fac,
- }
-}
-
-// matchesUnknownPatterns returns true if the variable names and qualifiers for a given
-// Attribute value match any of the ActivationPattern objects in the set of unknown activation
-// patterns on the given PartialActivation.
-//
-// For example, in the expression `a.b`, the Attribute is composed of variable `a`, with string
-// qualifier `b`. When a PartialActivation is supplied, it indicates that some or all of the data
-// provided in the input is unknown by specifying unknown AttributePatterns. An AttributePattern
-// that refers to variable `a` with a string qualifier of `c` will not match `a.b`; however, any
-// of the following patterns will match Attribute `a.b`:
-//
-// - `AttributePattern("a")`
-// - `AttributePattern("a").Wildcard()`
-// - `AttributePattern("a").QualString("b")`
-// - `AttributePattern("a").QualString("b").QualInt(0)`
-//
-// Any AttributePattern which overlaps an Attribute or vice-versa will produce an Unknown result
-// for the last pattern matched variable or qualifier in the Attribute. In the first matching
-// example, the expression id representing variable `a` would be listed in the Unknown result,
-// whereas in the other pattern examples, the qualifier `b` would be returned as the Unknown.
-func (fac *partialAttributeFactory) matchesUnknownPatterns(
- vars PartialActivation,
- attrID int64,
- variableNames []string,
- qualifiers []Qualifier) (*types.Unknown, error) {
- patterns := vars.UnknownAttributePatterns()
- candidateIndices := map[int]struct{}{}
- for _, variable := range variableNames {
- for i, pat := range patterns {
- if pat.VariableMatches(variable) {
- if len(qualifiers) == 0 {
- return types.NewUnknown(attrID, types.NewAttributeTrail(variable)), nil
- }
- candidateIndices[i] = struct{}{}
- }
- }
- }
- // Determine whether to return early if there are no candidate unknown patterns.
- if len(candidateIndices) == 0 {
- return nil, nil
- }
- // Resolve the attribute qualifiers into a static set. This prevents more dynamic
- // Attribute resolutions than necessary when there are multiple unknown patterns
- // that traverse the same Attribute-based qualifier field.
- newQuals := make([]Qualifier, len(qualifiers))
- for i, qual := range qualifiers {
- attr, isAttr := qual.(Attribute)
- if isAttr {
- val, err := attr.Resolve(vars)
- if err != nil {
- return nil, err
- }
- // If this resolution behavior ever changes, new implementations of the
- // qualifierValueEquator may be required to handle proper resolution.
- qual, err = fac.NewQualifier(nil, qual.ID(), val, attr.IsOptional())
- if err != nil {
- return nil, err
- }
- }
- newQuals[i] = qual
- }
- // Determine whether any of the unknown patterns match.
- for patIdx := range candidateIndices {
- pat := patterns[patIdx]
- isUnk := true
- matchExprID := attrID
- qualPats := pat.QualifierPatterns()
- for i, qual := range newQuals {
- if i >= len(qualPats) {
- break
- }
- matchExprID = qual.ID()
- qualPat := qualPats[i]
- // Note, the AttributeQualifierPattern relies on the input Qualifier not being an
- // Attribute, since there is no way to resolve the Attribute with the information
- // provided to the Matches call.
- if !qualPat.Matches(qual) {
- isUnk = false
- break
- }
- }
- if isUnk {
- attr := types.NewAttributeTrail(pat.variable)
- for i := 0; i < len(qualPats) && i < len(newQuals); i++ {
- if qual, ok := newQuals[i].(ConstantQualifier); ok {
- switch v := qual.Value().Value().(type) {
- case bool:
- types.QualifyAttribute[bool](attr, v)
- case float64:
- types.QualifyAttribute[int64](attr, int64(v))
- case int64:
- types.QualifyAttribute[int64](attr, v)
- case string:
- types.QualifyAttribute[string](attr, v)
- case uint64:
- types.QualifyAttribute[uint64](attr, v)
- default:
- types.QualifyAttribute[string](attr, fmt.Sprintf("%v", v))
- }
- } else {
- types.QualifyAttribute[string](attr, "*")
- }
- }
- return types.NewUnknown(matchExprID, attr), nil
- }
- }
- return nil, nil
-}
-
-// attributeMatcher embeds the NamespacedAttribute interface which allows it to participate in
-// AttributePattern matching against Attribute values without having to modify the code paths that
-// identify Attributes in expressions.
-type attributeMatcher struct {
- NamespacedAttribute
- qualifiers []Qualifier
- fac *partialAttributeFactory
-}
-
-// AddQualifier implements the Attribute interface method.
-func (m *attributeMatcher) AddQualifier(qual Qualifier) (Attribute, error) {
- // Add the qualifier to the embedded NamespacedAttribute. If the input to the Resolve
- // method is not a PartialActivation, or does not match an unknown attribute pattern, the
- // Resolve method is directly invoked on the underlying NamespacedAttribute.
- _, err := m.NamespacedAttribute.AddQualifier(qual)
- if err != nil {
- return nil, err
- }
- // The attributeMatcher overloads TryResolve and will attempt to match unknown patterns against
- // the variable name and qualifier set contained within the Attribute. These values are not
- // directly inspectable on the top-level NamespacedAttribute interface and so are tracked within
- // the attributeMatcher.
- m.qualifiers = append(m.qualifiers, qual)
- return m, nil
-}
-
-// Resolve is an implementation of the NamespacedAttribute interface method which tests
-// for matching unknown attribute patterns and returns types.Unknown if present. Otherwise,
-// the standard Resolve logic applies.
-func (m *attributeMatcher) Resolve(vars Activation) (any, error) {
- id := m.NamespacedAttribute.ID()
- // Bug in how partial activation is resolved, should search parents as well.
- partial, isPartial := toPartialActivation(vars)
- if isPartial {
- unk, err := m.fac.matchesUnknownPatterns(
- partial,
- id,
- m.CandidateVariableNames(),
- m.qualifiers)
- if err != nil {
- return nil, err
- }
- if unk != nil {
- return unk, nil
- }
- }
- return m.NamespacedAttribute.Resolve(vars)
-}
-
-// Qualify is an implementation of the Qualifier interface method.
-func (m *attributeMatcher) Qualify(vars Activation, obj any) (any, error) {
- return attrQualify(m.fac, vars, obj, m)
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (m *attributeMatcher) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return attrQualifyIfPresent(m.fac, vars, obj, m, presenceOnly)
-}
-
-func toPartialActivation(vars Activation) (PartialActivation, bool) {
- pv, ok := vars.(PartialActivation)
- if ok {
- return pv, true
- }
- if vars.Parent() != nil {
- return toPartialActivation(vars.Parent())
- }
- return nil, false
-}
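
For reference, a rough sketch of how the pattern machinery deleted above fits together, assuming the cel-go interpreter, common/containers, and common/types packages (the variable "request", the ids, and the values are illustrative): a PartialActivation carries AttributePattern values, and a partial attribute factory resolves matching attribute accesses to a types.Unknown rather than a missing-attribute error.

package main

import (
	"fmt"

	"github.com/google/cel-go/common/containers"
	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/interpreter"
)

func main() {
	reg, err := types.NewRegistry()
	if err != nil {
		panic(err)
	}
	fac := interpreter.NewPartialAttributeFactory(containers.DefaultContainer, types.DefaultTypeAdapter, reg)

	// Declare request.auth as unknown while the rest of request is provided.
	vars, _ := interpreter.NewPartialActivation(
		map[string]any{"request": map[string]any{"path": "/healthz"}},
		interpreter.NewAttributePattern("request").QualString("auth"),
	)

	// Build the attribute request.auth and resolve it against the partial activation.
	var attr interpreter.Attribute = fac.AbsoluteAttribute(1, "request")
	qual, _ := fac.NewQualifier(nil, 2, "auth", false)
	attr, _ = attr.AddQualifier(qual)

	val, err := attr.Resolve(vars)
	fmt.Printf("%v %v\n", val, err) // a *types.Unknown trail for request.auth, err == nil
}
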
diff --git a/vendor/github.com/google/cel-go/interpreter/attributes.go b/vendor/github.com/google/cel-go/interpreter/attributes.go
deleted file mode 100644
index ca97bdfcf..000000000
--- a/vendor/github.com/google/cel-go/interpreter/attributes.go
+++ /dev/null
@@ -1,1337 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "fmt"
- "strings"
-
- "github.com/google/cel-go/common/containers"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-)
-
-// AttributeFactory provides methods for creating Attribute and Qualifier values.
-type AttributeFactory interface {
- // AbsoluteAttribute creates an attribute that refers to a top-level variable name.
- //
- // Checked expressions generate absolute attribute with a single name.
- // Parse-only expressions may have more than one possible absolute identifier when the
- // expression is created within a container, e.g. package or namespace.
- //
- // When there is more than one name supplied to the AbsoluteAttribute call, the names
- // must be in CEL's namespace resolution order. The name arguments provided here are
- // returned in the same order as they were provided by the NamespacedAttribute
- // CandidateVariableNames method.
- AbsoluteAttribute(id int64, names ...string) NamespacedAttribute
-
- // ConditionalAttribute creates an attribute with two Attribute branches, where the Attribute
- // that is resolved depends on the boolean evaluation of the input 'expr'.
- ConditionalAttribute(id int64, expr Interpretable, t, f Attribute) Attribute
-
- // MaybeAttribute creates an attribute that refers to either a field selection or a namespaced
- // variable name.
- //
- // Only expressions which have not been type-checked may generate oneof attributes.
- MaybeAttribute(id int64, name string) Attribute
-
- // RelativeAttribute creates an attribute whose value is a qualification of a dynamic
- // computation rather than a static variable reference.
- RelativeAttribute(id int64, operand Interpretable) Attribute
-
- // NewQualifier creates a qualifier on the target object with a given value.
- //
- // The 'val' may be an Attribute or any proto-supported map key type: bool, int, string, uint.
- //
- // The qualifier may consider the object type being qualified, if present. If absent, the
- // qualification should be considered dynamic and the qualification should still work, though
- // it may be sub-optimal.
- NewQualifier(objType *types.Type, qualID int64, val any, opt bool) (Qualifier, error)
-}
-
-// Qualifier is a marker interface for designating different qualifier values and where they appear
-// within field selections and index call expressions (`_[_]`).
-type Qualifier interface {
- // ID where the qualifier appears within an expression.
- ID() int64
-
- // IsOptional specifies whether the qualifier is optional.
- // Instead of a direct qualification, an optional qualifier will be resolved via QualifyIfPresent
- // rather than Qualify. A non-optional qualifier may also be resolved through QualifyIfPresent if
- // the object to qualify is itself optional.
- IsOptional() bool
-
- // Qualify performs a qualification, e.g. field selection, on the input object and returns
- // the value of the access and whether the value was set. A non-nil value with a false presence
- // test result indicates that the value being returned is the default value.
- Qualify(vars Activation, obj any) (any, error)
-
- // QualifyIfPresent qualifies the object if the qualifier is declared or defined on the object.
- // The 'presenceOnly' flag indicates that the value is not necessary, just a boolean status as
- // to whether the qualifier is present.
- QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error)
-}
-
-// ConstantQualifier interface embeds the Qualifier interface and provides an option to inspect the
-// qualifier's constant value.
-//
-// Non-constant qualifiers are of Attribute type.
-type ConstantQualifier interface {
- Qualifier
-
- // Value returns the constant value associated with the qualifier.
- Value() ref.Val
-}
-
-// Attribute values are a variable or value with an optional set of qualifiers, such as field, key,
-// or index accesses.
-type Attribute interface {
- Qualifier
-
-	// AddQualifier adds a qualifier to the Attribute, or returns an error if the qualification is not a valid qualifier type.
- AddQualifier(Qualifier) (Attribute, error)
-
- // Resolve returns the value of the Attribute and whether it was present given an Activation.
- // For objects which support safe traversal, the value may be non-nil and the presence flag be false.
- //
- // If an error is encountered during attribute resolution, it will be returned immediately.
- // If the attribute cannot be resolved within the Activation, the result must be: `nil`, `error`
- // with the error indicating which variable was missing.
- Resolve(Activation) (any, error)
-}
-
-// NamespacedAttribute values are a variable within a namespace, and an optional set of qualifiers
-// such as field, key, or index accesses.
-type NamespacedAttribute interface {
- Attribute
-
- // CandidateVariableNames returns the possible namespaced variable names for this Attribute in
- // the CEL namespace resolution order.
- CandidateVariableNames() []string
-
- // Qualifiers returns the list of qualifiers associated with the Attribute.
- Qualifiers() []Qualifier
-}
-
-// NewAttributeFactory returns a default AttributeFactory which produces Attribute values
-// capable of resolving types by simple names and qualifying the values using the supported qualifier
-// types: bool, int, string, and uint.
-func NewAttributeFactory(cont *containers.Container, a types.Adapter, p types.Provider) AttributeFactory {
- return &attrFactory{
- container: cont,
- adapter: a,
- provider: p,
- }
-}
-
-type attrFactory struct {
- container *containers.Container
- adapter types.Adapter
- provider types.Provider
-}
-
-// AbsoluteAttribute refers to a variable value and an optional qualifier path.
-//
-// The namespaceNames represent the names the variable could have based on namespace
-// resolution rules.
-func (r *attrFactory) AbsoluteAttribute(id int64, names ...string) NamespacedAttribute {
- return &absoluteAttribute{
- id: id,
- namespaceNames: names,
- qualifiers: []Qualifier{},
- adapter: r.adapter,
- provider: r.provider,
- fac: r,
- }
-}
-
-// ConditionalAttribute supports the case where an attribute selection may occur on a conditional
-// expression, e.g. (cond ? a : b).c
-func (r *attrFactory) ConditionalAttribute(id int64, expr Interpretable, t, f Attribute) Attribute {
- return &conditionalAttribute{
- id: id,
- expr: expr,
- truthy: t,
- falsy: f,
- adapter: r.adapter,
- fac: r,
- }
-}
-
-// MaybeAttribute collects variants of unchecked AbsoluteAttribute values which could either be
-// direct variable accesses or some combination of variable access with qualification.
-func (r *attrFactory) MaybeAttribute(id int64, name string) Attribute {
- return &maybeAttribute{
- id: id,
- attrs: []NamespacedAttribute{
- r.AbsoluteAttribute(id, r.container.ResolveCandidateNames(name)...),
- },
- adapter: r.adapter,
- provider: r.provider,
- fac: r,
- }
-}
-
-// RelativeAttribute refers to an expression and an optional qualifier path.
-func (r *attrFactory) RelativeAttribute(id int64, operand Interpretable) Attribute {
- return &relativeAttribute{
- id: id,
- operand: operand,
- qualifiers: []Qualifier{},
- adapter: r.adapter,
- fac: r,
- }
-}
-
-// NewQualifier is an implementation of the AttributeFactory interface.
-func (r *attrFactory) NewQualifier(objType *types.Type, qualID int64, val any, opt bool) (Qualifier, error) {
- // Before creating a new qualifier check to see if this is a protobuf message field access.
- // If so, use the precomputed GetFrom qualification method rather than the standard
- // stringQualifier.
- str, isStr := val.(string)
- if isStr && objType != nil && objType.Kind() == types.StructKind {
- ft, found := r.provider.FindStructFieldType(objType.TypeName(), str)
- if found && ft.IsSet != nil && ft.GetFrom != nil {
- return &fieldQualifier{
- id: qualID,
- Name: str,
- FieldType: ft,
- adapter: r.adapter,
- optional: opt,
- }, nil
- }
- }
- return newQualifier(r.adapter, qualID, val, opt)
-}
-
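
For reference, a minimal sketch tying the default factory above together, assuming the same cel-go packages as before (the variable "request", the ids, and the values are illustrative): an absolute attribute plus a string qualifier resolves a nested map value through an Activation.

package main

import (
	"fmt"

	"github.com/google/cel-go/common/containers"
	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/interpreter"
)

func main() {
	reg, err := types.NewRegistry()
	if err != nil {
		panic(err)
	}
	fac := interpreter.NewAttributeFactory(containers.DefaultContainer, types.DefaultTypeAdapter, reg)

	vars, _ := interpreter.NewActivation(map[string]any{
		"request": map[string]any{"path": "/healthz"},
	})

	// request.path: an absolute attribute for "request" plus a string qualifier "path".
	var attr interpreter.Attribute = fac.AbsoluteAttribute(1, "request")
	qual, _ := fac.NewQualifier(nil, 2, "path", false)
	attr, _ = attr.AddQualifier(qual)

	val, err := attr.Resolve(vars)
	fmt.Println(val, err) // /healthz <nil>
}
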
-type absoluteAttribute struct {
- id int64
- // namespaceNames represent the names the variable could have based on declared container
- // (package) of the expression.
- namespaceNames []string
- qualifiers []Qualifier
- adapter types.Adapter
- provider types.Provider
- fac AttributeFactory
-}
-
-// ID implements the Attribute interface method.
-func (a *absoluteAttribute) ID() int64 {
- qualCount := len(a.qualifiers)
- if qualCount == 0 {
- return a.id
- }
- return a.qualifiers[qualCount-1].ID()
-}
-
-// IsOptional returns trivially false for an attribute as the attribute represents a fully
-// qualified variable name. If the attribute is used in an optional manner, then an attrQualifier
-// is created and marks the attribute as optional.
-func (a *absoluteAttribute) IsOptional() bool {
- return false
-}
-
-// AddQualifier implements the Attribute interface method.
-func (a *absoluteAttribute) AddQualifier(qual Qualifier) (Attribute, error) {
- a.qualifiers = append(a.qualifiers, qual)
- return a, nil
-}
-
-// CandidateVariableNames implements the NamespacedAttribute interface method.
-func (a *absoluteAttribute) CandidateVariableNames() []string {
- return a.namespaceNames
-}
-
-// Qualifiers returns the list of Qualifier instances associated with the namespaced attribute.
-func (a *absoluteAttribute) Qualifiers() []Qualifier {
- return a.qualifiers
-}
-
-// Qualify is an implementation of the Qualifier interface method.
-func (a *absoluteAttribute) Qualify(vars Activation, obj any) (any, error) {
- return attrQualify(a.fac, vars, obj, a)
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (a *absoluteAttribute) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return attrQualifyIfPresent(a.fac, vars, obj, a, presenceOnly)
-}
-
-// String implements the Stringer interface method.
-func (a *absoluteAttribute) String() string {
- return fmt.Sprintf("id: %v, names: %v", a.id, a.namespaceNames)
-}
-
-// Resolve returns the resolved Attribute value given the Activation, or error if the Attribute
-// variable is not found, or if its Qualifiers cannot be applied successfully.
-//
-// If the variable name cannot be found as an Activation variable or in the TypeProvider as
-// a type, then the result is `nil`, `error` with the error indicating the name of the first
-// variable searched as missing.
-func (a *absoluteAttribute) Resolve(vars Activation) (any, error) {
- for _, nm := range a.namespaceNames {
-		// If the variable is found, process it. Otherwise, fall through to the type
-		// identifier check below before returning a missing-attribute error.
- obj, found := vars.ResolveName(nm)
- if found {
- obj, isOpt, err := applyQualifiers(vars, obj, a.qualifiers)
- if err != nil {
- return nil, err
- }
- if isOpt {
- val := a.adapter.NativeToValue(obj)
- if types.IsUnknown(val) {
- return val, nil
- }
- return types.OptionalOf(val), nil
- }
- return obj, nil
- }
- // Attempt to resolve the qualified type name if the name is not a variable identifier.
- typ, found := a.provider.FindIdent(nm)
- if found {
- if len(a.qualifiers) == 0 {
- return typ, nil
- }
- }
- }
- var attrNames strings.Builder
- for i, nm := range a.namespaceNames {
- if i != 0 {
- attrNames.WriteString(", ")
- }
- attrNames.WriteString(nm)
- }
- return nil, missingAttribute(attrNames.String())
-}
-
-type conditionalAttribute struct {
- id int64
- expr Interpretable
- truthy Attribute
- falsy Attribute
- adapter types.Adapter
- fac AttributeFactory
-}
-
-// ID is an implementation of the Attribute interface method.
-func (a *conditionalAttribute) ID() int64 {
- // There's a field access after the conditional.
- if a.truthy.ID() == a.falsy.ID() {
- return a.truthy.ID()
- }
- // Otherwise return the conditional id as the consistent id being tracked.
- return a.id
-}
-
-// IsOptional returns trivially false for an attribute as the attribute represents a fully
-// qualified variable name. If the attribute is used in an optional manner, then an attrQualifier
-// is created and marks the attribute as optional.
-func (a *conditionalAttribute) IsOptional() bool {
- return false
-}
-
-// AddQualifier appends the same qualifier to both sides of the conditional, in effect managing
-// the qualification of alternate attributes.
-func (a *conditionalAttribute) AddQualifier(qual Qualifier) (Attribute, error) {
- _, err := a.truthy.AddQualifier(qual)
- if err != nil {
- return nil, err
- }
- _, err = a.falsy.AddQualifier(qual)
- if err != nil {
- return nil, err
- }
- return a, nil
-}
-
-// Qualify is an implementation of the Qualifier interface method.
-func (a *conditionalAttribute) Qualify(vars Activation, obj any) (any, error) {
- return attrQualify(a.fac, vars, obj, a)
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (a *conditionalAttribute) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return attrQualifyIfPresent(a.fac, vars, obj, a, presenceOnly)
-}
-
-// Resolve evaluates the condition, and then resolves the truthy or falsy branch accordingly.
-func (a *conditionalAttribute) Resolve(vars Activation) (any, error) {
- val := a.expr.Eval(vars)
- if val == types.True {
- return a.truthy.Resolve(vars)
- }
- if val == types.False {
- return a.falsy.Resolve(vars)
- }
- if types.IsUnknown(val) {
- return val, nil
- }
- return nil, types.MaybeNoSuchOverloadErr(val).(*types.Err)
-}
-
-// String is an implementation of the Stringer interface method.
-func (a *conditionalAttribute) String() string {
- return fmt.Sprintf("id: %v, truthy attribute: %v, falsy attribute: %v", a.id, a.truthy, a.falsy)
-}
-
-type maybeAttribute struct {
- id int64
- attrs []NamespacedAttribute
- adapter types.Adapter
- provider types.Provider
- fac AttributeFactory
-}
-
-// ID is an implementation of the Attribute interface method.
-func (a *maybeAttribute) ID() int64 {
- return a.attrs[0].ID()
-}
-
-// IsOptional returns trivially false for an attribute as the attribute represents a fully
-// qualified variable name. If the attribute is used in an optional manner, then an attrQualifier
-// is created and marks the attribute as optional.
-func (a *maybeAttribute) IsOptional() bool {
- return false
-}
-
-// AddQualifier adds a qualifier to each possible attribute variant, and also creates
-// a new namespaced variable from the qualified value.
-//
-// The algorithm for building the maybe attribute is as follows:
-//
-// 1. Create a maybe attribute from a simple identifier when it occurs in a parsed-only expression
-//
-// mb = MaybeAttribute(, "a")
-//
-// Initializing the maybe attribute creates an absolute attribute internally which includes the
-// possible namespaced names of the attribute. In this example, let's assume we are in namespace
-// 'ns', then the maybe is either one of the following variable names:
-//
-// possible variables names -- ns.a, a
-//
-// 2. Adding a qualifier to the maybe means that the variable name could be a longer qualified
-// name, or a field selection on one of the possible variable names produced earlier:
-//
-// mb.AddQualifier("b")
-//
-// possible variables names -- ns.a.b, a.b
-// possible field selection -- ns.a['b'], a['b']
-//
-// If none of the attributes within the maybe resolves a value, the result is an error.
-func (a *maybeAttribute) AddQualifier(qual Qualifier) (Attribute, error) {
- str := ""
- isStr := false
- cq, isConst := qual.(ConstantQualifier)
- if isConst {
- str, isStr = cq.Value().Value().(string)
- }
- var augmentedNames []string
- // First add the qualifier to all existing attributes in the oneof.
- for _, attr := range a.attrs {
- if isStr && len(attr.Qualifiers()) == 0 {
- candidateVars := attr.CandidateVariableNames()
- augmentedNames = make([]string, len(candidateVars))
- for i, name := range candidateVars {
- augmentedNames[i] = fmt.Sprintf("%s.%s", name, str)
- }
- }
- _, err := attr.AddQualifier(qual)
- if err != nil {
- return nil, err
- }
- }
- // Next, ensure the most specific variable / type reference is searched first.
- if len(augmentedNames) != 0 {
- a.attrs = append([]NamespacedAttribute{a.fac.AbsoluteAttribute(qual.ID(), augmentedNames...)}, a.attrs...)
- }
- return a, nil
-}
-
-// Qualify is an implementation of the Qualifier interface method.
-func (a *maybeAttribute) Qualify(vars Activation, obj any) (any, error) {
- return attrQualify(a.fac, vars, obj, a)
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (a *maybeAttribute) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return attrQualifyIfPresent(a.fac, vars, obj, a, presenceOnly)
-}
-
-// Resolve follows the variable resolution rules to determine whether the attribute is a variable
-// or a field selection.
-func (a *maybeAttribute) Resolve(vars Activation) (any, error) {
- var maybeErr error
- for _, attr := range a.attrs {
- obj, err := attr.Resolve(vars)
- // Return an error if one is encountered.
- if err != nil {
- resErr, ok := err.(*resolutionError)
- if !ok {
- return nil, err
- }
- // If this was not a missing variable error, return it.
- if !resErr.isMissingAttribute() {
- return nil, err
- }
- // When the variable is missing in a maybe attribute we defer erroring.
- if maybeErr == nil {
- maybeErr = resErr
- }
- // Continue attempting to resolve possible variables.
- continue
- }
- return obj, nil
- }
- // Else, produce a no such attribute error.
- return nil, maybeErr
-}
-
-// String is an implementation of the Stringer interface method.
-func (a *maybeAttribute) String() string {
- return fmt.Sprintf("id: %v, attributes: %v", a.id, a.attrs)
-}
-
-type relativeAttribute struct {
- id int64
- operand Interpretable
- qualifiers []Qualifier
- adapter types.Adapter
- fac AttributeFactory
-}
-
-// ID is an implementation of the Attribute interface method.
-func (a *relativeAttribute) ID() int64 {
- qualCount := len(a.qualifiers)
- if qualCount == 0 {
- return a.id
- }
- return a.qualifiers[qualCount-1].ID()
-}
-
-// IsOptional returns trivially false for an attribute as the attribute represents a fully
-// qualified variable name. If the attribute is used in an optional manner, then an attrQualifier
-// is created and marks the attribute as optional.
-func (a *relativeAttribute) IsOptional() bool {
- return false
-}
-
-// AddQualifier implements the Attribute interface method.
-func (a *relativeAttribute) AddQualifier(qual Qualifier) (Attribute, error) {
- a.qualifiers = append(a.qualifiers, qual)
- return a, nil
-}
-
-// Qualify is an implementation of the Qualifier interface method.
-func (a *relativeAttribute) Qualify(vars Activation, obj any) (any, error) {
- return attrQualify(a.fac, vars, obj, a)
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (a *relativeAttribute) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return attrQualifyIfPresent(a.fac, vars, obj, a, presenceOnly)
-}
-
-// Resolve expression value and qualifier relative to the expression result.
-func (a *relativeAttribute) Resolve(vars Activation) (any, error) {
- // First, evaluate the operand.
- v := a.operand.Eval(vars)
- if types.IsError(v) {
- return nil, v.(*types.Err)
- }
- if types.IsUnknown(v) {
- return v, nil
- }
- obj, isOpt, err := applyQualifiers(vars, v, a.qualifiers)
- if err != nil {
- return nil, err
- }
- if isOpt {
- val := a.adapter.NativeToValue(obj)
- if types.IsUnknown(val) {
- return val, nil
- }
- return types.OptionalOf(val), nil
- }
- return obj, nil
-}
-
-// String is an implementation of the Stringer interface method.
-func (a *relativeAttribute) String() string {
- return fmt.Sprintf("id: %v, operand: %v", a.id, a.operand)
-}
-
-func newQualifier(adapter types.Adapter, id int64, v any, opt bool) (Qualifier, error) {
- var qual Qualifier
- switch val := v.(type) {
- case Attribute:
- // Note, attributes are initially identified as non-optional since they represent a top-level
- // field access; however, when used as a relative qualifier, e.g. a[?b.c], then an attrQualifier
- // is created which intercepts the IsOptional check for the attribute in order to return the
- // correct result.
- return &attrQualifier{
- id: id,
- Attribute: val,
- optional: opt,
- }, nil
- case string:
- qual = &stringQualifier{
- id: id,
- value: val,
- celValue: types.String(val),
- adapter: adapter,
- optional: opt,
- }
- case int:
- qual = &intQualifier{
- id: id, value: int64(val), celValue: types.Int(val), adapter: adapter, optional: opt,
- }
- case int32:
- qual = &intQualifier{
- id: id, value: int64(val), celValue: types.Int(val), adapter: adapter, optional: opt,
- }
- case int64:
- qual = &intQualifier{
- id: id, value: val, celValue: types.Int(val), adapter: adapter, optional: opt,
- }
- case uint:
- qual = &uintQualifier{
- id: id, value: uint64(val), celValue: types.Uint(val), adapter: adapter, optional: opt,
- }
- case uint32:
- qual = &uintQualifier{
- id: id, value: uint64(val), celValue: types.Uint(val), adapter: adapter, optional: opt,
- }
- case uint64:
- qual = &uintQualifier{
- id: id, value: val, celValue: types.Uint(val), adapter: adapter, optional: opt,
- }
- case bool:
- qual = &boolQualifier{
- id: id, value: val, celValue: types.Bool(val), adapter: adapter, optional: opt,
- }
- case float32:
- qual = &doubleQualifier{
- id: id,
- value: float64(val),
- celValue: types.Double(val),
- adapter: adapter,
- optional: opt,
- }
- case float64:
- qual = &doubleQualifier{
- id: id, value: val, celValue: types.Double(val), adapter: adapter, optional: opt,
- }
- case types.String:
- qual = &stringQualifier{
- id: id, value: string(val), celValue: val, adapter: adapter, optional: opt,
- }
- case types.Int:
- qual = &intQualifier{
- id: id, value: int64(val), celValue: val, adapter: adapter, optional: opt,
- }
- case types.Uint:
- qual = &uintQualifier{
- id: id, value: uint64(val), celValue: val, adapter: adapter, optional: opt,
- }
- case types.Bool:
- qual = &boolQualifier{
- id: id, value: bool(val), celValue: val, adapter: adapter, optional: opt,
- }
- case types.Double:
- qual = &doubleQualifier{
- id: id, value: float64(val), celValue: val, adapter: adapter, optional: opt,
- }
- case *types.Unknown:
- qual = &unknownQualifier{id: id, value: val}
- default:
- if q, ok := v.(Qualifier); ok {
- return q, nil
- }
- return nil, fmt.Errorf("invalid qualifier type: %T", v)
- }
- return qual, nil
-}
-
-type attrQualifier struct {
- id int64
- Attribute
- optional bool
-}
-
-// ID implements the Qualifier interface method and returns the qualification instruction id
-// rather than the attribute id.
-func (q *attrQualifier) ID() int64 {
- return q.id
-}
-
-// IsOptional implements the Qualifier interface method.
-func (q *attrQualifier) IsOptional() bool {
- return q.optional
-}
-
-type stringQualifier struct {
- id int64
- value string
- celValue ref.Val
- adapter types.Adapter
- optional bool
-}
-
-// ID is an implementation of the Qualifier interface method.
-func (q *stringQualifier) ID() int64 {
- return q.id
-}
-
-// IsOptional implements the Qualifier interface method.
-func (q *stringQualifier) IsOptional() bool {
- return q.optional
-}
-
-// Qualify implements the Qualifier interface method.
-func (q *stringQualifier) Qualify(vars Activation, obj any) (any, error) {
- val, _, err := q.qualifyInternal(vars, obj, false, false)
- return val, err
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (q *stringQualifier) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return q.qualifyInternal(vars, obj, true, presenceOnly)
-}
-
-func (q *stringQualifier) qualifyInternal(vars Activation, obj any, presenceTest, presenceOnly bool) (any, bool, error) {
- s := q.value
- switch o := obj.(type) {
- case map[string]any:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]string:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]int:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]int32:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]int64:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]uint:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]uint32:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]uint64:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]float32:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]float64:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- case map[string]bool:
- obj, isKey := o[s]
- if isKey {
- return obj, true, nil
- }
- default:
- return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
- }
- if presenceTest {
- return nil, false, nil
- }
- return nil, false, missingKey(q.celValue)
-}
-
-// Value implements the ConstantQualifier interface
-func (q *stringQualifier) Value() ref.Val {
- return q.celValue
-}
-
-type intQualifier struct {
- id int64
- value int64
- celValue ref.Val
- adapter types.Adapter
- optional bool
-}
-
-// ID is an implementation of the Qualifier interface method.
-func (q *intQualifier) ID() int64 {
- return q.id
-}
-
-// IsOptional implements the Qualifier interface method.
-func (q *intQualifier) IsOptional() bool {
- return q.optional
-}
-
-// Qualify implements the Qualifier interface method.
-func (q *intQualifier) Qualify(vars Activation, obj any) (any, error) {
- val, _, err := q.qualifyInternal(vars, obj, false, false)
- return val, err
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (q *intQualifier) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return q.qualifyInternal(vars, obj, true, presenceOnly)
-}
-
-func (q *intQualifier) qualifyInternal(vars Activation, obj any, presenceTest, presenceOnly bool) (any, bool, error) {
- i := q.value
- var isMap bool
- switch o := obj.(type) {
- // The specialized map types supported by an int qualifier are considerably fewer than the set
- // of specialized map types supported by string qualifiers since they are less frequently used
- // than string-based map keys. Additional specializations may be added in the future if
- // desired.
- case map[int]any:
- isMap = true
- obj, isKey := o[int(i)]
- if isKey {
- return obj, true, nil
- }
- case map[int32]any:
- isMap = true
- obj, isKey := o[int32(i)]
- if isKey {
- return obj, true, nil
- }
- case map[int64]any:
- isMap = true
- obj, isKey := o[i]
- if isKey {
- return obj, true, nil
- }
- case []any:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []string:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []int:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []int32:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []int64:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []uint:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []uint32:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []uint64:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []float32:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []float64:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- case []bool:
- isIndex := i >= 0 && i < int64(len(o))
- if isIndex {
- return o[i], true, nil
- }
- default:
- return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
- }
- if presenceTest {
- return nil, false, nil
- }
- if isMap {
- return nil, false, missingKey(q.celValue)
- }
- return nil, false, missingIndex(q.celValue)
-}
-
-// Value implements the ConstantQualifier interface
-func (q *intQualifier) Value() ref.Val {
- return q.celValue
-}
-
-type uintQualifier struct {
- id int64
- value uint64
- celValue ref.Val
- adapter types.Adapter
- optional bool
-}
-
-// ID is an implementation of the Qualifier interface method.
-func (q *uintQualifier) ID() int64 {
- return q.id
-}
-
-// IsOptional implements the Qualifier interface method.
-func (q *uintQualifier) IsOptional() bool {
- return q.optional
-}
-
-// Qualify implements the Qualifier interface method.
-func (q *uintQualifier) Qualify(vars Activation, obj any) (any, error) {
- val, _, err := q.qualifyInternal(vars, obj, false, false)
- return val, err
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (q *uintQualifier) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return q.qualifyInternal(vars, obj, true, presenceOnly)
-}
-
-func (q *uintQualifier) qualifyInternal(vars Activation, obj any, presenceTest, presenceOnly bool) (any, bool, error) {
- u := q.value
- switch o := obj.(type) {
- // The specialized map types supported by a uint qualifier are considerably fewer than the set
- // of specialized map types supported by string qualifiers since they are less frequently used
- // than string-based map keys. Additional specializations may be added in the future if
- // desired.
- case map[uint]any:
- obj, isKey := o[uint(u)]
- if isKey {
- return obj, true, nil
- }
- case map[uint32]any:
- obj, isKey := o[uint32(u)]
- if isKey {
- return obj, true, nil
- }
- case map[uint64]any:
- obj, isKey := o[u]
- if isKey {
- return obj, true, nil
- }
- default:
- return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
- }
- if presenceTest {
- return nil, false, nil
- }
- return nil, false, missingKey(q.celValue)
-}
-
-// Value implements the ConstantQualifier interface
-func (q *uintQualifier) Value() ref.Val {
- return q.celValue
-}
-
-type boolQualifier struct {
- id int64
- value bool
- celValue ref.Val
- adapter types.Adapter
- optional bool
-}
-
-// ID is an implementation of the Qualifier interface method.
-func (q *boolQualifier) ID() int64 {
- return q.id
-}
-
-// IsOptional implements the Qualifier interface method.
-func (q *boolQualifier) IsOptional() bool {
- return q.optional
-}
-
-// Qualify implements the Qualifier interface method.
-func (q *boolQualifier) Qualify(vars Activation, obj any) (any, error) {
- val, _, err := q.qualifyInternal(vars, obj, false, false)
- return val, err
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (q *boolQualifier) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return q.qualifyInternal(vars, obj, true, presenceOnly)
-}
-
-func (q *boolQualifier) qualifyInternal(vars Activation, obj any, presenceTest, presenceOnly bool) (any, bool, error) {
- b := q.value
- switch o := obj.(type) {
- case map[bool]any:
- obj, isKey := o[b]
- if isKey {
- return obj, true, nil
- }
- default:
- return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
- }
- if presenceTest {
- return nil, false, nil
- }
- return nil, false, missingKey(q.celValue)
-}
-
-// Value implements the ConstantQualifier interface
-func (q *boolQualifier) Value() ref.Val {
- return q.celValue
-}
-
-// fieldQualifier indicates that the qualification is a well-defined field with a known
-// field type. When the field type is known this can be used to improve the speed and
-// efficiency of field resolution.
-type fieldQualifier struct {
- id int64
- Name string
- FieldType *types.FieldType
- adapter types.Adapter
- optional bool
-}
-
-// ID is an implementation of the Qualifier interface method.
-func (q *fieldQualifier) ID() int64 {
- return q.id
-}
-
-// IsOptional implements the Qualifier interface method.
-func (q *fieldQualifier) IsOptional() bool {
- return q.optional
-}
-
-// Qualify implements the Qualifier interface method.
-func (q *fieldQualifier) Qualify(vars Activation, obj any) (any, error) {
- if rv, ok := obj.(ref.Val); ok {
- obj = rv.Value()
- }
- val, err := q.FieldType.GetFrom(obj)
- if err != nil {
- return nil, err
- }
- return val, nil
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (q *fieldQualifier) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- if rv, ok := obj.(ref.Val); ok {
- obj = rv.Value()
- }
- if !q.FieldType.IsSet(obj) {
- return nil, false, nil
- }
- if presenceOnly {
- return nil, true, nil
- }
- val, err := q.FieldType.GetFrom(obj)
- if err != nil {
- return nil, false, err
- }
- return val, true, nil
-}
-
-// Value implements the ConstantQualifier interface
-func (q *fieldQualifier) Value() ref.Val {
- return types.String(q.Name)
-}
-
-// doubleQualifier qualifies a CEL object, map, or list using a double value.
-//
-// This qualifier is used for working with dynamic data like JSON or protobuf.Any where the value
-// type may not be known ahead of time and may not conform to the standard types supported as valid
-// protobuf map key types.
-type doubleQualifier struct {
- id int64
- value float64
- celValue ref.Val
- adapter types.Adapter
- optional bool
-}
-
-// ID is an implementation of the Qualifier interface method.
-func (q *doubleQualifier) ID() int64 {
- return q.id
-}
-
-// IsOptional implements the Qualifier interface method.
-func (q *doubleQualifier) IsOptional() bool {
- return q.optional
-}
-
-// Qualify implements the Qualifier interface method.
-func (q *doubleQualifier) Qualify(vars Activation, obj any) (any, error) {
- val, _, err := q.qualifyInternal(vars, obj, false, false)
- return val, err
-}
-
-func (q *doubleQualifier) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return q.qualifyInternal(vars, obj, true, presenceOnly)
-}
-
-func (q *doubleQualifier) qualifyInternal(vars Activation, obj any, presenceTest, presenceOnly bool) (any, bool, error) {
- return refQualify(q.adapter, obj, q.celValue, presenceTest, presenceOnly)
-}
-
-// Value implements the ConstantQualifier interface
-func (q *doubleQualifier) Value() ref.Val {
- return q.celValue
-}
-
-// unknownQualifier is a simple qualifier which always returns a preconfigured set of unknown values
-// for any value subject to qualification. This is consistent with CEL's unknown handling elsewhere.
-type unknownQualifier struct {
- id int64
- value *types.Unknown
-}
-
-// ID is an implementation of the Qualifier interface method.
-func (q *unknownQualifier) ID() int64 {
- return q.id
-}
-
-// IsOptional returns trivially false as the unknown value is always returned.
-func (q *unknownQualifier) IsOptional() bool {
- return false
-}
-
-// Qualify returns the unknown value associated with this qualifier.
-func (q *unknownQualifier) Qualify(vars Activation, obj any) (any, error) {
- return q.value, nil
-}
-
-// QualifyIfPresent is an implementation of the Qualifier interface method.
-func (q *unknownQualifier) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- return q.value, true, nil
-}
-
-// Value implements the ConstantQualifier interface
-func (q *unknownQualifier) Value() ref.Val {
- return q.value
-}
-
-func applyQualifiers(vars Activation, obj any, qualifiers []Qualifier) (any, bool, error) {
- optObj, isOpt := obj.(*types.Optional)
- if isOpt {
- if !optObj.HasValue() {
- return optObj, false, nil
- }
- obj = optObj.GetValue().Value()
- }
-
- var err error
- for _, qual := range qualifiers {
- var qualObj any
- isOpt = isOpt || qual.IsOptional()
- if isOpt {
- var present bool
- qualObj, present, err = qual.QualifyIfPresent(vars, obj, false)
- if err != nil {
- return nil, false, err
- }
- if !present {
- // We return optional none here with a presence of 'false' as the layers
- // above will attempt to call types.OptionalOf() on a present value if any
- // of the qualifiers is optional.
- return types.OptionalNone, false, nil
- }
- } else {
- qualObj, err = qual.Qualify(vars, obj)
- if err != nil {
- return nil, false, err
- }
- }
- obj = qualObj
- }
- return obj, isOpt, nil
-}
-
-// attrQualify performs a qualification using the result of an attribute evaluation.
-func attrQualify(fac AttributeFactory, vars Activation, obj any, qualAttr Attribute) (any, error) {
- val, err := qualAttr.Resolve(vars)
- if err != nil {
- return nil, err
- }
- qual, err := fac.NewQualifier(nil, qualAttr.ID(), val, qualAttr.IsOptional())
- if err != nil {
- return nil, err
- }
- return qual.Qualify(vars, obj)
-}
-
-// attrQualifyIfPresent conditionally performs the qualification if the result of the attribute
-// evaluation is present on the target object.
-func attrQualifyIfPresent(fac AttributeFactory, vars Activation, obj any, qualAttr Attribute,
- presenceOnly bool) (any, bool, error) {
- val, err := qualAttr.Resolve(vars)
- if err != nil {
- return nil, false, err
- }
- qual, err := fac.NewQualifier(nil, qualAttr.ID(), val, qualAttr.IsOptional())
- if err != nil {
- return nil, false, err
- }
- return qual.QualifyIfPresent(vars, obj, presenceOnly)
-}
-
-// refQualify attempts to convert the value to a CEL value and then uses reflection methods to try and
-// apply the qualifier with the option to presence test field accesses before retrieving field values.
-func refQualify(adapter types.Adapter, obj any, idx ref.Val, presenceTest, presenceOnly bool) (ref.Val, bool, error) {
- celVal := adapter.NativeToValue(obj)
- switch v := celVal.(type) {
- case *types.Unknown:
- return v, true, nil
- case *types.Err:
- return nil, false, v
- case traits.Mapper:
- val, found := v.Find(idx)
- // If the index is of the wrong type for the map, then it is possible
- // for the Find call to produce an error.
- if types.IsError(val) {
- return nil, false, val.(*types.Err)
- }
- if found {
- return val, true, nil
- }
- if presenceTest {
- return nil, false, nil
- }
- return nil, false, missingKey(idx)
- case traits.Lister:
- // If the index argument is not a valid numeric type, then it is possible
- // for the index operation to produce an error.
- i, err := types.IndexOrError(idx)
- if err != nil {
- return nil, false, err
- }
- celIndex := types.Int(i)
- if i >= 0 && celIndex < v.Size().(types.Int) {
- return v.Get(idx), true, nil
- }
- if presenceTest {
- return nil, false, nil
- }
- return nil, false, missingIndex(idx)
- case traits.Indexer:
- if presenceTest {
- ft, ok := v.(traits.FieldTester)
- if ok {
- presence := ft.IsSet(idx)
- if types.IsError(presence) {
- return nil, false, presence.(*types.Err)
- }
- // If not found or presence only test, then return.
- // Otherwise, if found, obtain the value later on.
- if presenceOnly || presence == types.False {
- return nil, presence == types.True, nil
- }
- }
- }
- val := v.Get(idx)
- if types.IsError(val) {
- return nil, false, val.(*types.Err)
- }
- return val, true, nil
- default:
- if presenceTest {
- return nil, false, nil
- }
- return nil, false, missingKey(idx)
- }
-}
-
-// resolutionError is a custom error type which encodes the different error states which may
-// occur during attribute resolution.
-type resolutionError struct {
- missingAttribute string
- missingIndex ref.Val
- missingKey ref.Val
-}
-
-func (e *resolutionError) isMissingAttribute() bool {
- return e.missingAttribute != ""
-}
-
-func missingIndex(missing ref.Val) *resolutionError {
- return &resolutionError{
- missingIndex: missing,
- }
-}
-
-func missingKey(missing ref.Val) *resolutionError {
- return &resolutionError{
- missingKey: missing,
- }
-}
-
-func missingAttribute(attr string) *resolutionError {
- return &resolutionError{
- missingAttribute: attr,
- }
-}
-
-// Error implements the error interface method.
-func (e *resolutionError) Error() string {
- if e.missingKey != nil {
- return fmt.Sprintf("no such key: %v", e.missingKey)
- }
- if e.missingIndex != nil {
- return fmt.Sprintf("index out of bounds: %v", e.missingIndex)
- }
- if e.missingAttribute != "" {
- return fmt.Sprintf("no such attribute(s): %s", e.missingAttribute)
- }
- return "invalid attribute"
-}
-
-// Is implements the errors.Is() method used by more recent versions of Go.
-func (e *resolutionError) Is(err error) bool {
- return err.Error() == e.Error()
-}
diff --git a/vendor/github.com/google/cel-go/interpreter/decorators.go b/vendor/github.com/google/cel-go/interpreter/decorators.go
deleted file mode 100644
index 502db35fc..000000000
--- a/vendor/github.com/google/cel-go/interpreter/decorators.go
+++ /dev/null
@@ -1,272 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-)
-
-// InterpretableDecorator is a functional interface for decorating or replacing
-// Interpretable expression nodes at construction time.
-type InterpretableDecorator func(Interpretable) (Interpretable, error)
-
-// decObserveEval records evaluation state into an EvalState object.
-func decObserveEval(observer EvalObserver) InterpretableDecorator {
- return func(i Interpretable) (Interpretable, error) {
- switch inst := i.(type) {
- case *evalWatch, *evalWatchAttr, *evalWatchConst, *evalWatchConstructor:
-			// these instructions are already watching; return straight away.
- return i, nil
- case InterpretableAttribute:
- return &evalWatchAttr{
- InterpretableAttribute: inst,
- observer: observer,
- }, nil
- case InterpretableConst:
- return &evalWatchConst{
- InterpretableConst: inst,
- observer: observer,
- }, nil
- case InterpretableConstructor:
- return &evalWatchConstructor{
- constructor: inst,
- observer: observer,
- }, nil
- default:
- return &evalWatch{
- Interpretable: i,
- observer: observer,
- }, nil
- }
- }
-}
-
-// decInterruptFolds creates an interpretable decorator which marks comprehensions as interruptable
-// where the interrupt state is communicated via a hidden variable on the Activation.
-func decInterruptFolds() InterpretableDecorator {
- return func(i Interpretable) (Interpretable, error) {
- fold, ok := i.(*evalFold)
- if !ok {
- return i, nil
- }
- fold.interruptable = true
- return fold, nil
- }
-}
-
-// decDisableShortcircuits ensures that all branches of an expression will be evaluated, no short-circuiting.
-func decDisableShortcircuits() InterpretableDecorator {
- return func(i Interpretable) (Interpretable, error) {
- switch expr := i.(type) {
- case *evalOr:
- return &evalExhaustiveOr{
- id: expr.id,
- terms: expr.terms,
- }, nil
- case *evalAnd:
- return &evalExhaustiveAnd{
- id: expr.id,
- terms: expr.terms,
- }, nil
- case *evalFold:
- expr.exhaustive = true
- return expr, nil
- case InterpretableAttribute:
- cond, isCond := expr.Attr().(*conditionalAttribute)
- if isCond {
- return &evalExhaustiveConditional{
- id: cond.id,
- attr: cond,
- adapter: expr.Adapter(),
- }, nil
- }
- }
- return i, nil
- }
-}
-
-// decOptimize optimizes the program plan by looking for common evaluation patterns and
-// conditionally precomputing the result.
-// - build list and map values with constant elements.
-// - convert 'in' operations to set membership tests if possible.
-func decOptimize() InterpretableDecorator {
- return func(i Interpretable) (Interpretable, error) {
- switch inst := i.(type) {
- case *evalList:
- return maybeBuildListLiteral(i, inst)
- case *evalMap:
- return maybeBuildMapLiteral(i, inst)
- case InterpretableCall:
- if inst.OverloadID() == overloads.InList {
- return maybeOptimizeSetMembership(i, inst)
- }
- if overloads.IsTypeConversionFunction(inst.Function()) {
- return maybeOptimizeConstUnary(i, inst)
- }
- }
- return i, nil
- }
-}
-
-// decRegexOptimizer compiles regex pattern string constants.
-func decRegexOptimizer(regexOptimizations ...*RegexOptimization) InterpretableDecorator {
- functionMatchMap := make(map[string]*RegexOptimization)
- overloadMatchMap := make(map[string]*RegexOptimization)
- for _, m := range regexOptimizations {
- functionMatchMap[m.Function] = m
- if m.OverloadID != "" {
- overloadMatchMap[m.OverloadID] = m
- }
- }
-
- return func(i Interpretable) (Interpretable, error) {
- call, ok := i.(InterpretableCall)
- if !ok {
- return i, nil
- }
-
- var matcher *RegexOptimization
- var found bool
- if call.OverloadID() != "" {
- matcher, found = overloadMatchMap[call.OverloadID()]
- }
- if !found {
- matcher, found = functionMatchMap[call.Function()]
- }
- if !found || matcher.RegexIndex >= len(call.Args()) {
- return i, nil
- }
- args := call.Args()
- regexArg := args[matcher.RegexIndex]
- regexStr, isConst := regexArg.(InterpretableConst)
- if !isConst {
- return i, nil
- }
- pattern, ok := regexStr.Value().(types.String)
- if !ok {
- return i, nil
- }
- return matcher.Factory(call, string(pattern))
- }
-}
-
-func maybeOptimizeConstUnary(i Interpretable, call InterpretableCall) (Interpretable, error) {
- args := call.Args()
- if len(args) != 1 {
- return i, nil
- }
- _, isConst := args[0].(InterpretableConst)
- if !isConst {
- return i, nil
- }
- val := call.Eval(EmptyActivation())
- if types.IsError(val) {
- return nil, val.(*types.Err)
- }
- return NewConstValue(call.ID(), val), nil
-}
-
-func maybeBuildListLiteral(i Interpretable, l *evalList) (Interpretable, error) {
- for _, elem := range l.elems {
- _, isConst := elem.(InterpretableConst)
- if !isConst {
- return i, nil
- }
- }
- return NewConstValue(l.ID(), l.Eval(EmptyActivation())), nil
-}
-
-func maybeBuildMapLiteral(i Interpretable, mp *evalMap) (Interpretable, error) {
- for idx, key := range mp.keys {
- _, isConst := key.(InterpretableConst)
- if !isConst {
- return i, nil
- }
- _, isConst = mp.vals[idx].(InterpretableConst)
- if !isConst {
- return i, nil
- }
- }
- return NewConstValue(mp.ID(), mp.Eval(EmptyActivation())), nil
-}
-
-// maybeOptimizeSetMembership may convert an 'in' operation against a list to a map key membership
-// test if the following conditions are true:
-// - the list is a constant with homogeneous element types.
-// - the elements are all of primitive type.
-func maybeOptimizeSetMembership(i Interpretable, inlist InterpretableCall) (Interpretable, error) {
- args := inlist.Args()
- lhs := args[0]
- rhs := args[1]
- l, isConst := rhs.(InterpretableConst)
- if !isConst {
- return i, nil
- }
-	// When the incoming binary call is flagged as the InList overload, the value will
- // always be convertible to a `traits.Lister` type.
- list := l.Value().(traits.Lister)
- if list.Size() == types.IntZero {
- return NewConstValue(inlist.ID(), types.False), nil
- }
- it := list.Iterator()
- valueSet := make(map[ref.Val]ref.Val)
- for it.HasNext() == types.True {
- elem := it.Next()
- if !types.IsPrimitiveType(elem) || elem.Type() == types.BytesType {
-			// Note, non-primitive types are not yet supported, and []byte isn't hashable.
- return i, nil
- }
- valueSet[elem] = types.True
- switch ev := elem.(type) {
- case types.Double:
- iv := ev.ConvertToType(types.IntType)
- // Ensure that only lossless conversions are added to the set
- if !types.IsError(iv) && iv.Equal(ev) == types.True {
- valueSet[iv] = types.True
- }
- // Ensure that only lossless conversions are added to the set
- uv := ev.ConvertToType(types.UintType)
- if !types.IsError(uv) && uv.Equal(ev) == types.True {
- valueSet[uv] = types.True
- }
- case types.Int:
- dv := ev.ConvertToType(types.DoubleType)
- if !types.IsError(dv) {
- valueSet[dv] = types.True
- }
- uv := ev.ConvertToType(types.UintType)
- if !types.IsError(uv) {
- valueSet[uv] = types.True
- }
- case types.Uint:
- dv := ev.ConvertToType(types.DoubleType)
- if !types.IsError(dv) {
- valueSet[dv] = types.True
- }
- iv := ev.ConvertToType(types.IntType)
- if !types.IsError(iv) {
- valueSet[iv] = types.True
- }
- }
- }
- return &evalSetMembership{
- inst: inlist,
- arg: lhs,
- valueSet: valueSet,
- }, nil
-}
diff --git a/vendor/github.com/google/cel-go/interpreter/dispatcher.go b/vendor/github.com/google/cel-go/interpreter/dispatcher.go
deleted file mode 100644
index 8f0bdb7b8..000000000
--- a/vendor/github.com/google/cel-go/interpreter/dispatcher.go
+++ /dev/null
@@ -1,100 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "fmt"
-
- "github.com/google/cel-go/common/functions"
-)
-
-// Dispatcher resolves function calls to their appropriate overload.
-type Dispatcher interface {
- // Add one or more overloads, returning an error if any Overload has the same Overload#Name.
- Add(overloads ...*functions.Overload) error
-
- // FindOverload returns an Overload definition matching the provided name.
- FindOverload(overload string) (*functions.Overload, bool)
-
- // OverloadIds returns the set of all overload identifiers configured for dispatch.
- OverloadIds() []string
-}
-
-// NewDispatcher returns an empty Dispatcher instance.
-func NewDispatcher() Dispatcher {
- return &defaultDispatcher{
- overloads: make(map[string]*functions.Overload)}
-}
-
-// ExtendDispatcher returns a Dispatcher which inherits the overloads of its parent, and
-// provides an isolation layer between built-ins and extension functions which is useful
-// for forward compatibility.
-func ExtendDispatcher(parent Dispatcher) Dispatcher {
- return &defaultDispatcher{
- parent: parent,
- overloads: make(map[string]*functions.Overload)}
-}
-
-// overloadMap helper type for indexing overloads by function name.
-type overloadMap map[string]*functions.Overload
-
-// defaultDispatcher struct which contains an overload map.
-type defaultDispatcher struct {
- parent Dispatcher
- overloads overloadMap
-}
-
-// Add implements the Dispatcher.Add interface method.
-func (d *defaultDispatcher) Add(overloads ...*functions.Overload) error {
- for _, o := range overloads {
- // add the overload unless an overload of the same name has already been provided.
- if _, found := d.overloads[o.Operator]; found {
- return fmt.Errorf("overload already exists '%s'", o.Operator)
- }
- // index the overload by function name.
- d.overloads[o.Operator] = o
- }
- return nil
-}
-
-// FindOverload implements the Dispatcher.FindOverload interface method.
-func (d *defaultDispatcher) FindOverload(overload string) (*functions.Overload, bool) {
- o, found := d.overloads[overload]
- // Attempt to dispatch to an overload defined in the parent.
- if !found && d.parent != nil {
- return d.parent.FindOverload(overload)
- }
- return o, found
-}
-
-// OverloadIds implements the Dispatcher interface method.
-func (d *defaultDispatcher) OverloadIds() []string {
- i := 0
- overloads := make([]string, len(d.overloads))
- for name := range d.overloads {
- overloads[i] = name
- i++
- }
- if d.parent == nil {
- return overloads
- }
- parentOverloads := d.parent.OverloadIds()
- for _, pName := range parentOverloads {
- if _, found := d.overloads[pName]; !found {
- overloads = append(overloads, pName)
- }
- }
- return overloads
-}
diff --git a/vendor/github.com/google/cel-go/interpreter/evalstate.go b/vendor/github.com/google/cel-go/interpreter/evalstate.go
deleted file mode 100644
index 4bdd1fdc7..000000000
--- a/vendor/github.com/google/cel-go/interpreter/evalstate.go
+++ /dev/null
@@ -1,79 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "github.com/google/cel-go/common/types/ref"
-)
-
-// EvalState tracks the values associated with expression ids during execution.
-type EvalState interface {
- // IDs returns the list of ids with recorded values.
- IDs() []int64
-
- // Value returns the observed value of the given expression id if found, and a nil false
- // result if not.
- Value(int64) (ref.Val, bool)
-
- // SetValue sets the observed value of the expression id.
- SetValue(int64, ref.Val)
-
- // Reset clears the previously recorded expression values.
- Reset()
-}
-
-// evalState permits the mutation of evaluation state for a given expression id.
-type evalState struct {
- values map[int64]ref.Val
-}
-
-// NewEvalState returns an EvalState instance used to observe the intermediate
-// evaluations of an expression.
-func NewEvalState() EvalState {
- return &evalState{
- values: make(map[int64]ref.Val),
- }
-}
-
-// IDs implements the EvalState interface method.
-func (s *evalState) IDs() []int64 {
- var ids []int64
- for k, v := range s.values {
- if v != nil {
- ids = append(ids, k)
- }
- }
- return ids
-}
-
-// Value is an implementation of the EvalState interface method.
-func (s *evalState) Value(exprID int64) (ref.Val, bool) {
- val, found := s.values[exprID]
- return val, found
-}
-
-// SetValue is an implementation of the EvalState interface method.
-func (s *evalState) SetValue(exprID int64, val ref.Val) {
- if val == nil {
- delete(s.values, exprID)
- } else {
- s.values[exprID] = val
- }
-}
-
-// Reset implements the EvalState interface method.
-func (s *evalState) Reset() {
- s.values = map[int64]ref.Val{}
-}
diff --git a/vendor/github.com/google/cel-go/interpreter/functions/BUILD.bazel b/vendor/github.com/google/cel-go/interpreter/functions/BUILD.bazel
deleted file mode 100644
index 4a80c3ea0..000000000
--- a/vendor/github.com/google/cel-go/interpreter/functions/BUILD.bazel
+++ /dev/null
@@ -1,17 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-
-package(
- default_visibility = ["//visibility:public"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "functions.go",
- ],
- importpath = "github.com/google/cel-go/interpreter/functions",
- deps = [
- "//common/functions:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/interpreter/functions/functions.go b/vendor/github.com/google/cel-go/interpreter/functions/functions.go
deleted file mode 100644
index 21ffb6924..000000000
--- a/vendor/github.com/google/cel-go/interpreter/functions/functions.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package functions defines the standard builtin functions supported by the
-// interpreter, as declared within the checker#StandardDeclarations.
-package functions
-
-import fn "github.com/google/cel-go/common/functions"
-
-// Overload defines a named overload of a function, indicating an operand trait
-// which must be present on the first argument to the overload as well as one
-// of either a unary, binary, or function implementation.
-//
-// The majority of operators within the expression language are unary or binary
-// and the specializations simplify the call contract for implementers of
-// types with operator overloads. Any added complexity is assumed to be handled
-// by the generic FunctionOp.
-type Overload = fn.Overload
-
-// UnaryOp is a function that takes a single value and produces an output.
-type UnaryOp = fn.UnaryOp
-
-// BinaryOp is a function that takes two values and produces an output.
-type BinaryOp = fn.BinaryOp
-
-// FunctionOp is a function which accepts zero or more arguments and produces
-// a value or error as a result.
-type FunctionOp = fn.FunctionOp
diff --git a/vendor/github.com/google/cel-go/interpreter/interpretable.go b/vendor/github.com/google/cel-go/interpreter/interpretable.go
deleted file mode 100644
index c4598dfa7..000000000
--- a/vendor/github.com/google/cel-go/interpreter/interpretable.go
+++ /dev/null
@@ -1,1262 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "fmt"
-
- "github.com/google/cel-go/common/functions"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-)
-
-// Interpretable can accept a given Activation and produce a value along with
-// an accompanying EvalState which can be used to inspect whether additional
-// data might be necessary to complete the evaluation.
-type Interpretable interface {
- // ID value corresponding to the expression node.
- ID() int64
-
- // Eval an Activation to produce an output.
- Eval(activation Activation) ref.Val
-}
-
-// InterpretableConst interface for tracking whether the Interpretable is a constant value.
-type InterpretableConst interface {
- Interpretable
-
- // Value returns the constant value of the instruction.
- Value() ref.Val
-}
-
-// InterpretableAttribute interface for tracking whether the Interpretable is an attribute.
-type InterpretableAttribute interface {
- Interpretable
-
- // Attr returns the Attribute value.
- Attr() Attribute
-
- // Adapter returns the type adapter to be used for adapting resolved Attribute values.
- Adapter() types.Adapter
-
- // AddQualifier proxies the Attribute.AddQualifier method.
- //
- // Note, this method may mutate the current attribute state. If the desire is to clone the
- // Attribute, the Attribute should first be copied before adding the qualifier. Attributes
-	// are not copyable by default, so this is a capability that would need to be added to the
- // AttributeFactory or specifically to the underlying Attribute implementation.
- AddQualifier(Qualifier) (Attribute, error)
-
- // Qualify replicates the Attribute.Qualify method to permit extension and interception
- // of object qualification.
- Qualify(vars Activation, obj any) (any, error)
-
- // QualifyIfPresent qualifies the object if the qualifier is declared or defined on the object.
- // The 'presenceOnly' flag indicates that the value is not necessary, just a boolean status as
- // to whether the qualifier is present.
- QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error)
-
- // IsOptional indicates whether the resulting value is an optional type.
- IsOptional() bool
-
- // Resolve returns the value of the Attribute given the current Activation.
- Resolve(Activation) (any, error)
-}
-
-// InterpretableCall interface for inspecting Interpretable instructions related to function calls.
-type InterpretableCall interface {
- Interpretable
-
- // Function returns the function name as it appears in text or mangled operator name as it
- // appears in the operators.go file.
- Function() string
-
- // OverloadID returns the overload id associated with the function specialization.
- // Overload ids are stable across language boundaries and can be treated as synonymous with a
- // unique function signature.
- OverloadID() string
-
- // Args returns the normalized arguments to the function overload.
- // For receiver-style functions, the receiver target is arg 0.
- Args() []Interpretable
-}
-
-// InterpretableConstructor interface for inspecting Interpretable instructions that initialize a list, map
-// or struct.
-type InterpretableConstructor interface {
- Interpretable
-
- // InitVals returns all the list elements, map key and values or struct field values.
- InitVals() []Interpretable
-
- // Type returns the type constructed.
- Type() ref.Type
-}
-
-// Core Interpretable implementations used during the program planning phase.
-
-type evalTestOnly struct {
- id int64
- InterpretableAttribute
-}
-
-// ID implements the Interpretable interface method.
-func (test *evalTestOnly) ID() int64 {
- return test.id
-}
-
-// Eval implements the Interpretable interface method.
-func (test *evalTestOnly) Eval(ctx Activation) ref.Val {
- val, err := test.Resolve(ctx)
- // Return an error if the resolve step fails
- if err != nil {
- return types.WrapErr(err)
- }
- if optVal, isOpt := val.(*types.Optional); isOpt {
- return types.Bool(optVal.HasValue())
- }
- return test.Adapter().NativeToValue(val)
-}
-
-// AddQualifier appends a qualifier that will always and only perform a presence test.
-func (test *evalTestOnly) AddQualifier(q Qualifier) (Attribute, error) {
- cq, ok := q.(ConstantQualifier)
- if !ok {
- return nil, fmt.Errorf("test only expressions must have constant qualifiers: %v", q)
- }
- return test.InterpretableAttribute.AddQualifier(&testOnlyQualifier{ConstantQualifier: cq})
-}
-
-type testOnlyQualifier struct {
- ConstantQualifier
-}
-
-// Qualify determines whether the test-only qualifier is present on the input object.
-func (q *testOnlyQualifier) Qualify(vars Activation, obj any) (any, error) {
- out, present, err := q.ConstantQualifier.QualifyIfPresent(vars, obj, true)
- if err != nil {
- return nil, err
- }
- if unk, isUnk := out.(types.Unknown); isUnk {
- return unk, nil
- }
- if opt, isOpt := out.(types.Optional); isOpt {
- return opt.HasValue(), nil
- }
- return present, nil
-}
-
-// QualifyIfPresent returns whether the target field in the test-only expression is present.
-func (q *testOnlyQualifier) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- // Only ever test for presence.
- return q.ConstantQualifier.QualifyIfPresent(vars, obj, true)
-}
-
-// QualifierValueEquals determines whether the test-only constant qualifier equals the input value.
-func (q *testOnlyQualifier) QualifierValueEquals(value any) bool {
- // The input qualifier will always be of type string
- return q.ConstantQualifier.Value().Value() == value
-}
-
-// NewConstValue creates a new constant valued Interpretable.
-func NewConstValue(id int64, val ref.Val) InterpretableConst {
- return &evalConst{
- id: id,
- val: val,
- }
-}
-
-type evalConst struct {
- id int64
- val ref.Val
-}
-
-// ID implements the Interpretable interface method.
-func (cons *evalConst) ID() int64 {
- return cons.id
-}
-
-// Eval implements the Interpretable interface method.
-func (cons *evalConst) Eval(ctx Activation) ref.Val {
- return cons.val
-}
-
-// Value implements the InterpretableConst interface method.
-func (cons *evalConst) Value() ref.Val {
- return cons.val
-}
-
-type evalOr struct {
- id int64
- terms []Interpretable
-}
-
-// ID implements the Interpretable interface method.
-func (or *evalOr) ID() int64 {
- return or.id
-}
-
-// Eval implements the Interpretable interface method.
-func (or *evalOr) Eval(ctx Activation) ref.Val {
- var err ref.Val = nil
- var unk *types.Unknown
- for _, term := range or.terms {
- val := term.Eval(ctx)
- boolVal, ok := val.(types.Bool)
- // short-circuit on true.
- if ok && boolVal == types.True {
- return types.True
- }
- if !ok {
- isUnk := false
- unk, isUnk = types.MaybeMergeUnknowns(val, unk)
- if !isUnk && err == nil {
- if types.IsError(val) {
- err = val
- } else {
- err = types.MaybeNoSuchOverloadErr(val)
- }
- }
- }
- }
- if unk != nil {
- return unk
- }
- if err != nil {
- return err
- }
- return types.False
-}
-
-type evalAnd struct {
- id int64
- terms []Interpretable
-}
-
-// ID implements the Interpretable interface method.
-func (and *evalAnd) ID() int64 {
- return and.id
-}
-
-// Eval implements the Interpretable interface method.
-func (and *evalAnd) Eval(ctx Activation) ref.Val {
- var err ref.Val = nil
- var unk *types.Unknown
- for _, term := range and.terms {
- val := term.Eval(ctx)
- boolVal, ok := val.(types.Bool)
- // short-circuit on false.
- if ok && boolVal == types.False {
- return types.False
- }
- if !ok {
- isUnk := false
- unk, isUnk = types.MaybeMergeUnknowns(val, unk)
- if !isUnk && err == nil {
- if types.IsError(val) {
- err = val
- } else {
- err = types.MaybeNoSuchOverloadErr(val)
- }
- }
- }
- }
- if unk != nil {
- return unk
- }
- if err != nil {
- return err
- }
- return types.True
-}
-
-type evalEq struct {
- id int64
- lhs Interpretable
- rhs Interpretable
-}
-
-// ID implements the Interpretable interface method.
-func (eq *evalEq) ID() int64 {
- return eq.id
-}
-
-// Eval implements the Interpretable interface method.
-func (eq *evalEq) Eval(ctx Activation) ref.Val {
- lVal := eq.lhs.Eval(ctx)
- rVal := eq.rhs.Eval(ctx)
- if types.IsUnknownOrError(lVal) {
- return lVal
- }
- if types.IsUnknownOrError(rVal) {
- return rVal
- }
- return types.Equal(lVal, rVal)
-}
-
-// Function implements the InterpretableCall interface method.
-func (*evalEq) Function() string {
- return operators.Equals
-}
-
-// OverloadID implements the InterpretableCall interface method.
-func (*evalEq) OverloadID() string {
- return overloads.Equals
-}
-
-// Args implements the InterpretableCall interface method.
-func (eq *evalEq) Args() []Interpretable {
- return []Interpretable{eq.lhs, eq.rhs}
-}
-
-type evalNe struct {
- id int64
- lhs Interpretable
- rhs Interpretable
-}
-
-// ID implements the Interpretable interface method.
-func (ne *evalNe) ID() int64 {
- return ne.id
-}
-
-// Eval implements the Interpretable interface method.
-func (ne *evalNe) Eval(ctx Activation) ref.Val {
- lVal := ne.lhs.Eval(ctx)
- rVal := ne.rhs.Eval(ctx)
- if types.IsUnknownOrError(lVal) {
- return lVal
- }
- if types.IsUnknownOrError(rVal) {
- return rVal
- }
- return types.Bool(types.Equal(lVal, rVal) != types.True)
-}
-
-// Function implements the InterpretableCall interface method.
-func (*evalNe) Function() string {
- return operators.NotEquals
-}
-
-// OverloadID implements the InterpretableCall interface method.
-func (*evalNe) OverloadID() string {
- return overloads.NotEquals
-}
-
-// Args implements the InterpretableCall interface method.
-func (ne *evalNe) Args() []Interpretable {
- return []Interpretable{ne.lhs, ne.rhs}
-}
-
-type evalZeroArity struct {
- id int64
- function string
- overload string
- impl functions.FunctionOp
-}
-
-// ID implements the Interpretable interface method.
-func (zero *evalZeroArity) ID() int64 {
- return zero.id
-}
-
-// Eval implements the Interpretable interface method.
-func (zero *evalZeroArity) Eval(ctx Activation) ref.Val {
- return zero.impl()
-}
-
-// Function implements the InterpretableCall interface method.
-func (zero *evalZeroArity) Function() string {
- return zero.function
-}
-
-// OverloadID implements the InterpretableCall interface method.
-func (zero *evalZeroArity) OverloadID() string {
- return zero.overload
-}
-
-// Args returns the (empty) argument list for the zero-arity function.
-func (zero *evalZeroArity) Args() []Interpretable {
- return []Interpretable{}
-}
-
-type evalUnary struct {
- id int64
- function string
- overload string
- arg Interpretable
- trait int
- impl functions.UnaryOp
- nonStrict bool
-}
-
-// ID implements the Interpretable interface method.
-func (un *evalUnary) ID() int64 {
- return un.id
-}
-
-// Eval implements the Interpretable interface method.
-func (un *evalUnary) Eval(ctx Activation) ref.Val {
- argVal := un.arg.Eval(ctx)
-	// Early return if the argument to the function is unknown or an error.
- strict := !un.nonStrict
- if strict && types.IsUnknownOrError(argVal) {
- return argVal
- }
- // If the implementation is bound and the argument value has the right traits required to
- // invoke it, then call the implementation.
- if un.impl != nil && (un.trait == 0 || (!strict && types.IsUnknownOrError(argVal)) || argVal.Type().HasTrait(un.trait)) {
- return un.impl(argVal)
- }
- // Otherwise, if the argument is a ReceiverType attempt to invoke the receiver method on the
- // operand (arg0).
- if argVal.Type().HasTrait(traits.ReceiverType) {
- return argVal.(traits.Receiver).Receive(un.function, un.overload, []ref.Val{})
- }
- return types.NewErr("no such overload: %s", un.function)
-}
-
-// Function implements the InterpretableCall interface method.
-func (un *evalUnary) Function() string {
- return un.function
-}
-
-// OverloadID implements the InterpretableCall interface method.
-func (un *evalUnary) OverloadID() string {
- return un.overload
-}
-
-// Args returns the argument to the unary function.
-func (un *evalUnary) Args() []Interpretable {
- return []Interpretable{un.arg}
-}
-
-type evalBinary struct {
- id int64
- function string
- overload string
- lhs Interpretable
- rhs Interpretable
- trait int
- impl functions.BinaryOp
- nonStrict bool
-}
-
-// ID implements the Interpretable interface method.
-func (bin *evalBinary) ID() int64 {
- return bin.id
-}
-
-// Eval implements the Interpretable interface method.
-func (bin *evalBinary) Eval(ctx Activation) ref.Val {
- lVal := bin.lhs.Eval(ctx)
- rVal := bin.rhs.Eval(ctx)
-	// Early return if any argument to the function is unknown or an error.
- strict := !bin.nonStrict
- if strict {
- if types.IsUnknownOrError(lVal) {
- return lVal
- }
- if types.IsUnknownOrError(rVal) {
- return rVal
- }
- }
- // If the implementation is bound and the argument value has the right traits required to
- // invoke it, then call the implementation.
- if bin.impl != nil && (bin.trait == 0 || (!strict && types.IsUnknownOrError(lVal)) || lVal.Type().HasTrait(bin.trait)) {
- return bin.impl(lVal, rVal)
- }
- // Otherwise, if the argument is a ReceiverType attempt to invoke the receiver method on the
- // operand (arg0).
- if lVal.Type().HasTrait(traits.ReceiverType) {
- return lVal.(traits.Receiver).Receive(bin.function, bin.overload, []ref.Val{rVal})
- }
- return types.NewErr("no such overload: %s", bin.function)
-}
-
-// Function implements the InterpretableCall interface method.
-func (bin *evalBinary) Function() string {
- return bin.function
-}
-
-// OverloadID implements the InterpretableCall interface method.
-func (bin *evalBinary) OverloadID() string {
- return bin.overload
-}
-
-// Args returns the arguments to the binary function.
-func (bin *evalBinary) Args() []Interpretable {
- return []Interpretable{bin.lhs, bin.rhs}
-}
-
-type evalVarArgs struct {
- id int64
- function string
- overload string
- args []Interpretable
- trait int
- impl functions.FunctionOp
- nonStrict bool
-}
-
-// NewCall creates a new call Interpretable.
-func NewCall(id int64, function, overload string, args []Interpretable, impl functions.FunctionOp) InterpretableCall {
- return &evalVarArgs{
- id: id,
- function: function,
- overload: overload,
- args: args,
- impl: impl,
- }
-}
-
-// ID implements the Interpretable interface method.
-func (fn *evalVarArgs) ID() int64 {
- return fn.id
-}
-
-// Eval implements the Interpretable interface method.
-func (fn *evalVarArgs) Eval(ctx Activation) ref.Val {
- argVals := make([]ref.Val, len(fn.args))
-	// Early return if any argument to the function is unknown or an error.
- strict := !fn.nonStrict
- for i, arg := range fn.args {
- argVals[i] = arg.Eval(ctx)
- if strict && types.IsUnknownOrError(argVals[i]) {
- return argVals[i]
- }
- }
- // If the implementation is bound and the argument value has the right traits required to
- // invoke it, then call the implementation.
- arg0 := argVals[0]
- if fn.impl != nil && (fn.trait == 0 || (!strict && types.IsUnknownOrError(arg0)) || arg0.Type().HasTrait(fn.trait)) {
- return fn.impl(argVals...)
- }
- // Otherwise, if the argument is a ReceiverType attempt to invoke the receiver method on the
- // operand (arg0).
- if arg0.Type().HasTrait(traits.ReceiverType) {
- return arg0.(traits.Receiver).Receive(fn.function, fn.overload, argVals[1:])
- }
- return types.NewErr("no such overload: %s", fn.function)
-}
-
-// Function implements the InterpretableCall interface method.
-func (fn *evalVarArgs) Function() string {
- return fn.function
-}
-
-// OverloadID implements the InterpretableCall interface method.
-func (fn *evalVarArgs) OverloadID() string {
- return fn.overload
-}
-
-// Args returns the arguments to the variadic function.
-func (fn *evalVarArgs) Args() []Interpretable {
- return fn.args
-}
-
-type evalList struct {
- id int64
- elems []Interpretable
- optionals []bool
- hasOptionals bool
- adapter types.Adapter
-}
-
-// ID implements the Interpretable interface method.
-func (l *evalList) ID() int64 {
- return l.id
-}
-
-// Eval implements the Interpretable interface method.
-func (l *evalList) Eval(ctx Activation) ref.Val {
- elemVals := make([]ref.Val, 0, len(l.elems))
-	// If any element is unknown or an error, terminate early.
- for i, elem := range l.elems {
- elemVal := elem.Eval(ctx)
- if types.IsUnknownOrError(elemVal) {
- return elemVal
- }
- if l.hasOptionals && l.optionals[i] {
- optVal, ok := elemVal.(*types.Optional)
- if !ok {
- return invalidOptionalElementInit(elemVal)
- }
- if !optVal.HasValue() {
- continue
- }
- elemVal = optVal.GetValue()
- }
- elemVals = append(elemVals, elemVal)
- }
- return l.adapter.NativeToValue(elemVals)
-}
-
-func (l *evalList) InitVals() []Interpretable {
- return l.elems
-}
-
-func (l *evalList) Type() ref.Type {
- return types.ListType
-}
-
-type evalMap struct {
- id int64
- keys []Interpretable
- vals []Interpretable
- optionals []bool
- hasOptionals bool
- adapter types.Adapter
-}
-
-// ID implements the Interpretable interface method.
-func (m *evalMap) ID() int64 {
- return m.id
-}
-
-// Eval implements the Interpretable interface method.
-func (m *evalMap) Eval(ctx Activation) ref.Val {
- entries := make(map[ref.Val]ref.Val)
-	// If any key or value is unknown or an error, terminate early.
- for i, key := range m.keys {
- keyVal := key.Eval(ctx)
- if types.IsUnknownOrError(keyVal) {
- return keyVal
- }
- valVal := m.vals[i].Eval(ctx)
- if types.IsUnknownOrError(valVal) {
- return valVal
- }
- if m.hasOptionals && m.optionals[i] {
- optVal, ok := valVal.(*types.Optional)
- if !ok {
- return invalidOptionalEntryInit(keyVal, valVal)
- }
- if !optVal.HasValue() {
- delete(entries, keyVal)
- continue
- }
- valVal = optVal.GetValue()
- }
- entries[keyVal] = valVal
- }
- return m.adapter.NativeToValue(entries)
-}
-
-func (m *evalMap) InitVals() []Interpretable {
- if len(m.keys) != len(m.vals) {
- return nil
- }
- result := make([]Interpretable, len(m.keys)+len(m.vals))
- idx := 0
- for i, k := range m.keys {
- v := m.vals[i]
- result[idx] = k
- idx++
- result[idx] = v
- idx++
- }
- return result
-}
-
-func (m *evalMap) Type() ref.Type {
- return types.MapType
-}
-
-type evalObj struct {
- id int64
- typeName string
- fields []string
- vals []Interpretable
- optionals []bool
- hasOptionals bool
- provider types.Provider
-}
-
-// ID implements the Interpretable interface method.
-func (o *evalObj) ID() int64 {
- return o.id
-}
-
-// Eval implements the Interpretable interface method.
-func (o *evalObj) Eval(ctx Activation) ref.Val {
- fieldVals := make(map[string]ref.Val)
-	// If any field value is unknown or an error, terminate early.
- for i, field := range o.fields {
- val := o.vals[i].Eval(ctx)
- if types.IsUnknownOrError(val) {
- return val
- }
- if o.hasOptionals && o.optionals[i] {
- optVal, ok := val.(*types.Optional)
- if !ok {
- return invalidOptionalEntryInit(field, val)
- }
- if !optVal.HasValue() {
- delete(fieldVals, field)
- continue
- }
- val = optVal.GetValue()
- }
- fieldVals[field] = val
- }
- return o.provider.NewValue(o.typeName, fieldVals)
-}
-
-func (o *evalObj) InitVals() []Interpretable {
- return o.vals
-}
-
-func (o *evalObj) Type() ref.Type {
- return types.NewObjectTypeValue(o.typeName)
-}
-
-type evalFold struct {
- id int64
- accuVar string
- iterVar string
- iterRange Interpretable
- accu Interpretable
- cond Interpretable
- step Interpretable
- result Interpretable
- adapter types.Adapter
- exhaustive bool
- interruptable bool
-}
-
-// ID implements the Interpretable interface method.
-func (fold *evalFold) ID() int64 {
- return fold.id
-}
-
-// Eval implements the Interpretable interface method.
-func (fold *evalFold) Eval(ctx Activation) ref.Val {
- foldRange := fold.iterRange.Eval(ctx)
- if !foldRange.Type().HasTrait(traits.IterableType) {
- return types.ValOrErr(foldRange, "got '%T', expected iterable type", foldRange)
- }
- // Configure the fold activation with the accumulator initial value.
- accuCtx := varActivationPool.Get().(*varActivation)
- accuCtx.parent = ctx
- accuCtx.name = fold.accuVar
- accuCtx.val = fold.accu.Eval(ctx)
- // If the accumulator starts as an empty list, then the comprehension will build a list
- // so create a mutable list to optimize the cost of the inner loop.
- l, ok := accuCtx.val.(traits.Lister)
- buildingList := false
- if !fold.exhaustive && ok && l.Size() == types.IntZero {
- buildingList = true
- accuCtx.val = types.NewMutableList(fold.adapter)
- }
- iterCtx := varActivationPool.Get().(*varActivation)
- iterCtx.parent = accuCtx
- iterCtx.name = fold.iterVar
-
- interrupted := false
- it := foldRange.(traits.Iterable).Iterator()
- for it.HasNext() == types.True {
- // Modify the iter var in the fold activation.
- iterCtx.val = it.Next()
-
- // Evaluate the condition, terminate the loop if false.
- cond := fold.cond.Eval(iterCtx)
- condBool, ok := cond.(types.Bool)
- if !fold.exhaustive && ok && condBool != types.True {
- break
- }
- // Evaluate the evaluation step into accu var.
- accuCtx.val = fold.step.Eval(iterCtx)
- if fold.interruptable {
- if stop, found := ctx.ResolveName("#interrupted"); found && stop == true {
- interrupted = true
- break
- }
- }
- }
- varActivationPool.Put(iterCtx)
- if interrupted {
- varActivationPool.Put(accuCtx)
- return types.NewErr("operation interrupted")
- }
-
- // Compute the result.
- res := fold.result.Eval(accuCtx)
- varActivationPool.Put(accuCtx)
- // Convert a mutable list to an immutable one, if the comprehension has generated a list as a result.
- if !types.IsUnknownOrError(res) && buildingList {
- if _, ok := res.(traits.MutableLister); ok {
- res = res.(traits.MutableLister).ToImmutableList()
- }
- }
- return res
-}
-
-// Optional Interpretable implementations that specialize, subsume, or extend the core evaluation
-// plan via decorators.
-
-// evalSetMembership is an Interpretable implementation which tests whether an input value
-// exists within the set of map keys used to model a set.
-type evalSetMembership struct {
- inst Interpretable
- arg Interpretable
- valueSet map[ref.Val]ref.Val
-}
-
-// ID implements the Interpretable interface method.
-func (e *evalSetMembership) ID() int64 {
- return e.inst.ID()
-}
-
-// Eval implements the Interpretable interface method.
-func (e *evalSetMembership) Eval(ctx Activation) ref.Val {
- val := e.arg.Eval(ctx)
- if types.IsUnknownOrError(val) {
- return val
- }
- if ret, found := e.valueSet[val]; found {
- return ret
- }
- return types.False
-}
-
-// evalWatch is an Interpretable implementation that wraps the execution of a given
-// expression so that it may observe the computed value and send it to an observer.
-type evalWatch struct {
- Interpretable
- observer EvalObserver
-}
-
-// Eval implements the Interpretable interface method.
-func (e *evalWatch) Eval(ctx Activation) ref.Val {
- val := e.Interpretable.Eval(ctx)
- e.observer(e.ID(), e.Interpretable, val)
- return val
-}
-
-// evalWatchAttr describes a watcher of an InterpretableAttribute Interpretable.
-//
-// Since the watcher may be selected against at a later stage in program planning, the watcher
-// must implement the InterpretableAttribute interface by proxy.
-type evalWatchAttr struct {
- InterpretableAttribute
- observer EvalObserver
-}
-
-// AddQualifier creates a wrapper over the incoming qualifier which observes the qualification
-// result.
-func (e *evalWatchAttr) AddQualifier(q Qualifier) (Attribute, error) {
- switch qual := q.(type) {
- // By default, the qualifier is either a constant or an attribute
- // There may be some custom cases where the attribute is neither.
- case ConstantQualifier:
- // Expose a method to test whether the qualifier matches the input pattern.
- q = &evalWatchConstQual{
- ConstantQualifier: qual,
- observer: e.observer,
- adapter: e.Adapter(),
- }
- case *evalWatchAttr:
- // Unwrap the evalWatchAttr since the observation will be applied during Qualify or
- // QualifyIfPresent rather than Eval.
- q = &evalWatchAttrQual{
- Attribute: qual.InterpretableAttribute,
- observer: e.observer,
- adapter: e.Adapter(),
- }
- case Attribute:
- // Expose methods which intercept the qualification prior to being applied as a qualifier.
- // Using this interface ensures that the qualifier is converted to a constant value one
- // time during attribute pattern matching as the method embeds the Attribute interface
- // needed to trip the conversion to a constant.
- q = &evalWatchAttrQual{
- Attribute: qual,
- observer: e.observer,
- adapter: e.Adapter(),
- }
- default:
- // This is likely a custom qualifier type.
- q = &evalWatchQual{
- Qualifier: qual,
- observer: e.observer,
- adapter: e.Adapter(),
- }
- }
- _, err := e.InterpretableAttribute.AddQualifier(q)
- return e, err
-}
-
-// Eval implements the Interpretable interface method.
-func (e *evalWatchAttr) Eval(vars Activation) ref.Val {
- val := e.InterpretableAttribute.Eval(vars)
- e.observer(e.ID(), e.InterpretableAttribute, val)
- return val
-}
-
-// evalWatchConstQual observes the qualification of an object using a constant boolean, int,
-// string, or uint.
-type evalWatchConstQual struct {
- ConstantQualifier
- observer EvalObserver
- adapter types.Adapter
-}
-
-// Qualify observes the qualification of an object via a constant boolean, int, string, or uint.
-func (e *evalWatchConstQual) Qualify(vars Activation, obj any) (any, error) {
- out, err := e.ConstantQualifier.Qualify(vars, obj)
- var val ref.Val
- if err != nil {
- val = types.WrapErr(err)
- } else {
- val = e.adapter.NativeToValue(out)
- }
- e.observer(e.ID(), e.ConstantQualifier, val)
- return out, err
-}
-
-// QualifyIfPresent conditionally qualifies the variable and only records a value if one is present.
-func (e *evalWatchConstQual) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- out, present, err := e.ConstantQualifier.QualifyIfPresent(vars, obj, presenceOnly)
- var val ref.Val
- if err != nil {
- val = types.WrapErr(err)
- } else if out != nil {
- val = e.adapter.NativeToValue(out)
- } else if presenceOnly {
- val = types.Bool(present)
- }
- if present || presenceOnly {
- e.observer(e.ID(), e.ConstantQualifier, val)
- }
- return out, present, err
-}
-
-// QualifierValueEquals tests whether the incoming value is equal to the qualifying constant.
-func (e *evalWatchConstQual) QualifierValueEquals(value any) bool {
- qve, ok := e.ConstantQualifier.(qualifierValueEquator)
- return ok && qve.QualifierValueEquals(value)
-}
-
-// evalWatchAttrQual observes the qualification of an object by a value computed at runtime.
-type evalWatchAttrQual struct {
- Attribute
- observer EvalObserver
- adapter ref.TypeAdapter
-}
-
-// Qualify observes the qualification of an object via a value computed at runtime.
-func (e *evalWatchAttrQual) Qualify(vars Activation, obj any) (any, error) {
- out, err := e.Attribute.Qualify(vars, obj)
- var val ref.Val
- if err != nil {
- val = types.WrapErr(err)
- } else {
- val = e.adapter.NativeToValue(out)
- }
- e.observer(e.ID(), e.Attribute, val)
- return out, err
-}
-
-// QualifyIfPresent conditionally qualifies the variable and only records a value if one is present.
-func (e *evalWatchAttrQual) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- out, present, err := e.Attribute.QualifyIfPresent(vars, obj, presenceOnly)
- var val ref.Val
- if err != nil {
- val = types.WrapErr(err)
- } else if out != nil {
- val = e.adapter.NativeToValue(out)
- } else if presenceOnly {
- val = types.Bool(present)
- }
- if present || presenceOnly {
- e.observer(e.ID(), e.Attribute, val)
- }
- return out, present, err
-}
-
-// evalWatchQual observes the qualification of an object by a value computed at runtime.
-type evalWatchQual struct {
- Qualifier
- observer EvalObserver
- adapter types.Adapter
-}
-
-// Qualify observes the qualification of an object via a value computed at runtime.
-func (e *evalWatchQual) Qualify(vars Activation, obj any) (any, error) {
- out, err := e.Qualifier.Qualify(vars, obj)
- var val ref.Val
- if err != nil {
- val = types.WrapErr(err)
- } else {
- val = e.adapter.NativeToValue(out)
- }
- e.observer(e.ID(), e.Qualifier, val)
- return out, err
-}
-
-// QualifyIfPresent conditionally qualifies the variable and only records a value if one is present.
-func (e *evalWatchQual) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) {
- out, present, err := e.Qualifier.QualifyIfPresent(vars, obj, presenceOnly)
- var val ref.Val
- if err != nil {
- val = types.WrapErr(err)
- } else if out != nil {
- val = e.adapter.NativeToValue(out)
- } else if presenceOnly {
- val = types.Bool(present)
- }
- if present || presenceOnly {
- e.observer(e.ID(), e.Qualifier, val)
- }
- return out, present, err
-}
-
-// evalWatchConst describes a watcher of an evalConst Interpretable.
-type evalWatchConst struct {
- InterpretableConst
- observer EvalObserver
-}
-
-// Eval implements the Interpretable interface method.
-func (e *evalWatchConst) Eval(vars Activation) ref.Val {
- val := e.Value()
- e.observer(e.ID(), e.InterpretableConst, val)
- return val
-}
-
-// evalExhaustiveOr is just like evalOr, but does not short-circuit argument evaluation.
-type evalExhaustiveOr struct {
- id int64
- terms []Interpretable
-}
-
-// ID implements the Interpretable interface method.
-func (or *evalExhaustiveOr) ID() int64 {
- return or.id
-}
-
-// Eval implements the Interpretable interface method.
-func (or *evalExhaustiveOr) Eval(ctx Activation) ref.Val {
- var err ref.Val = nil
- var unk *types.Unknown
- isTrue := false
- for _, term := range or.terms {
- val := term.Eval(ctx)
- boolVal, ok := val.(types.Bool)
- // flag the result as true
- if ok && boolVal == types.True {
- isTrue = true
- }
- if !ok && !isTrue {
- isUnk := false
- unk, isUnk = types.MaybeMergeUnknowns(val, unk)
- if !isUnk && err == nil {
- if types.IsError(val) {
- err = val
- } else {
- err = types.MaybeNoSuchOverloadErr(val)
- }
- }
- }
- }
- if isTrue {
- return types.True
- }
- if unk != nil {
- return unk
- }
- if err != nil {
- return err
- }
- return types.False
-}
-
-// evalExhaustiveAnd is just like evalAnd, but does not short-circuit argument evaluation.
-type evalExhaustiveAnd struct {
- id int64
- terms []Interpretable
-}
-
-// ID implements the Interpretable interface method.
-func (and *evalExhaustiveAnd) ID() int64 {
- return and.id
-}
-
-// Eval implements the Interpretable interface method.
-func (and *evalExhaustiveAnd) Eval(ctx Activation) ref.Val {
- var err ref.Val = nil
- var unk *types.Unknown
- isFalse := false
- for _, term := range and.terms {
- val := term.Eval(ctx)
- boolVal, ok := val.(types.Bool)
-		// flag the result as false
- if ok && boolVal == types.False {
- isFalse = true
- }
- if !ok && !isFalse {
- isUnk := false
- unk, isUnk = types.MaybeMergeUnknowns(val, unk)
- if !isUnk && err == nil {
- if types.IsError(val) {
- err = val
- } else {
- err = types.MaybeNoSuchOverloadErr(val)
- }
- }
- }
- }
- if isFalse {
- return types.False
- }
- if unk != nil {
- return unk
- }
- if err != nil {
- return err
- }
- return types.True
-}
-
-// evalExhaustiveConditional is like evalConditional, but does not short-circuit argument
-// evaluation.
-type evalExhaustiveConditional struct {
- id int64
- adapter types.Adapter
- attr *conditionalAttribute
-}
-
-// ID implements the Interpretable interface method.
-func (cond *evalExhaustiveConditional) ID() int64 {
- return cond.id
-}
-
-// Eval implements the Interpretable interface method.
-func (cond *evalExhaustiveConditional) Eval(ctx Activation) ref.Val {
- cVal := cond.attr.expr.Eval(ctx)
- tVal, tErr := cond.attr.truthy.Resolve(ctx)
- fVal, fErr := cond.attr.falsy.Resolve(ctx)
- cBool, ok := cVal.(types.Bool)
- if !ok {
- return types.ValOrErr(cVal, "no such overload")
- }
- if cBool {
- if tErr != nil {
- return types.WrapErr(tErr)
- }
- return cond.adapter.NativeToValue(tVal)
- }
- if fErr != nil {
- return types.WrapErr(fErr)
- }
- return cond.adapter.NativeToValue(fVal)
-}
-
-// evalAttr evaluates an Attribute value.
-type evalAttr struct {
- adapter types.Adapter
- attr Attribute
- optional bool
-}
-
-var _ InterpretableAttribute = &evalAttr{}
-
-// ID of the attribute instruction.
-func (a *evalAttr) ID() int64 {
- return a.attr.ID()
-}
-
-// AddQualifier implements the InterpretableAttribute interface method.
-func (a *evalAttr) AddQualifier(qual Qualifier) (Attribute, error) {
- attr, err := a.attr.AddQualifier(qual)
- a.attr = attr
- return attr, err
-}
-
-// Attr implements the InterpretableAttribute interface method.
-func (a *evalAttr) Attr() Attribute {
- return a.attr
-}
-
-// Adapter implements the InterpretableAttribute interface method.
-func (a *evalAttr) Adapter() types.Adapter {
- return a.adapter
-}
-
-// Eval implements the Interpretable interface method.
-func (a *evalAttr) Eval(ctx Activation) ref.Val {
- v, err := a.attr.Resolve(ctx)
- if err != nil {
- return types.WrapErr(err)
- }
- return a.adapter.NativeToValue(v)
-}
-
-// Qualify proxies to the Attribute's Qualify method.
-func (a *evalAttr) Qualify(ctx Activation, obj any) (any, error) {
- return a.attr.Qualify(ctx, obj)
-}
-
-// QualifyIfPresent proxies to the Attribute's QualifyIfPresent method.
-func (a *evalAttr) QualifyIfPresent(ctx Activation, obj any, presenceOnly bool) (any, bool, error) {
- return a.attr.QualifyIfPresent(ctx, obj, presenceOnly)
-}
-
-func (a *evalAttr) IsOptional() bool {
- return a.optional
-}
-
-// Resolve proxies to the Attribute's Resolve method.
-func (a *evalAttr) Resolve(ctx Activation) (any, error) {
- return a.attr.Resolve(ctx)
-}
-
-type evalWatchConstructor struct {
- constructor InterpretableConstructor
- observer EvalObserver
-}
-
-// InitVals implements the InterpretableConstructor InitVals function.
-func (c *evalWatchConstructor) InitVals() []Interpretable {
- return c.constructor.InitVals()
-}
-
-// Type implements the InterpretableConstructor Type function.
-func (c *evalWatchConstructor) Type() ref.Type {
- return c.constructor.Type()
-}
-
-// ID implements the Interpretable ID function.
-func (c *evalWatchConstructor) ID() int64 {
- return c.constructor.ID()
-}
-
-// Eval implements the Interpretable Eval function.
-func (c *evalWatchConstructor) Eval(ctx Activation) ref.Val {
- val := c.constructor.Eval(ctx)
- c.observer(c.ID(), c.constructor, val)
- return val
-}
-
-func invalidOptionalEntryInit(field any, value ref.Val) ref.Val {
- return types.NewErr("cannot initialize optional entry '%v' from non-optional value %v", field, value)
-}
-
-func invalidOptionalElementInit(value ref.Val) ref.Val {
- return types.NewErr("cannot initialize optional list element from non-optional value %v", value)
-}
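The exhaustive eval structs above (evalExhaustiveOr, evalExhaustiveAnd, evalExhaustiveConditional) evaluate every term so that an observer can record a value for each sub-expression id. As a rough, non-authoritative sketch, this behavior is normally reached through the public cel package rather than by constructing these structs directly; the example assumes the usual cel-go surface (cel.NewEnv, cel.OptExhaustiveEval, cel.OptTrackState) and an illustrative variable named user.

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(cel.Variable("user", cel.StringType))
	if err != nil {
		panic(err)
	}
	// With exhaustive evaluation both sides of '||' are evaluated, so the tracked
	// EvalState ends up holding a value for every term, not just the first true one.
	ast, iss := env.Compile(`user == "admin" || user.startsWith("a")`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast, cel.EvalOptions(cel.OptExhaustiveEval, cel.OptTrackState))
	if err != nil {
		panic(err)
	}
	out, details, err := prg.Eval(map[string]any{"user": "admin"})
	fmt.Println(out, err, details.State() != nil)
}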
diff --git a/vendor/github.com/google/cel-go/interpreter/interpreter.go b/vendor/github.com/google/cel-go/interpreter/interpreter.go
deleted file mode 100644
index 0aca74d88..000000000
--- a/vendor/github.com/google/cel-go/interpreter/interpreter.go
+++ /dev/null
@@ -1,185 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package interpreter provides functions to evaluate parsed expressions with
-// the option to augment the evaluation with inputs and functions supplied at
-// evaluation time.
-package interpreter
-
-import (
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/containers"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-// Interpreter generates a new Interpretable from a checked or unchecked expression.
-type Interpreter interface {
- // NewInterpretable creates an Interpretable from a checked expression and an
- // optional list of InterpretableDecorator values.
- NewInterpretable(exprAST *ast.AST, decorators ...InterpretableDecorator) (Interpretable, error)
-}
-
-// EvalObserver is a functional interface that accepts an expression id, a program step, and an observed value.
-// The id identifies the expression that was evaluated, the programStep is the Interpretable or Qualifier that
-// was evaluated, and the value is the result of the evaluation.
-type EvalObserver func(id int64, programStep any, value ref.Val)
-
-// Observe constructs a decorator that calls all the provided observers in order after evaluating each Interpretable
-// or Qualifier during program evaluation.
-func Observe(observers ...EvalObserver) InterpretableDecorator {
- if len(observers) == 1 {
- return decObserveEval(observers[0])
- }
- observeFn := func(id int64, programStep any, val ref.Val) {
- for _, observer := range observers {
- observer(id, programStep, val)
- }
- }
- return decObserveEval(observeFn)
-}
-
-// EvalCancelledError represents a cancelled program evaluation operation.
-type EvalCancelledError struct {
- Message string
- // Type identifies the cause of the cancellation.
- Cause CancellationCause
-}
-
-func (e EvalCancelledError) Error() string {
- return e.Message
-}
-
-// CancellationCause enumerates the ways a program evaluation operation can be cancelled.
-type CancellationCause int
-
-const (
- // ContextCancelled indicates that the operation was cancelled in response to a Golang context cancellation.
- ContextCancelled CancellationCause = iota
-
- // CostLimitExceeded indicates that the operation was cancelled in response to the actual cost limit being
- // exceeded.
- CostLimitExceeded
-)
-
-// TODO: Replace all usages of TrackState with EvalStateObserver
-
-// TrackState decorates each expression node with an observer which records the value
-// associated with the given expression id. EvalState must be provided to the decorator.
-// This decorator is not thread-safe, and the EvalState must be reset between Eval()
-// calls.
-// DEPRECATED: Please use EvalStateObserver instead. It composes gracefully with additional observers.
-func TrackState(state EvalState) InterpretableDecorator {
- return Observe(EvalStateObserver(state))
-}
-
-// EvalStateObserver provides an observer which records the value
-// associated with the given expression id. EvalState must be provided to the observer.
-// This decorator is not thread-safe, and the EvalState must be reset between Eval()
-// calls.
-func EvalStateObserver(state EvalState) EvalObserver {
- return func(id int64, programStep any, val ref.Val) {
- state.SetValue(id, val)
- }
-}
-
-// ExhaustiveEval replaces operations that short-circuit with versions that evaluate
-// expressions and couples this behavior with the TrackState() decorator to provide
-// insight into the evaluation state of the entire expression. EvalState must be
-// provided to the decorator. This decorator is not thread-safe, and the EvalState
-// must be reset between Eval() calls.
-func ExhaustiveEval() InterpretableDecorator {
- ex := decDisableShortcircuits()
- return func(i Interpretable) (Interpretable, error) {
- return ex(i)
- }
-}
-
-// InterruptableEval annotates comprehension loops with information that indicates they
-// should check the `#interrupted` state within a custom Activation.
-//
-// The custom activation is currently managed higher up in the stack within the 'cel' package
-// and should not require any custom support on behalf of callers.
-func InterruptableEval() InterpretableDecorator {
- return decInterruptFolds()
-}
-
-// Optimize will pre-compute operations such as list and map construction and optimize
-// call arguments to set membership tests. The set of optimizations will increase over time.
-func Optimize() InterpretableDecorator {
- return decOptimize()
-}
-
-// RegexOptimization provides a way to replace an InterpretableCall for a regex function when the
-// RegexIndex argument is a string constant. Typically, the Factory would compile the regex pattern at
-// RegexIndex and report any errors (at program creation time) and then use the compiled regex for
-// all regex function invocations.
-type RegexOptimization struct {
- // Function is the name of the function to optimize.
- Function string
- // OverloadID is the ID of the overload to optimize.
- OverloadID string
- // RegexIndex is the index position of the regex pattern argument. Only calls to the function where this argument is
- // a string constant will be delegated to this optimizer.
- RegexIndex int
- // Factory constructs a replacement InterpretableCall node that optimizes the regex function call. Factory is
- // provided with the unoptimized regex call and the string constant at the RegexIndex argument.
- // The Factory may compile the regex for use across all invocations of the call, return any errors and
- // return an interpreter.NewCall with the desired regex optimized function impl.
- Factory func(call InterpretableCall, regexPattern string) (InterpretableCall, error)
-}
-
-// CompileRegexConstants compiles regex pattern string constants at program creation time and reports any regex pattern
-// compile errors.
-func CompileRegexConstants(regexOptimizations ...*RegexOptimization) InterpretableDecorator {
- return decRegexOptimizer(regexOptimizations...)
-}
-
-type exprInterpreter struct {
- dispatcher Dispatcher
- container *containers.Container
- provider types.Provider
- adapter types.Adapter
- attrFactory AttributeFactory
-}
-
-// NewInterpreter builds an Interpreter from a Dispatcher and TypeProvider which will be used
-// throughout the Eval of all Interpretable instances generated from it.
-func NewInterpreter(dispatcher Dispatcher,
- container *containers.Container,
- provider types.Provider,
- adapter types.Adapter,
- attrFactory AttributeFactory) Interpreter {
- return &exprInterpreter{
- dispatcher: dispatcher,
- container: container,
- provider: provider,
- adapter: adapter,
- attrFactory: attrFactory}
-}
-
-// NewInterpretable implements the Interpreter interface method.
-func (i *exprInterpreter) NewInterpretable(
- checked *ast.AST,
- decorators ...InterpretableDecorator) (Interpretable, error) {
- p := newPlanner(
- i.dispatcher,
- i.provider,
- i.adapter,
- i.attrFactory,
- i.container,
- checked,
- decorators...)
- return p.Plan(checked.Expr())
-}
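Observe, EvalStateObserver, and TrackState above are the hooks for recording per-expression values during evaluation. A minimal sketch of composing a custom observer with state tracking follows; it assumes the interpreter package API exactly as removed in this patch and does not wire the resulting decorator into a full Interpreter (that would be done via NewInterpretable).

package main

import (
	"fmt"

	"github.com/google/cel-go/common/types/ref"
	"github.com/google/cel-go/interpreter"
)

func main() {
	// EvalState records values keyed by expression id; it must be reset between Eval() calls.
	state := interpreter.NewEvalState()

	// A custom EvalObserver that logs each observed program step.
	logger := func(id int64, programStep any, val ref.Val) {
		fmt.Printf("expr %d -> %v\n", id, val)
	}

	// Observe folds both observers into a single InterpretableDecorator, which would be
	// passed to Interpreter.NewInterpretable alongside any other decorators.
	decorator := interpreter.Observe(interpreter.EvalStateObserver(state), logger)
	_ = decorator
}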
diff --git a/vendor/github.com/google/cel-go/interpreter/optimizations.go b/vendor/github.com/google/cel-go/interpreter/optimizations.go
deleted file mode 100644
index 2fc87e693..000000000
--- a/vendor/github.com/google/cel-go/interpreter/optimizations.go
+++ /dev/null
@@ -1,46 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "regexp"
-
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-// MatchesRegexOptimization optimizes the 'matches' standard library function by compiling the regex pattern and
-// reporting any compilation errors at program creation time, and using the compiled regex pattern for all function
-// call invocations.
-var MatchesRegexOptimization = &RegexOptimization{
- Function: "matches",
- RegexIndex: 1,
- Factory: func(call InterpretableCall, regexPattern string) (InterpretableCall, error) {
- compiledRegex, err := regexp.Compile(regexPattern)
- if err != nil {
- return nil, err
- }
- return NewCall(call.ID(), call.Function(), call.OverloadID(), call.Args(), func(values ...ref.Val) ref.Val {
- if len(values) != 2 {
- return types.NoSuchOverloadErr()
- }
- in, ok := values[0].Value().(string)
- if !ok {
- return types.NoSuchOverloadErr()
- }
- return types.Bool(compiledRegex.MatchString(in))
- }), nil
- },
-}
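MatchesRegexOptimization compiles the constant pattern of a matches() call once at program planning time. As a small, hedged sketch of how it could be enabled, the decorator below would be handed to Interpreter.NewInterpretable; at the higher level the cel package's OptOptimize eval option is the usual way this optimization gets turned on.

package main

import "github.com/google/cel-go/interpreter"

func main() {
	// CompileRegexConstants produces an InterpretableDecorator that replaces constant-pattern
	// matches() calls with a call that reuses a pre-compiled *regexp.Regexp, reporting any
	// bad pattern at program creation time instead of on every evaluation.
	regexOpt := interpreter.CompileRegexConstants(interpreter.MatchesRegexOptimization)
	_ = regexOpt
}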
diff --git a/vendor/github.com/google/cel-go/interpreter/planner.go b/vendor/github.com/google/cel-go/interpreter/planner.go
deleted file mode 100644
index cf371f95d..000000000
--- a/vendor/github.com/google/cel-go/interpreter/planner.go
+++ /dev/null
@@ -1,756 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "fmt"
- "strings"
-
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/containers"
- "github.com/google/cel-go/common/functions"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/types"
-)
-
-// interpretablePlanner creates an Interpretable evaluation plan from a proto Expr value.
-type interpretablePlanner interface {
- // Plan generates an Interpretable value (or error) from the input proto Expr.
- Plan(expr ast.Expr) (Interpretable, error)
-}
-
-// newPlanner creates an interpretablePlanner which references a Dispatcher, TypeProvider,
-// TypeAdapter, Container, and CheckedExpr value. These pieces of data are used to resolve
-// functions, types, and namespaced identifiers at plan time rather than at runtime since
-// it only needs to be done once and may be semi-expensive to compute.
-func newPlanner(disp Dispatcher,
- provider types.Provider,
- adapter types.Adapter,
- attrFactory AttributeFactory,
- cont *containers.Container,
- exprAST *ast.AST,
- decorators ...InterpretableDecorator) interpretablePlanner {
- return &planner{
- disp: disp,
- provider: provider,
- adapter: adapter,
- attrFactory: attrFactory,
- container: cont,
- refMap: exprAST.ReferenceMap(),
- typeMap: exprAST.TypeMap(),
- decorators: decorators,
- }
-}
-
-// planner is an implementation of the interpretablePlanner interface.
-type planner struct {
- disp Dispatcher
- provider types.Provider
- adapter types.Adapter
- attrFactory AttributeFactory
- container *containers.Container
- refMap map[int64]*ast.ReferenceInfo
- typeMap map[int64]*types.Type
- decorators []InterpretableDecorator
-}
-
-// Plan implements the interpretablePlanner interface. This implementation of the Plan method also
-// applies decorators to each Interpretable generated as part of the overall plan. Decorators are
-// useful for layering functionality into the evaluation that is not natively understood by CEL,
-// such as state-tracking, expression re-write, and possibly efficient thread-safe memoization of
-// repeated expressions.
-func (p *planner) Plan(expr ast.Expr) (Interpretable, error) {
- switch expr.Kind() {
- case ast.CallKind:
- return p.decorate(p.planCall(expr))
- case ast.IdentKind:
- return p.decorate(p.planIdent(expr))
- case ast.LiteralKind:
- return p.decorate(p.planConst(expr))
- case ast.SelectKind:
- return p.decorate(p.planSelect(expr))
- case ast.ListKind:
- return p.decorate(p.planCreateList(expr))
- case ast.MapKind:
- return p.decorate(p.planCreateMap(expr))
- case ast.StructKind:
- return p.decorate(p.planCreateStruct(expr))
- case ast.ComprehensionKind:
- return p.decorate(p.planComprehension(expr))
- }
- return nil, fmt.Errorf("unsupported expr: %v", expr)
-}
-
-// decorate applies the InterpretableDecorator functions to the given Interpretable.
-// Both the Interpretable and error generated by a Plan step are accepted as arguments
-// for convenience.
-func (p *planner) decorate(i Interpretable, err error) (Interpretable, error) {
- if err != nil {
- return nil, err
- }
- for _, dec := range p.decorators {
- i, err = dec(i)
- if err != nil {
- return nil, err
- }
- }
- return i, nil
-}
-
-// planIdent creates an Interpretable that resolves an identifier from an Activation.
-func (p *planner) planIdent(expr ast.Expr) (Interpretable, error) {
- // Establish whether the identifier is in the reference map.
- if identRef, found := p.refMap[expr.ID()]; found {
- return p.planCheckedIdent(expr.ID(), identRef)
- }
- // Create the possible attribute list for the unresolved reference.
- ident := expr.AsIdent()
- return &evalAttr{
- adapter: p.adapter,
- attr: p.attrFactory.MaybeAttribute(expr.ID(), ident),
- }, nil
-}
-
-func (p *planner) planCheckedIdent(id int64, identRef *ast.ReferenceInfo) (Interpretable, error) {
- // Plan a constant reference if this is the case for this simple identifier.
- if identRef.Value != nil {
- return NewConstValue(id, identRef.Value), nil
- }
-
- // Check to see whether the type map indicates this is a type name. All types should be
- // registered with the provider.
- cType := p.typeMap[id]
- if cType.Kind() == types.TypeKind {
- cVal, found := p.provider.FindIdent(identRef.Name)
- if !found {
- return nil, fmt.Errorf("reference to undefined type: %s", identRef.Name)
- }
- return NewConstValue(id, cVal), nil
- }
-
- // Otherwise, return the attribute for the resolved identifier name.
- return &evalAttr{
- adapter: p.adapter,
- attr: p.attrFactory.AbsoluteAttribute(id, identRef.Name),
- }, nil
-}
-
-// planSelect creates an Interpretable that either:
-//
-// a) selects a field from a map or proto.
-// b) creates a field presence test for a select within a has() macro.
-// c) resolves the select expression to a namespaced identifier.
-func (p *planner) planSelect(expr ast.Expr) (Interpretable, error) {
- // If the Select id appears in the reference map from the CheckedExpr proto then it is either
- // a namespaced identifier or enum value.
- if identRef, found := p.refMap[expr.ID()]; found {
- return p.planCheckedIdent(expr.ID(), identRef)
- }
-
- sel := expr.AsSelect()
- // Plan the operand evaluation.
- op, err := p.Plan(sel.Operand())
- if err != nil {
- return nil, err
- }
- opType := p.typeMap[sel.Operand().ID()]
-
- // If the Select was marked TestOnly, this is a presence test.
- //
- // Note: presence tests are defined for structured (e.g. proto) and dynamic values (map, json)
- // as follows:
- // - True if the object field has a non-default value, e.g. obj.str != ""
- // - True if the dynamic value has the field defined, e.g. key in map
- //
- // However, presence tests are not defined for qualified identifier names with primitive types.
- // If a string named 'a.b.c' is declared in the environment and referenced within `has(a.b.c)`,
- // it is not clear whether has should error or follow the convention defined for structured
- // values.
-
- // Establish the attribute reference.
- attr, isAttr := op.(InterpretableAttribute)
- if !isAttr {
- attr, err = p.relativeAttr(op.ID(), op, false)
- if err != nil {
- return nil, err
- }
- }
-
- // Build a qualifier for the attribute.
- qual, err := p.attrFactory.NewQualifier(opType, expr.ID(), sel.FieldName(), false)
- if err != nil {
- return nil, err
- }
- // Modify the attribute to be test-only.
- if sel.IsTestOnly() {
- attr = &evalTestOnly{
- id: expr.ID(),
- InterpretableAttribute: attr,
- }
- }
- // Append the qualifier on the attribute.
- _, err = attr.AddQualifier(qual)
- return attr, err
-}
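The presence-test note above describes how has() behaves for structured and dynamic values. Below is a minimal, illustrative sketch at the cel package level; the variable name m and its map type are assumptions used only for this example.

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func main() {
	// m is a dynamic map value, so has(m.role) tests whether the 'role' key is defined.
	env, err := cel.NewEnv(cel.Variable("m", cel.MapType(cel.StringType, cel.StringType)))
	if err != nil {
		panic(err)
	}
	ast, iss := env.Compile(`has(m.role) && m.role == "admin"`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(map[string]any{"m": map[string]string{"role": "admin"}})
	fmt.Println(out, err) // expected: true <nil>
}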
-
-// planCall creates a callable Interpretable while specializing for common functions and invocation
-// patterns. Specifically, conditional operators &&, ||, ?:, and (in)equality functions result in
-// optimized Interpretable values.
-func (p *planner) planCall(expr ast.Expr) (Interpretable, error) {
- call := expr.AsCall()
- target, fnName, oName := p.resolveFunction(expr)
- argCount := len(call.Args())
- var offset int
- if target != nil {
- argCount++
- offset++
- }
-
- args := make([]Interpretable, argCount)
- if target != nil {
- arg, err := p.Plan(target)
- if err != nil {
- return nil, err
- }
- args[0] = arg
- }
- for i, argExpr := range call.Args() {
- arg, err := p.Plan(argExpr)
- if err != nil {
- return nil, err
- }
- args[i+offset] = arg
- }
-
- // Generate specialized Interpretable operators by function name if possible.
- switch fnName {
- case operators.LogicalAnd:
- return p.planCallLogicalAnd(expr, args)
- case operators.LogicalOr:
- return p.planCallLogicalOr(expr, args)
- case operators.Conditional:
- return p.planCallConditional(expr, args)
- case operators.Equals:
- return p.planCallEqual(expr, args)
- case operators.NotEquals:
- return p.planCallNotEqual(expr, args)
- case operators.Index:
- return p.planCallIndex(expr, args, false)
- case operators.OptSelect, operators.OptIndex:
- return p.planCallIndex(expr, args, true)
- }
-
- // Otherwise, generate Interpretable calls specialized by argument count.
- // Try to find the specific function by overload id.
- var fnDef *functions.Overload
- if oName != "" {
- fnDef, _ = p.disp.FindOverload(oName)
- }
- // If the overload id couldn't resolve the function, try the simple function name.
- if fnDef == nil {
- fnDef, _ = p.disp.FindOverload(fnName)
- }
- switch argCount {
- case 0:
- return p.planCallZero(expr, fnName, oName, fnDef)
- case 1:
- // If the FunctionOp has been used, then use it as it may exist for the purposes
- // of dynamic dispatch within a singleton function implementation.
- if fnDef != nil && fnDef.Unary == nil && fnDef.Function != nil {
- return p.planCallVarArgs(expr, fnName, oName, fnDef, args)
- }
- return p.planCallUnary(expr, fnName, oName, fnDef, args)
- case 2:
- // If the FunctionOp has been used, then use it as it may exist for the purposes
- // of dynamic dispatch within a singleton function implementation.
- if fnDef != nil && fnDef.Binary == nil && fnDef.Function != nil {
- return p.planCallVarArgs(expr, fnName, oName, fnDef, args)
- }
- return p.planCallBinary(expr, fnName, oName, fnDef, args)
- default:
- return p.planCallVarArgs(expr, fnName, oName, fnDef, args)
- }
-}
-
-// planCallZero generates a zero-arity callable Interpretable.
-func (p *planner) planCallZero(expr ast.Expr,
- function string,
- overload string,
- impl *functions.Overload) (Interpretable, error) {
- if impl == nil || impl.Function == nil {
- return nil, fmt.Errorf("no such overload: %s()", function)
- }
- return &evalZeroArity{
- id: expr.ID(),
- function: function,
- overload: overload,
- impl: impl.Function,
- }, nil
-}
-
-// planCallUnary generates a unary callable Interpretable.
-func (p *planner) planCallUnary(expr ast.Expr,
- function string,
- overload string,
- impl *functions.Overload,
- args []Interpretable) (Interpretable, error) {
- var fn functions.UnaryOp
- var trait int
- var nonStrict bool
- if impl != nil {
- if impl.Unary == nil {
- return nil, fmt.Errorf("no such overload: %s(arg)", function)
- }
- fn = impl.Unary
- trait = impl.OperandTrait
- nonStrict = impl.NonStrict
- }
- return &evalUnary{
- id: expr.ID(),
- function: function,
- overload: overload,
- arg: args[0],
- trait: trait,
- impl: fn,
- nonStrict: nonStrict,
- }, nil
-}
-
-// planCallBinary generates a binary callable Interpretable.
-func (p *planner) planCallBinary(expr ast.Expr,
- function string,
- overload string,
- impl *functions.Overload,
- args []Interpretable) (Interpretable, error) {
- var fn functions.BinaryOp
- var trait int
- var nonStrict bool
- if impl != nil {
- if impl.Binary == nil {
- return nil, fmt.Errorf("no such overload: %s(lhs, rhs)", function)
- }
- fn = impl.Binary
- trait = impl.OperandTrait
- nonStrict = impl.NonStrict
- }
- return &evalBinary{
- id: expr.ID(),
- function: function,
- overload: overload,
- lhs: args[0],
- rhs: args[1],
- trait: trait,
- impl: fn,
- nonStrict: nonStrict,
- }, nil
-}
-
-// planCallVarArgs generates a variable argument callable Interpretable.
-func (p *planner) planCallVarArgs(expr ast.Expr,
- function string,
- overload string,
- impl *functions.Overload,
- args []Interpretable) (Interpretable, error) {
- var fn functions.FunctionOp
- var trait int
- var nonStrict bool
- if impl != nil {
- if impl.Function == nil {
- return nil, fmt.Errorf("no such overload: %s(...)", function)
- }
- fn = impl.Function
- trait = impl.OperandTrait
- nonStrict = impl.NonStrict
- }
- return &evalVarArgs{
- id: expr.ID(),
- function: function,
- overload: overload,
- args: args,
- trait: trait,
- impl: fn,
- nonStrict: nonStrict,
- }, nil
-}
-
-// planCallEqual generates an equals (==) Interpretable.
-func (p *planner) planCallEqual(expr ast.Expr, args []Interpretable) (Interpretable, error) {
- return &evalEq{
- id: expr.ID(),
- lhs: args[0],
- rhs: args[1],
- }, nil
-}
-
-// planCallNotEqual generates a not equals (!=) Interpretable.
-func (p *planner) planCallNotEqual(expr ast.Expr, args []Interpretable) (Interpretable, error) {
- return &evalNe{
- id: expr.ID(),
- lhs: args[0],
- rhs: args[1],
- }, nil
-}
-
-// planCallLogicalAnd generates a logical and (&&) Interpretable.
-func (p *planner) planCallLogicalAnd(expr ast.Expr, args []Interpretable) (Interpretable, error) {
- return &evalAnd{
- id: expr.ID(),
- terms: args,
- }, nil
-}
-
-// planCallLogicalOr generates a logical or (||) Interpretable.
-func (p *planner) planCallLogicalOr(expr ast.Expr, args []Interpretable) (Interpretable, error) {
- return &evalOr{
- id: expr.ID(),
- terms: args,
- }, nil
-}
-
-// planCallConditional generates a conditional / ternary (c ? t : f) Interpretable.
-func (p *planner) planCallConditional(expr ast.Expr, args []Interpretable) (Interpretable, error) {
- cond := args[0]
- t := args[1]
- var tAttr Attribute
- truthyAttr, isTruthyAttr := t.(InterpretableAttribute)
- if isTruthyAttr {
- tAttr = truthyAttr.Attr()
- } else {
- tAttr = p.attrFactory.RelativeAttribute(t.ID(), t)
- }
-
- f := args[2]
- var fAttr Attribute
- falsyAttr, isFalsyAttr := f.(InterpretableAttribute)
- if isFalsyAttr {
- fAttr = falsyAttr.Attr()
- } else {
- fAttr = p.attrFactory.RelativeAttribute(f.ID(), f)
- }
-
- return &evalAttr{
- adapter: p.adapter,
- attr: p.attrFactory.ConditionalAttribute(expr.ID(), cond, tAttr, fAttr),
- }, nil
-}
-
-// planCallIndex either extends an attribute with the argument to the index operation, or creates
-// a relative attribute based on the return of a function call or operation.
-func (p *planner) planCallIndex(expr ast.Expr, args []Interpretable, optional bool) (Interpretable, error) {
- op := args[0]
- ind := args[1]
- opType := p.typeMap[op.ID()]
-
- // Establish the attribute reference.
- var err error
- attr, isAttr := op.(InterpretableAttribute)
- if !isAttr {
- attr, err = p.relativeAttr(op.ID(), op, false)
- if err != nil {
- return nil, err
- }
- }
-
- // Construct the qualifier type.
- var qual Qualifier
- switch ind := ind.(type) {
- case InterpretableConst:
- qual, err = p.attrFactory.NewQualifier(opType, expr.ID(), ind.Value(), optional)
- case InterpretableAttribute:
- qual, err = p.attrFactory.NewQualifier(opType, expr.ID(), ind, optional)
- default:
- qual, err = p.relativeAttr(expr.ID(), ind, optional)
- }
- if err != nil {
- return nil, err
- }
-
- // Add the qualifier to the attribute
- _, err = attr.AddQualifier(qual)
- return attr, err
-}
-
-// planCreateList generates a list construction Interpretable.
-func (p *planner) planCreateList(expr ast.Expr) (Interpretable, error) {
- list := expr.AsList()
- optionalIndices := list.OptionalIndices()
- elements := list.Elements()
- optionals := make([]bool, len(elements))
- for _, index := range optionalIndices {
- if index < 0 || index >= int32(len(elements)) {
- return nil, fmt.Errorf("optional index %d out of element bounds [0, %d]", index, len(elements))
- }
- optionals[index] = true
- }
- elems := make([]Interpretable, len(elements))
- for i, elem := range elements {
- elemVal, err := p.Plan(elem)
- if err != nil {
- return nil, err
- }
- elems[i] = elemVal
- }
- return &evalList{
- id: expr.ID(),
- elems: elems,
- optionals: optionals,
- hasOptionals: len(optionals) != 0,
- adapter: p.adapter,
- }, nil
-}
-
-// planCreateMap generates a map construction Interpretable.
-func (p *planner) planCreateMap(expr ast.Expr) (Interpretable, error) {
- m := expr.AsMap()
- entries := m.Entries()
- optionals := make([]bool, len(entries))
- keys := make([]Interpretable, len(entries))
- vals := make([]Interpretable, len(entries))
- for i, e := range entries {
- entry := e.AsMapEntry()
- keyVal, err := p.Plan(entry.Key())
- if err != nil {
- return nil, err
- }
- keys[i] = keyVal
-
- valVal, err := p.Plan(entry.Value())
- if err != nil {
- return nil, err
- }
- vals[i] = valVal
- optionals[i] = entry.IsOptional()
- }
- return &evalMap{
- id: expr.ID(),
- keys: keys,
- vals: vals,
- optionals: optionals,
- hasOptionals: len(optionals) != 0,
- adapter: p.adapter,
- }, nil
-}
-
-// planCreateStruct generates an object construction Interpretable.
-func (p *planner) planCreateStruct(expr ast.Expr) (Interpretable, error) {
- obj := expr.AsStruct()
- typeName, defined := p.resolveTypeName(obj.TypeName())
- if !defined {
- return nil, fmt.Errorf("unknown type: %s", obj.TypeName())
- }
- objFields := obj.Fields()
- optionals := make([]bool, len(objFields))
- fields := make([]string, len(objFields))
- vals := make([]Interpretable, len(objFields))
- for i, f := range objFields {
- field := f.AsStructField()
- fields[i] = field.Name()
- val, err := p.Plan(field.Value())
- if err != nil {
- return nil, err
- }
- vals[i] = val
- optionals[i] = field.IsOptional()
- }
- return &evalObj{
- id: expr.ID(),
- typeName: typeName,
- fields: fields,
- vals: vals,
- optionals: optionals,
- hasOptionals: len(optionals) != 0,
- provider: p.provider,
- }, nil
-}
-
-// planComprehension generates an Interpretable fold operation.
-func (p *planner) planComprehension(expr ast.Expr) (Interpretable, error) {
- fold := expr.AsComprehension()
- accu, err := p.Plan(fold.AccuInit())
- if err != nil {
- return nil, err
- }
- iterRange, err := p.Plan(fold.IterRange())
- if err != nil {
- return nil, err
- }
- cond, err := p.Plan(fold.LoopCondition())
- if err != nil {
- return nil, err
- }
- step, err := p.Plan(fold.LoopStep())
- if err != nil {
- return nil, err
- }
- result, err := p.Plan(fold.Result())
- if err != nil {
- return nil, err
- }
- return &evalFold{
- id: expr.ID(),
- accuVar: fold.AccuVar(),
- accu: accu,
- iterVar: fold.IterVar(),
- iterRange: iterRange,
- cond: cond,
- step: step,
- result: result,
- adapter: p.adapter,
- }, nil
-}
-
-// planConst generates a constant valued Interpretable.
-func (p *planner) planConst(expr ast.Expr) (Interpretable, error) {
- return NewConstValue(expr.ID(), expr.AsLiteral()), nil
-}
-
-// resolveTypeName takes a qualified string constructed at parse time, applies the proto
-// namespace resolution rules to it by scanning over possible matching types in the TypeProvider.
-func (p *planner) resolveTypeName(typeName string) (string, bool) {
- for _, qualifiedTypeName := range p.container.ResolveCandidateNames(typeName) {
- if _, found := p.provider.FindStructType(qualifiedTypeName); found {
- return qualifiedTypeName, true
- }
- }
- return "", false
-}
-
-// resolveFunction determines the call target, function name, and overload name from a given Expr
-// value.
-//
-// resolveFunction resolves ambiguities where a function may either be a receiver-style
-// invocation or a qualified global function name.
-// - The target expression may only consist of ident and select expressions.
-// - The function is declared in the environment using its fully-qualified name.
-// - The fully-qualified function name matches the string serialized target value.
-func (p *planner) resolveFunction(expr ast.Expr) (ast.Expr, string, string) {
- // Note: similar logic exists within the `checker/checker.go`. If making changes here
- // please consider the impact on checker.go and consolidate implementations or mirror code
- // as appropriate.
- call := expr.AsCall()
- var target ast.Expr = nil
- if call.IsMemberFunction() {
- target = call.Target()
- }
- fnName := call.FunctionName()
-
- // Checked expressions always have a reference map entry, and _should_ have the fully qualified
- // function name as the fnName value.
- oRef, hasOverload := p.refMap[expr.ID()]
- if hasOverload {
- if len(oRef.OverloadIDs) == 1 {
- return target, fnName, oRef.OverloadIDs[0]
- }
- // Note, this namespaced function name will not appear as a fully qualified name in ASTs
- // built and stored before cel-go v0.5.0; however, this functionality did not work at all
- // before the v0.5.0 release.
- return target, fnName, ""
- }
-
- // Parse-only expressions need to handle the same logic as is normally performed at check time,
- // but with potentially much less information. The only reliable source of information about
- // which functions are configured is the dispatcher.
- if target == nil {
- // If the user has a parse-only expression, then it should have been configured as such in
- // the interpreter dispatcher as it may have been omitted from the checker environment.
- for _, qualifiedName := range p.container.ResolveCandidateNames(fnName) {
- _, found := p.disp.FindOverload(qualifiedName)
- if found {
- return nil, qualifiedName, ""
- }
- }
- // It's possible that the overload was not found, but this situation is accounted for in
- // the planCall phase; however, the leading dot used for denoting fully-qualified
- // namespaced identifiers must be stripped, as all declarations already use fully-qualified
- // names. This stripping behavior is handled automatically by the ResolveCandidateNames
- // call.
- return target, stripLeadingDot(fnName), ""
- }
-
- // Handle the situation where the function target actually indicates a qualified function name.
- qualifiedPrefix, maybeQualified := p.toQualifiedName(target)
- if maybeQualified {
- maybeQualifiedName := qualifiedPrefix + "." + fnName
- for _, qualifiedName := range p.container.ResolveCandidateNames(maybeQualifiedName) {
- _, found := p.disp.FindOverload(qualifiedName)
- if found {
- // Clear the target to ensure the proper arity is used for finding the
- // implementation.
- return nil, qualifiedName, ""
- }
- }
- }
-	// In the default case, the function is exactly as it was advertised: a receiver call with
-	// an expression-based target and the given simple function name.
- return target, fnName, ""
-}
-
-// relativeAttr indicates that the attribute in this case acts as a qualifier and as such needs to
-// be observed to ensure that its evaluation value is properly recorded for state tracking.
-func (p *planner) relativeAttr(id int64, eval Interpretable, opt bool) (InterpretableAttribute, error) {
- eAttr, ok := eval.(InterpretableAttribute)
- if !ok {
- eAttr = &evalAttr{
- adapter: p.adapter,
- attr: p.attrFactory.RelativeAttribute(id, eval),
- optional: opt,
- }
- }
- // This looks like it should either decorate the new evalAttr node, or early return the InterpretableAttribute
- decAttr, err := p.decorate(eAttr, nil)
- if err != nil {
- return nil, err
- }
- eAttr, ok = decAttr.(InterpretableAttribute)
- if !ok {
- return nil, fmt.Errorf("invalid attribute decoration: %v(%T)", decAttr, decAttr)
- }
- return eAttr, nil
-}
-
-// toQualifiedName converts an expression AST into a qualified name if possible, with a boolean
-// 'found' value that indicates if the conversion is successful.
-func (p *planner) toQualifiedName(operand ast.Expr) (string, bool) {
-	// If the type-checker identified the expression as an attribute, then it can't
-	// possibly be part of a qualified name in a namespace.
- _, isAttr := p.refMap[operand.ID()]
- if isAttr {
- return "", false
- }
-	// Since functions cannot be both namespaced and receiver functions, if the operand is not a
-	// qualified variable name, return the (possibly) qualified name given the expression.
- switch operand.Kind() {
- case ast.IdentKind:
- id := operand.AsIdent()
- return id, true
- case ast.SelectKind:
- sel := operand.AsSelect()
- // Test only expressions are not valid as qualified names.
- if sel.IsTestOnly() {
- return "", false
- }
- if qual, found := p.toQualifiedName(sel.Operand()); found {
- return qual + "." + sel.FieldName(), true
- }
- }
- return "", false
-}
-
-func stripLeadingDot(name string) string {
- if strings.HasPrefix(name, ".") {
- return name[1:]
- }
- return name
-}
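The planner applies every InterpretableDecorator to each node it plans (see decorate above). The following is a minimal sketch of a custom decorator, assuming only the interpreter package types referenced in this file (Interpretable, Activation, InterpretableDecorator); it is illustrative rather than the package's own pattern.

package main

import (
	"fmt"

	"github.com/google/cel-go/common/types/ref"
	"github.com/google/cel-go/interpreter"
)

// loggingInterpretable wraps another Interpretable and prints each result as it is evaluated.
type loggingInterpretable struct {
	interpreter.Interpretable
}

func (l loggingInterpretable) Eval(vars interpreter.Activation) ref.Val {
	val := l.Interpretable.Eval(vars)
	fmt.Printf("expr %d -> %v\n", l.ID(), val)
	return val
}

// loggingDecorator satisfies InterpretableDecorator and is invoked once per planned node.
func loggingDecorator(i interpreter.Interpretable) (interpreter.Interpretable, error) {
	return loggingInterpretable{Interpretable: i}, nil
}

func main() {
	var dec interpreter.InterpretableDecorator = loggingDecorator
	_ = dec
}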
diff --git a/vendor/github.com/google/cel-go/interpreter/prune.go b/vendor/github.com/google/cel-go/interpreter/prune.go
deleted file mode 100644
index 410d80dc4..000000000
--- a/vendor/github.com/google/cel-go/interpreter/prune.go
+++ /dev/null
@@ -1,543 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-)
-
-type astPruner struct {
- ast.ExprFactory
- expr ast.Expr
- macroCalls map[int64]ast.Expr
- state EvalState
- nextExprID int64
-}
-
-// TODO Consider having a separate walk of the AST that finds common
-// subexpressions. This can be called before or after constant folding to find
-// common subexpressions.
-
-// PruneAst prunes the given AST based on the given EvalState and generates a new AST.
-// The given AST is copied on write and a new AST is returned.
-// A couple of typical use cases for this interface would be:
-//
-// A)
-// 1) Evaluate the expression with some unknowns.
-// 2) If the result is unknown:
-//
-//	a) PruneAst
-//	b) Goto 1
-//
-// Function call results which are known would be effectively cached across
-// iterations.
-//
-// B)
-// 1) Compile the expression (maybe via a service and maybe after checking that a
-//    compiled expression does not exist in the local cache).
-// 2) Prepare the environment and the interpreter. The Activation might be empty.
-// 3) Eval the expression. This might return an unknown, an error, or a concrete value.
-// 4) PruneAst.
-// 5) Maybe cache the expression.
-//
-// This is effectively constant folding the expression. How the environment is
-// prepared in step 2 is flexible. For example, if the caller caches the
-// compiled and constant-folded expressions, but is not willing to constant
-// fold (and thus cache the results of) some external calls, then they can prepare
-// the overloads accordingly.
-func PruneAst(expr ast.Expr, macroCalls map[int64]ast.Expr, state EvalState) *ast.AST {
- pruneState := NewEvalState()
- for _, id := range state.IDs() {
- v, _ := state.Value(id)
- pruneState.SetValue(id, v)
- }
- pruner := &astPruner{
- ExprFactory: ast.NewExprFactory(),
- expr: expr,
- macroCalls: macroCalls,
- state: pruneState,
- nextExprID: getMaxID(expr)}
- newExpr, _ := pruner.maybePrune(expr)
- newInfo := ast.NewSourceInfo(nil)
- for id, call := range pruner.macroCalls {
- newInfo.SetMacroCall(id, call)
- }
- return ast.NewAST(newExpr, newInfo)
-}
-
-func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (ast.Expr, bool) {
- switch v := val.(type) {
- case types.Bool, types.Bytes, types.Double, types.Int, types.Null, types.String, types.Uint:
- p.state.SetValue(id, val)
- return p.NewLiteral(id, val), true
- case types.Duration:
- p.state.SetValue(id, val)
- durationString := v.ConvertToType(types.StringType).(types.String)
- return p.NewCall(id, overloads.TypeConvertDuration, p.NewLiteral(p.nextID(), durationString)), true
- case types.Timestamp:
- timestampString := v.ConvertToType(types.StringType).(types.String)
- return p.NewCall(id, overloads.TypeConvertTimestamp, p.NewLiteral(p.nextID(), timestampString)), true
- }
-
- // Attempt to build a list literal.
- if list, isList := val.(traits.Lister); isList {
- sz := list.Size().(types.Int)
- elemExprs := make([]ast.Expr, sz)
- for i := types.Int(0); i < sz; i++ {
- elem := list.Get(i)
- if types.IsUnknownOrError(elem) {
- return nil, false
- }
- elemExpr, ok := p.maybeCreateLiteral(p.nextID(), elem)
- if !ok {
- return nil, false
- }
- elemExprs[i] = elemExpr
- }
- p.state.SetValue(id, val)
- return p.NewList(id, elemExprs, []int32{}), true
- }
-
- // Create a map literal if possible.
- if mp, isMap := val.(traits.Mapper); isMap {
- it := mp.Iterator()
- entries := make([]ast.EntryExpr, mp.Size().(types.Int))
- i := 0
- for it.HasNext() != types.False {
- key := it.Next()
- val := mp.Get(key)
- if types.IsUnknownOrError(key) || types.IsUnknownOrError(val) {
- return nil, false
- }
- keyExpr, ok := p.maybeCreateLiteral(p.nextID(), key)
- if !ok {
- return nil, false
- }
- valExpr, ok := p.maybeCreateLiteral(p.nextID(), val)
- if !ok {
- return nil, false
- }
- entry := p.NewMapEntry(p.nextID(), keyExpr, valExpr, false)
- entries[i] = entry
- i++
- }
- p.state.SetValue(id, val)
- return p.NewMap(id, entries), true
- }
-
-	// TODO(issues/377) To construct message literals, the type provider will need to support
-	// enumeration of the fields for a given message.
- return nil, false
-}
-
-func (p *astPruner) maybePruneOptional(elem ast.Expr) (ast.Expr, bool) {
- elemVal, found := p.value(elem.ID())
- if found && elemVal.Type() == types.OptionalType {
- opt := elemVal.(*types.Optional)
- if !opt.HasValue() {
- return nil, true
- }
- if newElem, pruned := p.maybeCreateLiteral(elem.ID(), opt.GetValue()); pruned {
- return newElem, true
- }
- }
- return elem, false
-}
-
-func (p *astPruner) maybePruneIn(node ast.Expr) (ast.Expr, bool) {
- // elem in list
- call := node.AsCall()
- val, exists := p.maybeValue(call.Args()[1].ID())
- if !exists {
- return nil, false
- }
- if sz, ok := val.(traits.Sizer); ok && sz.Size() == types.IntZero {
- return p.maybeCreateLiteral(node.ID(), types.False)
- }
- return nil, false
-}
-
-func (p *astPruner) maybePruneLogicalNot(node ast.Expr) (ast.Expr, bool) {
- call := node.AsCall()
- arg := call.Args()[0]
- val, exists := p.maybeValue(arg.ID())
- if !exists {
- return nil, false
- }
- if b, ok := val.(types.Bool); ok {
- return p.maybeCreateLiteral(node.ID(), !b)
- }
- return nil, false
-}
-
-func (p *astPruner) maybePruneOr(node ast.Expr) (ast.Expr, bool) {
- call := node.AsCall()
- // We know result is unknown, so we have at least one unknown arg
- // and if one side is a known value, we know we can ignore it.
- if v, exists := p.maybeValue(call.Args()[0].ID()); exists {
- if v == types.True {
- return p.maybeCreateLiteral(node.ID(), types.True)
- }
- return call.Args()[1], true
- }
- if v, exists := p.maybeValue(call.Args()[1].ID()); exists {
- if v == types.True {
- return p.maybeCreateLiteral(node.ID(), types.True)
- }
- return call.Args()[0], true
- }
- return nil, false
-}
-
-func (p *astPruner) maybePruneAnd(node ast.Expr) (ast.Expr, bool) {
- call := node.AsCall()
- // We know result is unknown, so we have at least one unknown arg
- // and if one side is a known value, we know we can ignore it.
- if v, exists := p.maybeValue(call.Args()[0].ID()); exists {
- if v == types.False {
- return p.maybeCreateLiteral(node.ID(), types.False)
- }
- return call.Args()[1], true
- }
- if v, exists := p.maybeValue(call.Args()[1].ID()); exists {
- if v == types.False {
- return p.maybeCreateLiteral(node.ID(), types.False)
- }
- return call.Args()[0], true
- }
- return nil, false
-}
-
-func (p *astPruner) maybePruneConditional(node ast.Expr) (ast.Expr, bool) {
- call := node.AsCall()
- cond, exists := p.maybeValue(call.Args()[0].ID())
- if !exists {
- return nil, false
- }
- if cond.Value().(bool) {
- return call.Args()[1], true
- }
- return call.Args()[2], true
-}
-
-func (p *astPruner) maybePruneFunction(node ast.Expr) (ast.Expr, bool) {
- if _, exists := p.value(node.ID()); !exists {
- return nil, false
- }
- call := node.AsCall()
- if call.FunctionName() == operators.LogicalOr {
- return p.maybePruneOr(node)
- }
- if call.FunctionName() == operators.LogicalAnd {
- return p.maybePruneAnd(node)
- }
- if call.FunctionName() == operators.Conditional {
- return p.maybePruneConditional(node)
- }
- if call.FunctionName() == operators.In {
- return p.maybePruneIn(node)
- }
- if call.FunctionName() == operators.LogicalNot {
- return p.maybePruneLogicalNot(node)
- }
- return nil, false
-}
-
-func (p *astPruner) maybePrune(node ast.Expr) (ast.Expr, bool) {
- return p.prune(node)
-}
-
-func (p *astPruner) prune(node ast.Expr) (ast.Expr, bool) {
- if node == nil {
- return node, false
- }
- val, valueExists := p.maybeValue(node.ID())
- if valueExists {
- if newNode, ok := p.maybeCreateLiteral(node.ID(), val); ok {
- delete(p.macroCalls, node.ID())
- return newNode, true
- }
- }
- if macro, found := p.macroCalls[node.ID()]; found {
- // Ensure that intermediate values for the comprehension are cleared during pruning
- if node.Kind() == ast.ComprehensionKind {
- compre := node.AsComprehension()
- visit(macro, clearIterVarVisitor(compre.IterVar(), p.state))
- }
- // prune the expression in terms of the macro call instead of the expanded form.
- if newMacro, pruned := p.prune(macro); pruned {
- p.macroCalls[node.ID()] = newMacro
- }
- }
-
-	// We have either an unknown/error value, or something we don't want to
-	// transform, or the expression was not evaluated. If possible, drill down
-	// more.
- switch node.Kind() {
- case ast.SelectKind:
- sel := node.AsSelect()
- if operand, isPruned := p.maybePrune(sel.Operand()); isPruned {
- if sel.IsTestOnly() {
- return p.NewPresenceTest(node.ID(), operand, sel.FieldName()), true
- }
- return p.NewSelect(node.ID(), operand, sel.FieldName()), true
- }
- case ast.CallKind:
- argsPruned := false
- call := node.AsCall()
- args := call.Args()
- newArgs := make([]ast.Expr, len(args))
- for i, a := range args {
- newArgs[i] = a
- if arg, isPruned := p.maybePrune(a); isPruned {
- argsPruned = true
- newArgs[i] = arg
- }
- }
- if !call.IsMemberFunction() {
- newCall := p.NewCall(node.ID(), call.FunctionName(), newArgs...)
- if prunedCall, isPruned := p.maybePruneFunction(newCall); isPruned {
- return prunedCall, true
- }
- return newCall, argsPruned
- }
- newTarget := call.Target()
- targetPruned := false
- if prunedTarget, isPruned := p.maybePrune(call.Target()); isPruned {
- targetPruned = true
- newTarget = prunedTarget
- }
- newCall := p.NewMemberCall(node.ID(), call.FunctionName(), newTarget, newArgs...)
- if prunedCall, isPruned := p.maybePruneFunction(newCall); isPruned {
- return prunedCall, true
- }
- return newCall, targetPruned || argsPruned
- case ast.ListKind:
- l := node.AsList()
- elems := l.Elements()
- optIndices := l.OptionalIndices()
- optIndexMap := map[int32]bool{}
- for _, i := range optIndices {
- optIndexMap[i] = true
- }
- newOptIndexMap := make(map[int32]bool, len(optIndexMap))
- newElems := make([]ast.Expr, 0, len(elems))
- var listPruned bool
- prunedIdx := 0
- for i, elem := range elems {
- _, isOpt := optIndexMap[int32(i)]
- if isOpt {
- newElem, pruned := p.maybePruneOptional(elem)
- if pruned {
- listPruned = true
- if newElem != nil {
- newElems = append(newElems, newElem)
- prunedIdx++
- }
- continue
- }
- newOptIndexMap[int32(prunedIdx)] = true
- }
- if newElem, prunedElem := p.maybePrune(elem); prunedElem {
- newElems = append(newElems, newElem)
- listPruned = true
- } else {
- newElems = append(newElems, elem)
- }
- prunedIdx++
- }
- optIndices = make([]int32, len(newOptIndexMap))
- idx := 0
- for i := range newOptIndexMap {
- optIndices[idx] = i
- idx++
- }
- if listPruned {
- return p.NewList(node.ID(), newElems, optIndices), true
- }
- case ast.MapKind:
- var mapPruned bool
- m := node.AsMap()
- entries := m.Entries()
- newEntries := make([]ast.EntryExpr, len(entries))
- for i, entry := range entries {
- newEntries[i] = entry
- e := entry.AsMapEntry()
- newKey, keyPruned := p.maybePrune(e.Key())
- newValue, valuePruned := p.maybePrune(e.Value())
- if !keyPruned && !valuePruned {
- continue
- }
- mapPruned = true
- newEntry := p.NewMapEntry(entry.ID(), newKey, newValue, e.IsOptional())
- newEntries[i] = newEntry
- }
- if mapPruned {
- return p.NewMap(node.ID(), newEntries), true
- }
- case ast.StructKind:
- var structPruned bool
- obj := node.AsStruct()
- fields := obj.Fields()
- newFields := make([]ast.EntryExpr, len(fields))
- for i, field := range fields {
- newFields[i] = field
- f := field.AsStructField()
- newValue, prunedValue := p.maybePrune(f.Value())
- if !prunedValue {
- continue
- }
- structPruned = true
- newEntry := p.NewStructField(field.ID(), f.Name(), newValue, f.IsOptional())
- newFields[i] = newEntry
- }
- if structPruned {
- return p.NewStruct(node.ID(), obj.TypeName(), newFields), true
- }
- case ast.ComprehensionKind:
- compre := node.AsComprehension()
-		// Only the range of the comprehension is pruned since the state tracking only records
-		// the last iteration of the comprehension and not each step in the evaluation, which
-		// means that any residuals computed in between might be inaccurate.
- if newRange, pruned := p.maybePrune(compre.IterRange()); pruned {
- return p.NewComprehension(
- node.ID(),
- newRange,
- compre.IterVar(),
- compre.AccuVar(),
- compre.AccuInit(),
- compre.LoopCondition(),
- compre.LoopStep(),
- compre.Result(),
- ), true
- }
- }
- return node, false
-}
-
-func (p *astPruner) value(id int64) (ref.Val, bool) {
- val, found := p.state.Value(id)
- return val, (found && val != nil)
-}
-
-func (p *astPruner) maybeValue(id int64) (ref.Val, bool) {
- val, found := p.value(id)
- if !found || types.IsUnknownOrError(val) {
- return nil, false
- }
- return val, true
-}
-
-func (p *astPruner) nextID() int64 {
- next := p.nextExprID
- p.nextExprID++
- return next
-}
-
-type astVisitor struct {
-	// visitExpr is called on every expr node, including those within a map/struct entry.
- visitExpr func(expr ast.Expr)
- // visitEntry is called before entering the key, value of a map/struct entry.
- visitEntry func(entry ast.EntryExpr)
-}
-
-func getMaxID(expr ast.Expr) int64 {
- maxID := int64(1)
- visit(expr, maxIDVisitor(&maxID))
- return maxID
-}
-
-func clearIterVarVisitor(varName string, state EvalState) astVisitor {
- return astVisitor{
- visitExpr: func(e ast.Expr) {
- if e.Kind() == ast.IdentKind && e.AsIdent() == varName {
- state.SetValue(e.ID(), nil)
- }
- },
- }
-}
-
-func maxIDVisitor(maxID *int64) astVisitor {
- return astVisitor{
- visitExpr: func(e ast.Expr) {
- if e.ID() >= *maxID {
- *maxID = e.ID() + 1
- }
- },
- visitEntry: func(e ast.EntryExpr) {
- if e.ID() >= *maxID {
- *maxID = e.ID() + 1
- }
- },
- }
-}
-
-func visit(expr ast.Expr, visitor astVisitor) {
- exprs := []ast.Expr{expr}
- for len(exprs) != 0 {
- e := exprs[0]
- if visitor.visitExpr != nil {
- visitor.visitExpr(e)
- }
- exprs = exprs[1:]
- switch e.Kind() {
- case ast.SelectKind:
- exprs = append(exprs, e.AsSelect().Operand())
- case ast.CallKind:
- call := e.AsCall()
- if call.Target() != nil {
- exprs = append(exprs, call.Target())
- }
- exprs = append(exprs, call.Args()...)
- case ast.ComprehensionKind:
- compre := e.AsComprehension()
- exprs = append(exprs,
- compre.IterRange(),
- compre.AccuInit(),
- compre.LoopCondition(),
- compre.LoopStep(),
- compre.Result())
- case ast.ListKind:
- list := e.AsList()
- exprs = append(exprs, list.Elements()...)
- case ast.MapKind:
- for _, entry := range e.AsMap().Entries() {
- e := entry.AsMapEntry()
- if visitor.visitEntry != nil {
- visitor.visitEntry(entry)
- }
- exprs = append(exprs, e.Key())
- exprs = append(exprs, e.Value())
- }
- case ast.StructKind:
- for _, entry := range e.AsStruct().Fields() {
- f := entry.AsStructField()
- if visitor.visitEntry != nil {
- visitor.visitEntry(entry)
- }
- exprs = append(exprs, f.Value())
- }
- }
- }
-}
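PruneAst and the visitors above produce a residual AST from a partially evaluated expression. Below is a minimal sketch of the usual entry point, assuming the cel package's partial-evaluation surface (OptPartialEval, PartialVars, AttributePattern, ResidualAst), which is the typical caller of PruneAst; the variable names are illustrative.

package main

import (
	"fmt"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(
		cel.Variable("x", cel.IntType),
		cel.Variable("y", cel.IntType),
	)
	if err != nil {
		panic(err)
	}
	ast, iss := env.Compile(`x < 10 && y > 0`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	// Track state and allow unknowns so that the recorded EvalState can be pruned.
	prg, err := env.Program(ast, cel.EvalOptions(cel.OptTrackState, cel.OptPartialEval))
	if err != nil {
		panic(err)
	}
	vars, err := cel.PartialVars(map[string]any{"x": 3}, cel.AttributePattern("y"))
	if err != nil {
		panic(err)
	}
	_, details, _ := prg.Eval(vars)
	residual, err := env.ResidualAst(ast, details)
	if err != nil {
		panic(err)
	}
	src, _ := cel.AstToString(residual)
	fmt.Println(src) // with x known to be 3, only the y term should remain, e.g. y > 0
}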
diff --git a/vendor/github.com/google/cel-go/interpreter/runtimecost.go b/vendor/github.com/google/cel-go/interpreter/runtimecost.go
deleted file mode 100644
index b9b307c15..000000000
--- a/vendor/github.com/google/cel-go/interpreter/runtimecost.go
+++ /dev/null
@@ -1,316 +0,0 @@
-// Copyright 2022 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package interpreter
-
-import (
- "math"
-
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/overloads"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
- "github.com/google/cel-go/common/types/traits"
-)
-
-// WARNING: Any changes to cost calculations in this file require a corresponding change in checker/cost.go
-
-// ActualCostEstimator provides function call cost estimations at runtime.
-//
-// CallCost returns an estimated cost for the function overload invocation with the given args, or nil if it has no
-// estimate to provide. CEL attempts to provide reasonable estimates for its standard function library, so CallCost
-// should typically not need to provide an estimate for CEL's standard functions.
-type ActualCostEstimator interface {
- CallCost(function, overloadID string, args []ref.Val, result ref.Val) *uint64
-}
-
-// CostObserver provides an observer that tracks runtime cost.
-func CostObserver(tracker *CostTracker) EvalObserver {
- observer := func(id int64, programStep any, val ref.Val) {
- switch t := programStep.(type) {
- case ConstantQualifier:
-			// TODO: Push identifiers onto the stack before observing constant qualifiers that apply to them
-			// and enable the below pop. Once enabled, this case can be collapsed into the Qualifier case.
- tracker.cost++
- case InterpretableConst:
- // zero cost
- case InterpretableAttribute:
- switch a := t.Attr().(type) {
- case *conditionalAttribute:
- // Ternary has no direct cost. All cost is from the conditional and the true/false branch expressions.
- tracker.stack.drop(a.falsy.ID(), a.truthy.ID(), a.expr.ID())
- default:
- tracker.stack.drop(t.Attr().ID())
- tracker.cost += common.SelectAndIdentCost
- }
- if !tracker.presenceTestHasCost {
- if _, isTestOnly := programStep.(*evalTestOnly); isTestOnly {
- tracker.cost -= common.SelectAndIdentCost
- }
- }
- case *evalExhaustiveConditional:
- // Ternary has no direct cost. All cost is from the conditional and the true/false branch expressions.
- tracker.stack.drop(t.attr.falsy.ID(), t.attr.truthy.ID(), t.attr.expr.ID())
-
- // While the field names are identical, the boolean operation eval structs do not share an interface and so
- // must be handled individually.
- case *evalOr:
- for _, term := range t.terms {
- tracker.stack.drop(term.ID())
- }
- case *evalAnd:
- for _, term := range t.terms {
- tracker.stack.drop(term.ID())
- }
- case *evalExhaustiveOr:
- for _, term := range t.terms {
- tracker.stack.drop(term.ID())
- }
- case *evalExhaustiveAnd:
- for _, term := range t.terms {
- tracker.stack.drop(term.ID())
- }
- case *evalFold:
- tracker.stack.drop(t.iterRange.ID())
- case Qualifier:
- tracker.cost++
- case InterpretableCall:
- if argVals, ok := tracker.stack.dropArgs(t.Args()); ok {
- tracker.cost += tracker.costCall(t, argVals, val)
- }
- case InterpretableConstructor:
- tracker.stack.dropArgs(t.InitVals())
- switch t.Type() {
- case types.ListType:
- tracker.cost += common.ListCreateBaseCost
- case types.MapType:
- tracker.cost += common.MapCreateBaseCost
- default:
- tracker.cost += common.StructCreateBaseCost
- }
- }
- tracker.stack.push(val, id)
-
- if tracker.Limit != nil && tracker.cost > *tracker.Limit {
- panic(EvalCancelledError{Cause: CostLimitExceeded, Message: "operation cancelled: actual cost limit exceeded"})
- }
- }
- return observer
-}
-
-// CostTrackerOption configures the behavior of CostTracker objects.
-type CostTrackerOption func(*CostTracker) error
-
-// CostTrackerLimit sets the runtime limit on the evaluation cost during execution and will terminate the expression
-// evaluation if the limit is exceeded.
-func CostTrackerLimit(limit uint64) CostTrackerOption {
- return func(tracker *CostTracker) error {
- tracker.Limit = &limit
- return nil
- }
-}
-
-// PresenceTestHasCost determines whether presence testing has a cost of one or zero.
-// Defaults to presence test has a cost of one.
-func PresenceTestHasCost(hasCost bool) CostTrackerOption {
- return func(tracker *CostTracker) error {
- tracker.presenceTestHasCost = hasCost
- return nil
- }
-}
-
-// NewCostTracker creates a new CostTracker with a given estimator and a set of functional CostTrackerOption values.
-func NewCostTracker(estimator ActualCostEstimator, opts ...CostTrackerOption) (*CostTracker, error) {
- tracker := &CostTracker{
- Estimator: estimator,
- overloadTrackers: map[string]FunctionTracker{},
- presenceTestHasCost: true,
- }
- for _, opt := range opts {
- err := opt(tracker)
- if err != nil {
- return nil, err
- }
- }
- return tracker, nil
-}
-
-// OverloadCostTracker binds an overload ID to a runtime FunctionTracker implementation.
-//
-// OverloadCostTracker instances augment or override ActualCostEstimator decisions, allowing for versioned and/or
-// optional cost tracking changes.
-func OverloadCostTracker(overloadID string, fnTracker FunctionTracker) CostTrackerOption {
- return func(tracker *CostTracker) error {
- tracker.overloadTrackers[overloadID] = fnTracker
- return nil
- }
-}
-
-// FunctionTracker computes the actual cost of evaluating the functions with the given arguments and result.
-type FunctionTracker func(args []ref.Val, result ref.Val) *uint64
-
-// CostTracker represents the information needed for tracking runtime cost.
-type CostTracker struct {
- Estimator ActualCostEstimator
- overloadTrackers map[string]FunctionTracker
- Limit *uint64
- presenceTestHasCost bool
-
- cost uint64
- stack refValStack
-}
-
-// ActualCost returns the runtime cost
-func (c *CostTracker) ActualCost() uint64 {
- return c.cost
-}
-
-func (c *CostTracker) costCall(call InterpretableCall, args []ref.Val, result ref.Val) uint64 {
- var cost uint64
- if len(c.overloadTrackers) != 0 {
- if tracker, found := c.overloadTrackers[call.OverloadID()]; found {
- callCost := tracker(args, result)
- if callCost != nil {
- cost += *callCost
- return cost
- }
- }
- }
- if c.Estimator != nil {
- callCost := c.Estimator.CallCost(call.Function(), call.OverloadID(), args, result)
- if callCost != nil {
- cost += *callCost
- return cost
- }
- }
- // if user didn't specify, the default way of calculating runtime cost would be used.
- // if user has their own implementation of ActualCostEstimator, make sure to cover the mapping between overloadId and cost calculation
- switch call.OverloadID() {
- // O(n) functions
- case overloads.StartsWithString, overloads.EndsWithString, overloads.StringToBytes, overloads.BytesToString, overloads.ExtQuoteString, overloads.ExtFormatString:
- cost += uint64(math.Ceil(float64(c.actualSize(args[0])) * common.StringTraversalCostFactor))
- case overloads.InList:
- // If a list is composed entirely of constant values this is O(1), but we don't account for that here.
- // We just assume all list containment checks are O(n).
- cost += c.actualSize(args[1])
- // O(min(m, n)) functions
- case overloads.LessString, overloads.GreaterString, overloads.LessEqualsString, overloads.GreaterEqualsString,
- overloads.LessBytes, overloads.GreaterBytes, overloads.LessEqualsBytes, overloads.GreaterEqualsBytes,
- overloads.Equals, overloads.NotEquals:
- // When we check the equality of 2 scalar values (e.g. 2 integers, 2 floating-point numbers, 2 booleans etc.),
- // the CostTracker.actualSize() function by definition returns 1 for each operand, resulting in an overall cost
- // of 1.
- lhsSize := c.actualSize(args[0])
- rhsSize := c.actualSize(args[1])
- minSize := lhsSize
- if rhsSize < minSize {
- minSize = rhsSize
- }
- cost += uint64(math.Ceil(float64(minSize) * common.StringTraversalCostFactor))
- // O(m+n) functions
- case overloads.AddString, overloads.AddBytes:
- // In the worst case scenario, we would need to reallocate a new backing store and copy both operands over.
- cost += uint64(math.Ceil(float64(c.actualSize(args[0])+c.actualSize(args[1])) * common.StringTraversalCostFactor))
- // O(nm) functions
- case overloads.MatchesString:
- // https://swtch.com/~rsc/regexp/regexp1.html applies to RE2 implementation supported by CEL
- // Add one to string length for purposes of cost calculation to prevent product of string and regex to be 0
- // in case where string is empty but regex is still expensive.
- strCost := uint64(math.Ceil((1.0 + float64(c.actualSize(args[0]))) * common.StringTraversalCostFactor))
- // We don't know how many expressions are in the regex, just the string length (a huge
- // improvement here would be to somehow get a count the number of expressions in the regex or
- // how many states are in the regex state machine and use that to measure regex cost).
- // For now, we're making a guess that each expression in a regex is typically at least 4 chars
- // in length.
- regexCost := uint64(math.Ceil(float64(c.actualSize(args[1])) * common.RegexStringLengthCostFactor))
- cost += strCost * regexCost
- case overloads.ContainsString:
- strCost := uint64(math.Ceil(float64(c.actualSize(args[0])) * common.StringTraversalCostFactor))
- substrCost := uint64(math.Ceil(float64(c.actualSize(args[1])) * common.StringTraversalCostFactor))
- cost += strCost * substrCost
-
- default:
- // The following operations are assumed to have O(1) complexity.
- // - AddList due to the implementation. Index lookup can be O(c) the
- // number of concatenated lists, but we don't track that is cost calculations.
- // - Conversions, since none perform a traversal of a type of unbound length.
- // - Computing the size of strings, byte sequences, lists and maps.
- // - Logical operations and all operators on fixed width scalars (comparisons, equality)
- // - Any functions that don't have a declared cost either here or in provided ActualCostEstimator.
- cost++
-
- }
- return cost
-}
-
-// actualSize returns the size of value
-func (c *CostTracker) actualSize(value ref.Val) uint64 {
- if sz, ok := value.(traits.Sizer); ok {
- return uint64(sz.Size().(types.Int))
- }
- return 1
-}
-
-type stackVal struct {
- Val ref.Val
- ID int64
-}
-
-// refValStack keeps track of values of the stack for cost calculation purposes
-type refValStack []stackVal
-
-func (s *refValStack) push(val ref.Val, id int64) {
- value := stackVal{Val: val, ID: id}
- *s = append(*s, value)
-}
-
-// TODO: Allowing drop and dropArgs to remove stack items above the IDs they are provided is a workaround. drop and dropArgs
-// should find and remove only the stack items matching the provided IDs once all attributes are properly pushed and popped from stack.
-
-// drop searches the stack for each ID and removes the ID and all stack items above it.
-// If none of the IDs are found, the stack is not modified.
-// WARNING: It is possible for multiple expressions with the same ID to exist (due to how macros are implemented) so it's
-// possible that a dropped ID will remain on the stack. They should be removed when IDs on the stack are popped.
-func (s *refValStack) drop(ids ...int64) {
- for _, id := range ids {
- for idx := len(*s) - 1; idx >= 0; idx-- {
- if (*s)[idx].ID == id {
- *s = (*s)[:idx]
- break
- }
- }
- }
-}
-
-// dropArgs searches the stack for all the args by their IDs, accumulates their associated ref.Vals and drops any
-// stack items above any of the arg IDs. If any of the IDs are not found the stack, false is returned.
-// Args are assumed to be found in the stack in reverse order, i.e. the last arg is expected to be found highest in
-// the stack.
-// WARNING: It is possible for multiple expressions with the same ID to exist (due to how macros are implemented) so it's
-// possible that a dropped ID will remain on the stack. They should be removed when IDs on the stack are popped.
-func (s *refValStack) dropArgs(args []Interpretable) ([]ref.Val, bool) {
- result := make([]ref.Val, len(args))
-argloop:
- for nIdx := len(args) - 1; nIdx >= 0; nIdx-- {
- for idx := len(*s) - 1; idx >= 0; idx-- {
- if (*s)[idx].ID == args[nIdx].ID() {
- el := (*s)[idx]
- *s = (*s)[:idx]
- result[nIdx] = el.Val
- continue argloop
- }
- }
- return nil, false
- }
- return result, true
-}
diff --git a/vendor/github.com/google/cel-go/parser/BUILD.bazel b/vendor/github.com/google/cel-go/parser/BUILD.bazel
deleted file mode 100644
index 97bc9bd43..000000000
--- a/vendor/github.com/google/cel-go/parser/BUILD.bazel
+++ /dev/null
@@ -1,58 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
-
-package(
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "errors.go",
- "helper.go",
- "input.go",
- "macro.go",
- "options.go",
- "parser.go",
- "unescape.go",
- "unparser.go",
- ],
- importpath = "github.com/google/cel-go/parser",
- visibility = ["//visibility:public"],
- deps = [
- "//common:go_default_library",
- "//common/ast:go_default_library",
- "//common/operators:go_default_library",
- "//common/runes:go_default_library",
- "//common/types:go_default_library",
- "//common/types/ref:go_default_library",
- "//parser/gen:go_default_library",
- "@com_github_antlr4_go_antlr_v4//:go_default_library",
- "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//types/known/structpb:go_default_library",
- ],
-)
-
-go_test(
- name = "go_default_test",
- size = "small",
- srcs = [
- "helper_test.go",
- "parser_test.go",
- "unescape_test.go",
- "unparser_test.go",
- ],
- embed = [
- ":go_default_library",
- ],
- deps = [
- "//common/ast:go_default_library",
- "//common/debug:go_default_library",
- "//common/types:go_default_library",
- "//parser/gen:go_default_library",
- "//test:go_default_library",
- "@com_github_antlr4_go_antlr_v4//:go_default_library",
- "@org_golang_google_protobuf//proto:go_default_library",
- "@org_golang_google_protobuf//testing/protocmp:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/parser/errors.go b/vendor/github.com/google/cel-go/parser/errors.go
deleted file mode 100644
index 93ae7a3ad..000000000
--- a/vendor/github.com/google/cel-go/parser/errors.go
+++ /dev/null
@@ -1,43 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package parser
-
-import (
- "fmt"
-
- "github.com/google/cel-go/common"
-)
-
-// parseErrors is a specialization of Errors.
-type parseErrors struct {
- errs *common.Errors
-}
-
-// errorCount indicates the number of errors reported.
-func (e *parseErrors) errorCount() int {
- return len(e.errs.GetErrors())
-}
-
-func (e *parseErrors) internalError(message string) {
- e.errs.ReportErrorAtID(0, common.NoLocation, message)
-}
-
-func (e *parseErrors) syntaxError(l common.Location, message string) {
- e.errs.ReportErrorAtID(0, l, fmt.Sprintf("Syntax error: %s", message))
-}
-
-func (e *parseErrors) reportErrorAtID(id int64, l common.Location, message string, args ...any) {
- e.errs.ReportErrorAtID(id, l, message, args...)
-}
diff --git a/vendor/github.com/google/cel-go/parser/gen/BUILD.bazel b/vendor/github.com/google/cel-go/parser/gen/BUILD.bazel
deleted file mode 100644
index e70433483..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/BUILD.bazel
+++ /dev/null
@@ -1,26 +0,0 @@
-load("@io_bazel_rules_go//go:def.bzl", "go_library")
-
-package(
- default_visibility = ["//parser:__subpackages__"],
- licenses = ["notice"], # Apache 2.0
-)
-
-go_library(
- name = "go_default_library",
- srcs = [
- "cel_base_listener.go",
- "cel_base_visitor.go",
- "cel_lexer.go",
- "cel_listener.go",
- "cel_parser.go",
- "cel_visitor.go",
- ],
- data = [
- "CEL.tokens",
- "CELLexer.tokens",
- ],
- importpath = "github.com/google/cel-go/parser/gen",
- deps = [
- "@com_github_antlr4_go_antlr_v4//:go_default_library",
- ],
-)
diff --git a/vendor/github.com/google/cel-go/parser/gen/CEL.g4 b/vendor/github.com/google/cel-go/parser/gen/CEL.g4
deleted file mode 100644
index b011da803..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/CEL.g4
+++ /dev/null
@@ -1,200 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-grammar CEL;
-
-// Grammar Rules
-// =============
-
-start
- : e=expr EOF
- ;
-
-expr
- : e=conditionalOr (op='?' e1=conditionalOr ':' e2=expr)?
- ;
-
-conditionalOr
- : e=conditionalAnd (ops+='||' e1+=conditionalAnd)*
- ;
-
-conditionalAnd
- : e=relation (ops+='&&' e1+=relation)*
- ;
-
-relation
- : calc
- | relation op=('<'|'<='|'>='|'>'|'=='|'!='|'in') relation
- ;
-
-calc
- : unary
- | calc op=('*'|'/'|'%') calc
- | calc op=('+'|'-') calc
- ;
-
-unary
- : member # MemberExpr
- | (ops+='!')+ member # LogicalNot
- | (ops+='-')+ member # Negate
- ;
-
-member
- : primary # PrimaryExpr
- | member op='.' (opt='?')? id=IDENTIFIER # Select
- | member op='.' id=IDENTIFIER open='(' args=exprList? ')' # MemberCall
- | member op='[' (opt='?')? index=expr ']' # Index
- ;
-
-primary
- : leadingDot='.'? id=IDENTIFIER (op='(' args=exprList? ')')? # IdentOrGlobalCall
- | '(' e=expr ')' # Nested
- | op='[' elems=listInit? ','? ']' # CreateList
- | op='{' entries=mapInitializerList? ','? '}' # CreateStruct
- | leadingDot='.'? ids+=IDENTIFIER (ops+='.' ids+=IDENTIFIER)*
- op='{' entries=fieldInitializerList? ','? '}' # CreateMessage
- | literal # ConstantLiteral
- ;
-
-exprList
- : e+=expr (',' e+=expr)*
- ;
-
-listInit
- : elems+=optExpr (',' elems+=optExpr)*
- ;
-
-fieldInitializerList
- : fields+=optField cols+=':' values+=expr (',' fields+=optField cols+=':' values+=expr)*
- ;
-
-optField
- : (opt='?')? IDENTIFIER
- ;
-
-mapInitializerList
- : keys+=optExpr cols+=':' values+=expr (',' keys+=optExpr cols+=':' values+=expr)*
- ;
-
-optExpr
- : (opt='?')? e=expr
- ;
-
-literal
- : sign=MINUS? tok=NUM_INT # Int
- | tok=NUM_UINT # Uint
- | sign=MINUS? tok=NUM_FLOAT # Double
- | tok=STRING # String
- | tok=BYTES # Bytes
- | tok=CEL_TRUE # BoolTrue
- | tok=CEL_FALSE # BoolFalse
- | tok=NUL # Null
- ;
-
-// Lexer Rules
-// ===========
-
-EQUALS : '==';
-NOT_EQUALS : '!=';
-IN: 'in';
-LESS : '<';
-LESS_EQUALS : '<=';
-GREATER_EQUALS : '>=';
-GREATER : '>';
-LOGICAL_AND : '&&';
-LOGICAL_OR : '||';
-
-LBRACKET : '[';
-RPRACKET : ']';
-LBRACE : '{';
-RBRACE : '}';
-LPAREN : '(';
-RPAREN : ')';
-DOT : '.';
-COMMA : ',';
-MINUS : '-';
-EXCLAM : '!';
-QUESTIONMARK : '?';
-COLON : ':';
-PLUS : '+';
-STAR : '*';
-SLASH : '/';
-PERCENT : '%';
-CEL_TRUE : 'true';
-CEL_FALSE : 'false';
-NUL : 'null';
-
-fragment BACKSLASH : '\\';
-fragment LETTER : 'A'..'Z' | 'a'..'z' ;
-fragment DIGIT : '0'..'9' ;
-fragment EXPONENT : ('e' | 'E') ( '+' | '-' )? DIGIT+ ;
-fragment HEXDIGIT : ('0'..'9'|'a'..'f'|'A'..'F') ;
-fragment RAW : 'r' | 'R';
-
-fragment ESC_SEQ
- : ESC_CHAR_SEQ
- | ESC_BYTE_SEQ
- | ESC_UNI_SEQ
- | ESC_OCT_SEQ
- ;
-
-fragment ESC_CHAR_SEQ
- : BACKSLASH ('a'|'b'|'f'|'n'|'r'|'t'|'v'|'"'|'\''|'\\'|'?'|'`')
- ;
-
-fragment ESC_OCT_SEQ
- : BACKSLASH ('0'..'3') ('0'..'7') ('0'..'7')
- ;
-
-fragment ESC_BYTE_SEQ
- : BACKSLASH ( 'x' | 'X' ) HEXDIGIT HEXDIGIT
- ;
-
-fragment ESC_UNI_SEQ
- : BACKSLASH 'u' HEXDIGIT HEXDIGIT HEXDIGIT HEXDIGIT
- | BACKSLASH 'U' HEXDIGIT HEXDIGIT HEXDIGIT HEXDIGIT HEXDIGIT HEXDIGIT HEXDIGIT HEXDIGIT
- ;
-
-WHITESPACE : ( '\t' | ' ' | '\r' | '\n'| '\u000C' )+ -> channel(HIDDEN) ;
-COMMENT : '//' (~'\n')* -> channel(HIDDEN) ;
-
-NUM_FLOAT
- : ( DIGIT+ ('.' DIGIT+) EXPONENT?
- | DIGIT+ EXPONENT
- | '.' DIGIT+ EXPONENT?
- )
- ;
-
-NUM_INT
- : ( DIGIT+ | '0x' HEXDIGIT+ );
-
-NUM_UINT
- : DIGIT+ ( 'u' | 'U' )
- | '0x' HEXDIGIT+ ( 'u' | 'U' )
- ;
-
-STRING
- : '"' (ESC_SEQ | ~('\\'|'"'|'\n'|'\r'))* '"'
- | '\'' (ESC_SEQ | ~('\\'|'\''|'\n'|'\r'))* '\''
- | '"""' (ESC_SEQ | ~('\\'))*? '"""'
- | '\'\'\'' (ESC_SEQ | ~('\\'))*? '\'\'\''
- | RAW '"' ~('"'|'\n'|'\r')* '"'
- | RAW '\'' ~('\''|'\n'|'\r')* '\''
- | RAW '"""' .*? '"""'
- | RAW '\'\'\'' .*? '\'\'\''
- ;
-
-BYTES : ('b' | 'B') STRING;
-
-IDENTIFIER : (LETTER | '_') ( LETTER | DIGIT | '_')*;
diff --git a/vendor/github.com/google/cel-go/parser/gen/CEL.interp b/vendor/github.com/google/cel-go/parser/gen/CEL.interp
deleted file mode 100644
index 75b8bb3e2..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/CEL.interp
+++ /dev/null
@@ -1,99 +0,0 @@
-token literal names:
-null
-'=='
-'!='
-'in'
-'<'
-'<='
-'>='
-'>'
-'&&'
-'||'
-'['
-']'
-'{'
-'}'
-'('
-')'
-'.'
-','
-'-'
-'!'
-'?'
-':'
-'+'
-'*'
-'/'
-'%'
-'true'
-'false'
-'null'
-null
-null
-null
-null
-null
-null
-null
-null
-
-token symbolic names:
-null
-EQUALS
-NOT_EQUALS
-IN
-LESS
-LESS_EQUALS
-GREATER_EQUALS
-GREATER
-LOGICAL_AND
-LOGICAL_OR
-LBRACKET
-RPRACKET
-LBRACE
-RBRACE
-LPAREN
-RPAREN
-DOT
-COMMA
-MINUS
-EXCLAM
-QUESTIONMARK
-COLON
-PLUS
-STAR
-SLASH
-PERCENT
-CEL_TRUE
-CEL_FALSE
-NUL
-WHITESPACE
-COMMENT
-NUM_FLOAT
-NUM_INT
-NUM_UINT
-STRING
-BYTES
-IDENTIFIER
-
-rule names:
-start
-expr
-conditionalOr
-conditionalAnd
-relation
-calc
-unary
-member
-primary
-exprList
-listInit
-fieldInitializerList
-optField
-mapInitializerList
-optExpr
-literal
-
-
-atn:
-[4, 1, 36, 251, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 42, 8, 1, 1, 2, 1, 2, 1, 2, 5, 2, 47, 8, 2, 10, 2, 12, 2, 50, 9, 2, 1, 3, 1, 3, 1, 3, 5, 3, 55, 8, 3, 10, 3, 12, 3, 58, 9, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 5, 4, 66, 8, 4, 10, 4, 12, 4, 69, 9, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 80, 8, 5, 10, 5, 12, 5, 83, 9, 5, 1, 6, 1, 6, 4, 6, 87, 8, 6, 11, 6, 12, 6, 88, 1, 6, 1, 6, 4, 6, 93, 8, 6, 11, 6, 12, 6, 94, 1, 6, 3, 6, 98, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 106, 8, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 114, 8, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 120, 8, 7, 1, 7, 1, 7, 1, 7, 5, 7, 125, 8, 7, 10, 7, 12, 7, 128, 9, 7, 1, 8, 3, 8, 131, 8, 8, 1, 8, 1, 8, 1, 8, 3, 8, 136, 8, 8, 1, 8, 3, 8, 139, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 147, 8, 8, 1, 8, 3, 8, 150, 8, 8, 1, 8, 1, 8, 1, 8, 3, 8, 155, 8, 8, 1, 8, 3, 8, 158, 8, 8, 1, 8, 1, 8, 3, 8, 162, 8, 8, 1, 8, 1, 8, 1, 8, 5, 8, 167, 8, 8, 10, 8, 12, 8, 170, 9, 8, 1, 8, 1, 8, 3, 8, 174, 8, 8, 1, 8, 3, 8, 177, 8, 8, 1, 8, 1, 8, 3, 8, 181, 8, 8, 1, 9, 1, 9, 1, 9, 5, 9, 186, 8, 9, 10, 9, 12, 9, 189, 9, 9, 1, 10, 1, 10, 1, 10, 5, 10, 194, 8, 10, 10, 10, 12, 10, 197, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 207, 8, 11, 10, 11, 12, 11, 210, 9, 11, 1, 12, 3, 12, 213, 8, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 225, 8, 13, 10, 13, 12, 13, 228, 9, 13, 1, 14, 3, 14, 231, 8, 14, 1, 14, 1, 14, 1, 15, 3, 15, 236, 8, 15, 1, 15, 1, 15, 1, 15, 3, 15, 241, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 249, 8, 15, 1, 15, 0, 3, 8, 10, 14, 16, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 0, 3, 1, 0, 1, 7, 1, 0, 23, 25, 2, 0, 18, 18, 22, 22, 281, 0, 32, 1, 0, 0, 0, 2, 35, 1, 0, 0, 0, 4, 43, 1, 0, 0, 0, 6, 51, 1, 0, 0, 0, 8, 59, 1, 0, 0, 0, 10, 70, 1, 0, 0, 0, 12, 97, 1, 0, 0, 0, 14, 99, 1, 0, 0, 0, 16, 180, 1, 0, 0, 0, 18, 182, 1, 0, 0, 0, 20, 190, 1, 0, 0, 0, 22, 198, 1, 0, 0, 0, 24, 212, 1, 0, 0, 0, 26, 216, 1, 0, 0, 0, 28, 230, 1, 0, 0, 0, 30, 248, 1, 0, 0, 0, 32, 33, 3, 2, 1, 0, 33, 34, 5, 0, 0, 1, 34, 1, 1, 0, 0, 0, 35, 41, 3, 4, 2, 0, 36, 37, 5, 20, 0, 0, 37, 38, 3, 4, 2, 0, 38, 39, 5, 21, 0, 0, 39, 40, 3, 2, 1, 0, 40, 42, 1, 0, 0, 0, 41, 36, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 3, 1, 0, 0, 0, 43, 48, 3, 6, 3, 0, 44, 45, 5, 9, 0, 0, 45, 47, 3, 6, 3, 0, 46, 44, 1, 0, 0, 0, 47, 50, 1, 0, 0, 0, 48, 46, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 5, 1, 0, 0, 0, 50, 48, 1, 0, 0, 0, 51, 56, 3, 8, 4, 0, 52, 53, 5, 8, 0, 0, 53, 55, 3, 8, 4, 0, 54, 52, 1, 0, 0, 0, 55, 58, 1, 0, 0, 0, 56, 54, 1, 0, 0, 0, 56, 57, 1, 0, 0, 0, 57, 7, 1, 0, 0, 0, 58, 56, 1, 0, 0, 0, 59, 60, 6, 4, -1, 0, 60, 61, 3, 10, 5, 0, 61, 67, 1, 0, 0, 0, 62, 63, 10, 1, 0, 0, 63, 64, 7, 0, 0, 0, 64, 66, 3, 8, 4, 2, 65, 62, 1, 0, 0, 0, 66, 69, 1, 0, 0, 0, 67, 65, 1, 0, 0, 0, 67, 68, 1, 0, 0, 0, 68, 9, 1, 0, 0, 0, 69, 67, 1, 0, 0, 0, 70, 71, 6, 5, -1, 0, 71, 72, 3, 12, 6, 0, 72, 81, 1, 0, 0, 0, 73, 74, 10, 2, 0, 0, 74, 75, 7, 1, 0, 0, 75, 80, 3, 10, 5, 3, 76, 77, 10, 1, 0, 0, 77, 78, 7, 2, 0, 0, 78, 80, 3, 10, 5, 2, 79, 73, 1, 0, 0, 0, 79, 76, 1, 0, 0, 0, 80, 83, 1, 0, 0, 0, 81, 79, 1, 0, 0, 0, 81, 82, 1, 0, 0, 0, 82, 11, 1, 0, 0, 0, 83, 81, 1, 0, 0, 0, 84, 98, 3, 14, 7, 0, 85, 87, 5, 19, 0, 0, 86, 85, 1, 0, 0, 0, 87, 88, 1, 0, 0, 0, 88, 86, 1, 0, 
0, 0, 88, 89, 1, 0, 0, 0, 89, 90, 1, 0, 0, 0, 90, 98, 3, 14, 7, 0, 91, 93, 5, 18, 0, 0, 92, 91, 1, 0, 0, 0, 93, 94, 1, 0, 0, 0, 94, 92, 1, 0, 0, 0, 94, 95, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 98, 3, 14, 7, 0, 97, 84, 1, 0, 0, 0, 97, 86, 1, 0, 0, 0, 97, 92, 1, 0, 0, 0, 98, 13, 1, 0, 0, 0, 99, 100, 6, 7, -1, 0, 100, 101, 3, 16, 8, 0, 101, 126, 1, 0, 0, 0, 102, 103, 10, 3, 0, 0, 103, 105, 5, 16, 0, 0, 104, 106, 5, 20, 0, 0, 105, 104, 1, 0, 0, 0, 105, 106, 1, 0, 0, 0, 106, 107, 1, 0, 0, 0, 107, 125, 5, 36, 0, 0, 108, 109, 10, 2, 0, 0, 109, 110, 5, 16, 0, 0, 110, 111, 5, 36, 0, 0, 111, 113, 5, 14, 0, 0, 112, 114, 3, 18, 9, 0, 113, 112, 1, 0, 0, 0, 113, 114, 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115, 125, 5, 15, 0, 0, 116, 117, 10, 1, 0, 0, 117, 119, 5, 10, 0, 0, 118, 120, 5, 20, 0, 0, 119, 118, 1, 0, 0, 0, 119, 120, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 3, 2, 1, 0, 122, 123, 5, 11, 0, 0, 123, 125, 1, 0, 0, 0, 124, 102, 1, 0, 0, 0, 124, 108, 1, 0, 0, 0, 124, 116, 1, 0, 0, 0, 125, 128, 1, 0, 0, 0, 126, 124, 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 15, 1, 0, 0, 0, 128, 126, 1, 0, 0, 0, 129, 131, 5, 16, 0, 0, 130, 129, 1, 0, 0, 0, 130, 131, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 138, 5, 36, 0, 0, 133, 135, 5, 14, 0, 0, 134, 136, 3, 18, 9, 0, 135, 134, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 137, 1, 0, 0, 0, 137, 139, 5, 15, 0, 0, 138, 133, 1, 0, 0, 0, 138, 139, 1, 0, 0, 0, 139, 181, 1, 0, 0, 0, 140, 141, 5, 14, 0, 0, 141, 142, 3, 2, 1, 0, 142, 143, 5, 15, 0, 0, 143, 181, 1, 0, 0, 0, 144, 146, 5, 10, 0, 0, 145, 147, 3, 20, 10, 0, 146, 145, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 149, 1, 0, 0, 0, 148, 150, 5, 17, 0, 0, 149, 148, 1, 0, 0, 0, 149, 150, 1, 0, 0, 0, 150, 151, 1, 0, 0, 0, 151, 181, 5, 11, 0, 0, 152, 154, 5, 12, 0, 0, 153, 155, 3, 26, 13, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, 155, 157, 1, 0, 0, 0, 156, 158, 5, 17, 0, 0, 157, 156, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 159, 1, 0, 0, 0, 159, 181, 5, 13, 0, 0, 160, 162, 5, 16, 0, 0, 161, 160, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 1, 0, 0, 0, 163, 168, 5, 36, 0, 0, 164, 165, 5, 16, 0, 0, 165, 167, 5, 36, 0, 0, 166, 164, 1, 0, 0, 0, 167, 170, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 168, 169, 1, 0, 0, 0, 169, 171, 1, 0, 0, 0, 170, 168, 1, 0, 0, 0, 171, 173, 5, 12, 0, 0, 172, 174, 3, 22, 11, 0, 173, 172, 1, 0, 0, 0, 173, 174, 1, 0, 0, 0, 174, 176, 1, 0, 0, 0, 175, 177, 5, 17, 0, 0, 176, 175, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 181, 5, 13, 0, 0, 179, 181, 3, 30, 15, 0, 180, 130, 1, 0, 0, 0, 180, 140, 1, 0, 0, 0, 180, 144, 1, 0, 0, 0, 180, 152, 1, 0, 0, 0, 180, 161, 1, 0, 0, 0, 180, 179, 1, 0, 0, 0, 181, 17, 1, 0, 0, 0, 182, 187, 3, 2, 1, 0, 183, 184, 5, 17, 0, 0, 184, 186, 3, 2, 1, 0, 185, 183, 1, 0, 0, 0, 186, 189, 1, 0, 0, 0, 187, 185, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 19, 1, 0, 0, 0, 189, 187, 1, 0, 0, 0, 190, 195, 3, 28, 14, 0, 191, 192, 5, 17, 0, 0, 192, 194, 3, 28, 14, 0, 193, 191, 1, 0, 0, 0, 194, 197, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 195, 196, 1, 0, 0, 0, 196, 21, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 199, 3, 24, 12, 0, 199, 200, 5, 21, 0, 0, 200, 208, 3, 2, 1, 0, 201, 202, 5, 17, 0, 0, 202, 203, 3, 24, 12, 0, 203, 204, 5, 21, 0, 0, 204, 205, 3, 2, 1, 0, 205, 207, 1, 0, 0, 0, 206, 201, 1, 0, 0, 0, 207, 210, 1, 0, 0, 0, 208, 206, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 23, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 211, 213, 5, 20, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 36, 0, 0, 215, 25, 1, 0, 0, 0, 216, 217, 3, 28, 14, 0, 217, 218, 5, 21, 0, 0, 218, 226, 3, 2, 1, 
0, 219, 220, 5, 17, 0, 0, 220, 221, 3, 28, 14, 0, 221, 222, 5, 21, 0, 0, 222, 223, 3, 2, 1, 0, 223, 225, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 225, 228, 1, 0, 0, 0, 226, 224, 1, 0, 0, 0, 226, 227, 1, 0, 0, 0, 227, 27, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 229, 231, 5, 20, 0, 0, 230, 229, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 1, 0, 0, 0, 232, 233, 3, 2, 1, 0, 233, 29, 1, 0, 0, 0, 234, 236, 5, 18, 0, 0, 235, 234, 1, 0, 0, 0, 235, 236, 1, 0, 0, 0, 236, 237, 1, 0, 0, 0, 237, 249, 5, 32, 0, 0, 238, 249, 5, 33, 0, 0, 239, 241, 5, 18, 0, 0, 240, 239, 1, 0, 0, 0, 240, 241, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 249, 5, 31, 0, 0, 243, 249, 5, 34, 0, 0, 244, 249, 5, 35, 0, 0, 245, 249, 5, 26, 0, 0, 246, 249, 5, 27, 0, 0, 247, 249, 5, 28, 0, 0, 248, 235, 1, 0, 0, 0, 248, 238, 1, 0, 0, 0, 248, 240, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 248, 245, 1, 0, 0, 0, 248, 246, 1, 0, 0, 0, 248, 247, 1, 0, 0, 0, 249, 31, 1, 0, 0, 0, 35, 41, 48, 56, 67, 79, 81, 88, 94, 97, 105, 113, 119, 124, 126, 130, 135, 138, 146, 149, 154, 157, 161, 168, 173, 176, 180, 187, 195, 208, 212, 226, 230, 235, 240, 248]
\ No newline at end of file
diff --git a/vendor/github.com/google/cel-go/parser/gen/CEL.tokens b/vendor/github.com/google/cel-go/parser/gen/CEL.tokens
deleted file mode 100644
index b305bdad3..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/CEL.tokens
+++ /dev/null
@@ -1,64 +0,0 @@
-EQUALS=1
-NOT_EQUALS=2
-IN=3
-LESS=4
-LESS_EQUALS=5
-GREATER_EQUALS=6
-GREATER=7
-LOGICAL_AND=8
-LOGICAL_OR=9
-LBRACKET=10
-RPRACKET=11
-LBRACE=12
-RBRACE=13
-LPAREN=14
-RPAREN=15
-DOT=16
-COMMA=17
-MINUS=18
-EXCLAM=19
-QUESTIONMARK=20
-COLON=21
-PLUS=22
-STAR=23
-SLASH=24
-PERCENT=25
-CEL_TRUE=26
-CEL_FALSE=27
-NUL=28
-WHITESPACE=29
-COMMENT=30
-NUM_FLOAT=31
-NUM_INT=32
-NUM_UINT=33
-STRING=34
-BYTES=35
-IDENTIFIER=36
-'=='=1
-'!='=2
-'in'=3
-'<'=4
-'<='=5
-'>='=6
-'>'=7
-'&&'=8
-'||'=9
-'['=10
-']'=11
-'{'=12
-'}'=13
-'('=14
-')'=15
-'.'=16
-','=17
-'-'=18
-'!'=19
-'?'=20
-':'=21
-'+'=22
-'*'=23
-'/'=24
-'%'=25
-'true'=26
-'false'=27
-'null'=28
diff --git a/vendor/github.com/google/cel-go/parser/gen/CELLexer.interp b/vendor/github.com/google/cel-go/parser/gen/CELLexer.interp
deleted file mode 100644
index 26e7f471e..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/CELLexer.interp
+++ /dev/null
@@ -1,136 +0,0 @@
-token literal names:
-null
-'=='
-'!='
-'in'
-'<'
-'<='
-'>='
-'>'
-'&&'
-'||'
-'['
-']'
-'{'
-'}'
-'('
-')'
-'.'
-','
-'-'
-'!'
-'?'
-':'
-'+'
-'*'
-'/'
-'%'
-'true'
-'false'
-'null'
-null
-null
-null
-null
-null
-null
-null
-null
-
-token symbolic names:
-null
-EQUALS
-NOT_EQUALS
-IN
-LESS
-LESS_EQUALS
-GREATER_EQUALS
-GREATER
-LOGICAL_AND
-LOGICAL_OR
-LBRACKET
-RPRACKET
-LBRACE
-RBRACE
-LPAREN
-RPAREN
-DOT
-COMMA
-MINUS
-EXCLAM
-QUESTIONMARK
-COLON
-PLUS
-STAR
-SLASH
-PERCENT
-CEL_TRUE
-CEL_FALSE
-NUL
-WHITESPACE
-COMMENT
-NUM_FLOAT
-NUM_INT
-NUM_UINT
-STRING
-BYTES
-IDENTIFIER
-
-rule names:
-EQUALS
-NOT_EQUALS
-IN
-LESS
-LESS_EQUALS
-GREATER_EQUALS
-GREATER
-LOGICAL_AND
-LOGICAL_OR
-LBRACKET
-RPRACKET
-LBRACE
-RBRACE
-LPAREN
-RPAREN
-DOT
-COMMA
-MINUS
-EXCLAM
-QUESTIONMARK
-COLON
-PLUS
-STAR
-SLASH
-PERCENT
-CEL_TRUE
-CEL_FALSE
-NUL
-BACKSLASH
-LETTER
-DIGIT
-EXPONENT
-HEXDIGIT
-RAW
-ESC_SEQ
-ESC_CHAR_SEQ
-ESC_OCT_SEQ
-ESC_BYTE_SEQ
-ESC_UNI_SEQ
-WHITESPACE
-COMMENT
-NUM_FLOAT
-NUM_INT
-NUM_UINT
-STRING
-BYTES
-IDENTIFIER
-
-channel names:
-DEFAULT_TOKEN_CHANNEL
-HIDDEN
-
-mode names:
-DEFAULT_MODE
-
-atn:
-[4, 0, 36, 423, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 177, 8, 31, 1, 31, 4, 31, 180, 8, 31, 11, 31, 12, 31, 181, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 3, 34, 192, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 225, 8, 38, 1, 39, 4, 39, 228, 8, 39, 11, 39, 12, 39, 229, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 238, 8, 40, 10, 40, 12, 40, 241, 9, 40, 1, 40, 1, 40, 1, 41, 4, 41, 246, 8, 41, 11, 41, 12, 41, 247, 1, 41, 1, 41, 4, 41, 252, 8, 41, 11, 41, 12, 41, 253, 1, 41, 3, 41, 257, 8, 41, 1, 41, 4, 41, 260, 8, 41, 11, 41, 12, 41, 261, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 268, 8, 41, 11, 41, 12, 41, 269, 1, 41, 3, 41, 273, 8, 41, 3, 41, 275, 8, 41, 1, 42, 4, 42, 278, 8, 42, 11, 42, 12, 42, 279, 1, 42, 1, 42, 1, 42, 1, 42, 4, 42, 286, 8, 42, 11, 42, 12, 42, 287, 3, 42, 290, 8, 42, 1, 43, 4, 43, 293, 8, 43, 11, 43, 12, 43, 294, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 4, 43, 303, 8, 43, 11, 43, 12, 43, 304, 1, 43, 1, 43, 3, 43, 309, 8, 43, 1, 44, 1, 44, 1, 44, 5, 44, 314, 8, 44, 10, 44, 12, 44, 317, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 323, 8, 44, 10, 44, 12, 44, 326, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 335, 8, 44, 10, 44, 12, 44, 338, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 349, 8, 44, 10, 44, 12, 44, 352, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 360, 8, 44, 10, 44, 12, 44, 363, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 370, 8, 44, 10, 44, 12, 44, 373, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 383, 8, 44, 10, 44, 12, 44, 386, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 398, 8, 44, 10, 44, 12, 44, 401, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 407, 8, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 3, 46, 414, 8, 46, 1, 46, 1, 46, 1, 46, 5, 46, 419, 8, 46, 10, 46, 12, 46, 422, 9, 46, 4, 336, 350, 384, 399, 0, 47, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 0, 59, 0, 61, 0, 63, 0, 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 29, 81, 30, 
83, 31, 85, 32, 87, 33, 89, 34, 91, 35, 93, 36, 1, 0, 16, 2, 0, 65, 90, 97, 122, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 3, 0, 48, 57, 65, 70, 97, 102, 2, 0, 82, 82, 114, 114, 10, 0, 34, 34, 39, 39, 63, 63, 92, 92, 96, 98, 102, 102, 110, 110, 114, 114, 116, 116, 118, 118, 2, 0, 88, 88, 120, 120, 3, 0, 9, 10, 12, 13, 32, 32, 1, 0, 10, 10, 2, 0, 85, 85, 117, 117, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 4, 0, 10, 10, 13, 13, 39, 39, 92, 92, 1, 0, 92, 92, 3, 0, 10, 10, 13, 13, 34, 34, 3, 0, 10, 10, 13, 13, 39, 39, 2, 0, 66, 66, 98, 98, 456, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, 3, 98, 1, 0, 0, 0, 5, 101, 1, 0, 0, 0, 7, 104, 1, 0, 0, 0, 9, 106, 1, 0, 0, 0, 11, 109, 1, 0, 0, 0, 13, 112, 1, 0, 0, 0, 15, 114, 1, 0, 0, 0, 17, 117, 1, 0, 0, 0, 19, 120, 1, 0, 0, 0, 21, 122, 1, 0, 0, 0, 23, 124, 1, 0, 0, 0, 25, 126, 1, 0, 0, 0, 27, 128, 1, 0, 0, 0, 29, 130, 1, 0, 0, 0, 31, 132, 1, 0, 0, 0, 33, 134, 1, 0, 0, 0, 35, 136, 1, 0, 0, 0, 37, 138, 1, 0, 0, 0, 39, 140, 1, 0, 0, 0, 41, 142, 1, 0, 0, 0, 43, 144, 1, 0, 0, 0, 45, 146, 1, 0, 0, 0, 47, 148, 1, 0, 0, 0, 49, 150, 1, 0, 0, 0, 51, 152, 1, 0, 0, 0, 53, 157, 1, 0, 0, 0, 55, 163, 1, 0, 0, 0, 57, 168, 1, 0, 0, 0, 59, 170, 1, 0, 0, 0, 61, 172, 1, 0, 0, 0, 63, 174, 1, 0, 0, 0, 65, 183, 1, 0, 0, 0, 67, 185, 1, 0, 0, 0, 69, 191, 1, 0, 0, 0, 71, 193, 1, 0, 0, 0, 73, 196, 1, 0, 0, 0, 75, 201, 1, 0, 0, 0, 77, 224, 1, 0, 0, 0, 79, 227, 1, 0, 0, 0, 81, 233, 1, 0, 0, 0, 83, 274, 1, 0, 0, 0, 85, 289, 1, 0, 0, 0, 87, 308, 1, 0, 0, 0, 89, 406, 1, 0, 0, 0, 91, 408, 1, 0, 0, 0, 93, 413, 1, 0, 0, 0, 95, 96, 5, 61, 0, 0, 96, 97, 5, 61, 0, 0, 97, 2, 1, 0, 0, 0, 98, 99, 5, 33, 0, 0, 99, 100, 5, 61, 0, 0, 100, 4, 1, 0, 0, 0, 101, 102, 5, 105, 0, 0, 102, 103, 5, 110, 0, 0, 103, 6, 1, 0, 0, 0, 104, 105, 5, 60, 0, 0, 105, 8, 1, 0, 0, 0, 106, 107, 5, 60, 0, 0, 107, 108, 5, 61, 0, 0, 108, 10, 1, 0, 0, 0, 109, 110, 5, 62, 0, 0, 110, 111, 5, 61, 0, 0, 111, 12, 1, 0, 0, 0, 112, 113, 5, 62, 0, 0, 113, 14, 1, 0, 0, 0, 114, 115, 5, 38, 0, 0, 115, 116, 5, 38, 0, 0, 116, 16, 1, 0, 0, 0, 117, 118, 5, 124, 0, 0, 118, 119, 5, 124, 0, 0, 119, 18, 1, 0, 0, 0, 120, 121, 5, 91, 0, 0, 121, 20, 1, 0, 0, 0, 122, 123, 5, 93, 0, 0, 123, 22, 1, 0, 0, 0, 124, 125, 5, 123, 0, 0, 125, 24, 1, 0, 0, 0, 126, 127, 5, 125, 0, 0, 127, 26, 1, 0, 0, 0, 128, 129, 5, 40, 0, 0, 129, 28, 1, 0, 0, 0, 130, 131, 5, 41, 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 5, 46, 0, 0, 133, 32, 1, 0, 0, 0, 134, 135, 5, 44, 0, 0, 135, 34, 1, 0, 0, 0, 136, 137, 5, 45, 0, 0, 137, 36, 1, 0, 0, 0, 138, 139, 5, 33, 0, 0, 139, 38, 1, 0, 0, 0, 140, 141, 5, 63, 0, 0, 141, 40, 1, 0, 0, 0, 142, 143, 5, 58, 0, 0, 143, 42, 1, 0, 0, 0, 144, 145, 5, 43, 0, 0, 145, 44, 1, 0, 0, 0, 146, 147, 5, 42, 0, 0, 147, 46, 1, 0, 0, 0, 148, 149, 5, 47, 0, 0, 149, 48, 1, 0, 0, 0, 150, 151, 5, 37, 0, 0, 151, 50, 1, 0, 0, 0, 152, 153, 5, 116, 0, 0, 153, 154, 5, 114, 0, 0, 154, 155, 5, 117, 0, 0, 155, 156, 5, 101, 0, 
0, 156, 52, 1, 0, 0, 0, 157, 158, 5, 102, 0, 0, 158, 159, 5, 97, 0, 0, 159, 160, 5, 108, 0, 0, 160, 161, 5, 115, 0, 0, 161, 162, 5, 101, 0, 0, 162, 54, 1, 0, 0, 0, 163, 164, 5, 110, 0, 0, 164, 165, 5, 117, 0, 0, 165, 166, 5, 108, 0, 0, 166, 167, 5, 108, 0, 0, 167, 56, 1, 0, 0, 0, 168, 169, 5, 92, 0, 0, 169, 58, 1, 0, 0, 0, 170, 171, 7, 0, 0, 0, 171, 60, 1, 0, 0, 0, 172, 173, 2, 48, 57, 0, 173, 62, 1, 0, 0, 0, 174, 176, 7, 1, 0, 0, 175, 177, 7, 2, 0, 0, 176, 175, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 179, 1, 0, 0, 0, 178, 180, 3, 61, 30, 0, 179, 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 179, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 64, 1, 0, 0, 0, 183, 184, 7, 3, 0, 0, 184, 66, 1, 0, 0, 0, 185, 186, 7, 4, 0, 0, 186, 68, 1, 0, 0, 0, 187, 192, 3, 71, 35, 0, 188, 192, 3, 75, 37, 0, 189, 192, 3, 77, 38, 0, 190, 192, 3, 73, 36, 0, 191, 187, 1, 0, 0, 0, 191, 188, 1, 0, 0, 0, 191, 189, 1, 0, 0, 0, 191, 190, 1, 0, 0, 0, 192, 70, 1, 0, 0, 0, 193, 194, 3, 57, 28, 0, 194, 195, 7, 5, 0, 0, 195, 72, 1, 0, 0, 0, 196, 197, 3, 57, 28, 0, 197, 198, 2, 48, 51, 0, 198, 199, 2, 48, 55, 0, 199, 200, 2, 48, 55, 0, 200, 74, 1, 0, 0, 0, 201, 202, 3, 57, 28, 0, 202, 203, 7, 6, 0, 0, 203, 204, 3, 65, 32, 0, 204, 205, 3, 65, 32, 0, 205, 76, 1, 0, 0, 0, 206, 207, 3, 57, 28, 0, 207, 208, 5, 117, 0, 0, 208, 209, 3, 65, 32, 0, 209, 210, 3, 65, 32, 0, 210, 211, 3, 65, 32, 0, 211, 212, 3, 65, 32, 0, 212, 225, 1, 0, 0, 0, 213, 214, 3, 57, 28, 0, 214, 215, 5, 85, 0, 0, 215, 216, 3, 65, 32, 0, 216, 217, 3, 65, 32, 0, 217, 218, 3, 65, 32, 0, 218, 219, 3, 65, 32, 0, 219, 220, 3, 65, 32, 0, 220, 221, 3, 65, 32, 0, 221, 222, 3, 65, 32, 0, 222, 223, 3, 65, 32, 0, 223, 225, 1, 0, 0, 0, 224, 206, 1, 0, 0, 0, 224, 213, 1, 0, 0, 0, 225, 78, 1, 0, 0, 0, 226, 228, 7, 7, 0, 0, 227, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 227, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 6, 39, 0, 0, 232, 80, 1, 0, 0, 0, 233, 234, 5, 47, 0, 0, 234, 235, 5, 47, 0, 0, 235, 239, 1, 0, 0, 0, 236, 238, 8, 8, 0, 0, 237, 236, 1, 0, 0, 0, 238, 241, 1, 0, 0, 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 242, 1, 0, 0, 0, 241, 239, 1, 0, 0, 0, 242, 243, 6, 40, 0, 0, 243, 82, 1, 0, 0, 0, 244, 246, 3, 61, 30, 0, 245, 244, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 5, 46, 0, 0, 250, 252, 3, 61, 30, 0, 251, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0, 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 256, 1, 0, 0, 0, 255, 257, 3, 63, 31, 0, 256, 255, 1, 0, 0, 0, 256, 257, 1, 0, 0, 0, 257, 275, 1, 0, 0, 0, 258, 260, 3, 61, 30, 0, 259, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 1, 0, 0, 0, 263, 264, 3, 63, 31, 0, 264, 275, 1, 0, 0, 0, 265, 267, 5, 46, 0, 0, 266, 268, 3, 61, 30, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 272, 1, 0, 0, 0, 271, 273, 3, 63, 31, 0, 272, 271, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 275, 1, 0, 0, 0, 274, 245, 1, 0, 0, 0, 274, 259, 1, 0, 0, 0, 274, 265, 1, 0, 0, 0, 275, 84, 1, 0, 0, 0, 276, 278, 3, 61, 30, 0, 277, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 290, 1, 0, 0, 0, 281, 282, 5, 48, 0, 0, 282, 283, 5, 120, 0, 0, 283, 285, 1, 0, 0, 0, 284, 286, 3, 65, 32, 0, 285, 284, 1, 0, 0, 0, 286, 287, 1, 0, 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 290, 1, 0, 0, 0, 289, 277, 1, 0, 0, 0, 289, 281, 1, 0, 0, 0, 290, 86, 1, 0, 0, 0, 291, 293, 3, 61, 30, 0, 292, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 292, 
1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 1, 0, 0, 0, 296, 297, 7, 9, 0, 0, 297, 309, 1, 0, 0, 0, 298, 299, 5, 48, 0, 0, 299, 300, 5, 120, 0, 0, 300, 302, 1, 0, 0, 0, 301, 303, 3, 65, 32, 0, 302, 301, 1, 0, 0, 0, 303, 304, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 307, 7, 9, 0, 0, 307, 309, 1, 0, 0, 0, 308, 292, 1, 0, 0, 0, 308, 298, 1, 0, 0, 0, 309, 88, 1, 0, 0, 0, 310, 315, 5, 34, 0, 0, 311, 314, 3, 69, 34, 0, 312, 314, 8, 10, 0, 0, 313, 311, 1, 0, 0, 0, 313, 312, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 318, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 407, 5, 34, 0, 0, 319, 324, 5, 39, 0, 0, 320, 323, 3, 69, 34, 0, 321, 323, 8, 11, 0, 0, 322, 320, 1, 0, 0, 0, 322, 321, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 327, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 407, 5, 39, 0, 0, 328, 329, 5, 34, 0, 0, 329, 330, 5, 34, 0, 0, 330, 331, 5, 34, 0, 0, 331, 336, 1, 0, 0, 0, 332, 335, 3, 69, 34, 0, 333, 335, 8, 12, 0, 0, 334, 332, 1, 0, 0, 0, 334, 333, 1, 0, 0, 0, 335, 338, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 337, 339, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 339, 340, 5, 34, 0, 0, 340, 341, 5, 34, 0, 0, 341, 407, 5, 34, 0, 0, 342, 343, 5, 39, 0, 0, 343, 344, 5, 39, 0, 0, 344, 345, 5, 39, 0, 0, 345, 350, 1, 0, 0, 0, 346, 349, 3, 69, 34, 0, 347, 349, 8, 12, 0, 0, 348, 346, 1, 0, 0, 0, 348, 347, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 353, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 354, 5, 39, 0, 0, 354, 355, 5, 39, 0, 0, 355, 407, 5, 39, 0, 0, 356, 357, 3, 67, 33, 0, 357, 361, 5, 34, 0, 0, 358, 360, 8, 13, 0, 0, 359, 358, 1, 0, 0, 0, 360, 363, 1, 0, 0, 0, 361, 359, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 364, 1, 0, 0, 0, 363, 361, 1, 0, 0, 0, 364, 365, 5, 34, 0, 0, 365, 407, 1, 0, 0, 0, 366, 367, 3, 67, 33, 0, 367, 371, 5, 39, 0, 0, 368, 370, 8, 14, 0, 0, 369, 368, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 372, 1, 0, 0, 0, 372, 374, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 375, 5, 39, 0, 0, 375, 407, 1, 0, 0, 0, 376, 377, 3, 67, 33, 0, 377, 378, 5, 34, 0, 0, 378, 379, 5, 34, 0, 0, 379, 380, 5, 34, 0, 0, 380, 384, 1, 0, 0, 0, 381, 383, 9, 0, 0, 0, 382, 381, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, 387, 388, 5, 34, 0, 0, 388, 389, 5, 34, 0, 0, 389, 390, 5, 34, 0, 0, 390, 407, 1, 0, 0, 0, 391, 392, 3, 67, 33, 0, 392, 393, 5, 39, 0, 0, 393, 394, 5, 39, 0, 0, 394, 395, 5, 39, 0, 0, 395, 399, 1, 0, 0, 0, 396, 398, 9, 0, 0, 0, 397, 396, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 399, 397, 1, 0, 0, 0, 400, 402, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402, 403, 5, 39, 0, 0, 403, 404, 5, 39, 0, 0, 404, 405, 5, 39, 0, 0, 405, 407, 1, 0, 0, 0, 406, 310, 1, 0, 0, 0, 406, 319, 1, 0, 0, 0, 406, 328, 1, 0, 0, 0, 406, 342, 1, 0, 0, 0, 406, 356, 1, 0, 0, 0, 406, 366, 1, 0, 0, 0, 406, 376, 1, 0, 0, 0, 406, 391, 1, 0, 0, 0, 407, 90, 1, 0, 0, 0, 408, 409, 7, 15, 0, 0, 409, 410, 3, 89, 44, 0, 410, 92, 1, 0, 0, 0, 411, 414, 3, 59, 29, 0, 412, 414, 5, 95, 0, 0, 413, 411, 1, 0, 0, 0, 413, 412, 1, 0, 0, 0, 414, 420, 1, 0, 0, 0, 415, 419, 3, 59, 29, 0, 416, 419, 3, 61, 30, 0, 417, 419, 5, 95, 0, 0, 418, 415, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, 417, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 94, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 36, 0, 176, 181, 191, 224, 229, 239, 247, 253, 256, 261, 269, 272, 274, 279, 287, 289, 
294, 304, 308, 313, 315, 322, 324, 334, 336, 348, 350, 361, 371, 384, 399, 406, 413, 418, 420, 1, 0, 1, 0]
\ No newline at end of file
diff --git a/vendor/github.com/google/cel-go/parser/gen/CELLexer.tokens b/vendor/github.com/google/cel-go/parser/gen/CELLexer.tokens
deleted file mode 100644
index b305bdad3..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/CELLexer.tokens
+++ /dev/null
@@ -1,64 +0,0 @@
-EQUALS=1
-NOT_EQUALS=2
-IN=3
-LESS=4
-LESS_EQUALS=5
-GREATER_EQUALS=6
-GREATER=7
-LOGICAL_AND=8
-LOGICAL_OR=9
-LBRACKET=10
-RPRACKET=11
-LBRACE=12
-RBRACE=13
-LPAREN=14
-RPAREN=15
-DOT=16
-COMMA=17
-MINUS=18
-EXCLAM=19
-QUESTIONMARK=20
-COLON=21
-PLUS=22
-STAR=23
-SLASH=24
-PERCENT=25
-CEL_TRUE=26
-CEL_FALSE=27
-NUL=28
-WHITESPACE=29
-COMMENT=30
-NUM_FLOAT=31
-NUM_INT=32
-NUM_UINT=33
-STRING=34
-BYTES=35
-IDENTIFIER=36
-'=='=1
-'!='=2
-'in'=3
-'<'=4
-'<='=5
-'>='=6
-'>'=7
-'&&'=8
-'||'=9
-'['=10
-']'=11
-'{'=12
-'}'=13
-'('=14
-')'=15
-'.'=16
-','=17
-'-'=18
-'!'=19
-'?'=20
-':'=21
-'+'=22
-'*'=23
-'/'=24
-'%'=25
-'true'=26
-'false'=27
-'null'=28
diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_base_listener.go b/vendor/github.com/google/cel-go/parser/gen/cel_base_listener.go
deleted file mode 100644
index c49d03867..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/cel_base_listener.go
+++ /dev/null
@@ -1,219 +0,0 @@
-// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT.
-
-package gen // CEL
-import "github.com/antlr4-go/antlr/v4"
-
-// BaseCELListener is a complete listener for a parse tree produced by CELParser.
-type BaseCELListener struct{}
-
-var _ CELListener = &BaseCELListener{}
-
-// VisitTerminal is called when a terminal node is visited.
-func (s *BaseCELListener) VisitTerminal(node antlr.TerminalNode) {}
-
-// VisitErrorNode is called when an error node is visited.
-func (s *BaseCELListener) VisitErrorNode(node antlr.ErrorNode) {}
-
-// EnterEveryRule is called when any rule is entered.
-func (s *BaseCELListener) EnterEveryRule(ctx antlr.ParserRuleContext) {}
-
-// ExitEveryRule is called when any rule is exited.
-func (s *BaseCELListener) ExitEveryRule(ctx antlr.ParserRuleContext) {}
-
-// EnterStart is called when production start is entered.
-func (s *BaseCELListener) EnterStart(ctx *StartContext) {}
-
-// ExitStart is called when production start is exited.
-func (s *BaseCELListener) ExitStart(ctx *StartContext) {}
-
-// EnterExpr is called when production expr is entered.
-func (s *BaseCELListener) EnterExpr(ctx *ExprContext) {}
-
-// ExitExpr is called when production expr is exited.
-func (s *BaseCELListener) ExitExpr(ctx *ExprContext) {}
-
-// EnterConditionalOr is called when production conditionalOr is entered.
-func (s *BaseCELListener) EnterConditionalOr(ctx *ConditionalOrContext) {}
-
-// ExitConditionalOr is called when production conditionalOr is exited.
-func (s *BaseCELListener) ExitConditionalOr(ctx *ConditionalOrContext) {}
-
-// EnterConditionalAnd is called when production conditionalAnd is entered.
-func (s *BaseCELListener) EnterConditionalAnd(ctx *ConditionalAndContext) {}
-
-// ExitConditionalAnd is called when production conditionalAnd is exited.
-func (s *BaseCELListener) ExitConditionalAnd(ctx *ConditionalAndContext) {}
-
-// EnterRelation is called when production relation is entered.
-func (s *BaseCELListener) EnterRelation(ctx *RelationContext) {}
-
-// ExitRelation is called when production relation is exited.
-func (s *BaseCELListener) ExitRelation(ctx *RelationContext) {}
-
-// EnterCalc is called when production calc is entered.
-func (s *BaseCELListener) EnterCalc(ctx *CalcContext) {}
-
-// ExitCalc is called when production calc is exited.
-func (s *BaseCELListener) ExitCalc(ctx *CalcContext) {}
-
-// EnterMemberExpr is called when production MemberExpr is entered.
-func (s *BaseCELListener) EnterMemberExpr(ctx *MemberExprContext) {}
-
-// ExitMemberExpr is called when production MemberExpr is exited.
-func (s *BaseCELListener) ExitMemberExpr(ctx *MemberExprContext) {}
-
-// EnterLogicalNot is called when production LogicalNot is entered.
-func (s *BaseCELListener) EnterLogicalNot(ctx *LogicalNotContext) {}
-
-// ExitLogicalNot is called when production LogicalNot is exited.
-func (s *BaseCELListener) ExitLogicalNot(ctx *LogicalNotContext) {}
-
-// EnterNegate is called when production Negate is entered.
-func (s *BaseCELListener) EnterNegate(ctx *NegateContext) {}
-
-// ExitNegate is called when production Negate is exited.
-func (s *BaseCELListener) ExitNegate(ctx *NegateContext) {}
-
-// EnterMemberCall is called when production MemberCall is entered.
-func (s *BaseCELListener) EnterMemberCall(ctx *MemberCallContext) {}
-
-// ExitMemberCall is called when production MemberCall is exited.
-func (s *BaseCELListener) ExitMemberCall(ctx *MemberCallContext) {}
-
-// EnterSelect is called when production Select is entered.
-func (s *BaseCELListener) EnterSelect(ctx *SelectContext) {}
-
-// ExitSelect is called when production Select is exited.
-func (s *BaseCELListener) ExitSelect(ctx *SelectContext) {}
-
-// EnterPrimaryExpr is called when production PrimaryExpr is entered.
-func (s *BaseCELListener) EnterPrimaryExpr(ctx *PrimaryExprContext) {}
-
-// ExitPrimaryExpr is called when production PrimaryExpr is exited.
-func (s *BaseCELListener) ExitPrimaryExpr(ctx *PrimaryExprContext) {}
-
-// EnterIndex is called when production Index is entered.
-func (s *BaseCELListener) EnterIndex(ctx *IndexContext) {}
-
-// ExitIndex is called when production Index is exited.
-func (s *BaseCELListener) ExitIndex(ctx *IndexContext) {}
-
-// EnterIdentOrGlobalCall is called when production IdentOrGlobalCall is entered.
-func (s *BaseCELListener) EnterIdentOrGlobalCall(ctx *IdentOrGlobalCallContext) {}
-
-// ExitIdentOrGlobalCall is called when production IdentOrGlobalCall is exited.
-func (s *BaseCELListener) ExitIdentOrGlobalCall(ctx *IdentOrGlobalCallContext) {}
-
-// EnterNested is called when production Nested is entered.
-func (s *BaseCELListener) EnterNested(ctx *NestedContext) {}
-
-// ExitNested is called when production Nested is exited.
-func (s *BaseCELListener) ExitNested(ctx *NestedContext) {}
-
-// EnterCreateList is called when production CreateList is entered.
-func (s *BaseCELListener) EnterCreateList(ctx *CreateListContext) {}
-
-// ExitCreateList is called when production CreateList is exited.
-func (s *BaseCELListener) ExitCreateList(ctx *CreateListContext) {}
-
-// EnterCreateStruct is called when production CreateStruct is entered.
-func (s *BaseCELListener) EnterCreateStruct(ctx *CreateStructContext) {}
-
-// ExitCreateStruct is called when production CreateStruct is exited.
-func (s *BaseCELListener) ExitCreateStruct(ctx *CreateStructContext) {}
-
-// EnterCreateMessage is called when production CreateMessage is entered.
-func (s *BaseCELListener) EnterCreateMessage(ctx *CreateMessageContext) {}
-
-// ExitCreateMessage is called when production CreateMessage is exited.
-func (s *BaseCELListener) ExitCreateMessage(ctx *CreateMessageContext) {}
-
-// EnterConstantLiteral is called when production ConstantLiteral is entered.
-func (s *BaseCELListener) EnterConstantLiteral(ctx *ConstantLiteralContext) {}
-
-// ExitConstantLiteral is called when production ConstantLiteral is exited.
-func (s *BaseCELListener) ExitConstantLiteral(ctx *ConstantLiteralContext) {}
-
-// EnterExprList is called when production exprList is entered.
-func (s *BaseCELListener) EnterExprList(ctx *ExprListContext) {}
-
-// ExitExprList is called when production exprList is exited.
-func (s *BaseCELListener) ExitExprList(ctx *ExprListContext) {}
-
-// EnterListInit is called when production listInit is entered.
-func (s *BaseCELListener) EnterListInit(ctx *ListInitContext) {}
-
-// ExitListInit is called when production listInit is exited.
-func (s *BaseCELListener) ExitListInit(ctx *ListInitContext) {}
-
-// EnterFieldInitializerList is called when production fieldInitializerList is entered.
-func (s *BaseCELListener) EnterFieldInitializerList(ctx *FieldInitializerListContext) {}
-
-// ExitFieldInitializerList is called when production fieldInitializerList is exited.
-func (s *BaseCELListener) ExitFieldInitializerList(ctx *FieldInitializerListContext) {}
-
-// EnterOptField is called when production optField is entered.
-func (s *BaseCELListener) EnterOptField(ctx *OptFieldContext) {}
-
-// ExitOptField is called when production optField is exited.
-func (s *BaseCELListener) ExitOptField(ctx *OptFieldContext) {}
-
-// EnterMapInitializerList is called when production mapInitializerList is entered.
-func (s *BaseCELListener) EnterMapInitializerList(ctx *MapInitializerListContext) {}
-
-// ExitMapInitializerList is called when production mapInitializerList is exited.
-func (s *BaseCELListener) ExitMapInitializerList(ctx *MapInitializerListContext) {}
-
-// EnterOptExpr is called when production optExpr is entered.
-func (s *BaseCELListener) EnterOptExpr(ctx *OptExprContext) {}
-
-// ExitOptExpr is called when production optExpr is exited.
-func (s *BaseCELListener) ExitOptExpr(ctx *OptExprContext) {}
-
-// EnterInt is called when production Int is entered.
-func (s *BaseCELListener) EnterInt(ctx *IntContext) {}
-
-// ExitInt is called when production Int is exited.
-func (s *BaseCELListener) ExitInt(ctx *IntContext) {}
-
-// EnterUint is called when production Uint is entered.
-func (s *BaseCELListener) EnterUint(ctx *UintContext) {}
-
-// ExitUint is called when production Uint is exited.
-func (s *BaseCELListener) ExitUint(ctx *UintContext) {}
-
-// EnterDouble is called when production Double is entered.
-func (s *BaseCELListener) EnterDouble(ctx *DoubleContext) {}
-
-// ExitDouble is called when production Double is exited.
-func (s *BaseCELListener) ExitDouble(ctx *DoubleContext) {}
-
-// EnterString is called when production String is entered.
-func (s *BaseCELListener) EnterString(ctx *StringContext) {}
-
-// ExitString is called when production String is exited.
-func (s *BaseCELListener) ExitString(ctx *StringContext) {}
-
-// EnterBytes is called when production Bytes is entered.
-func (s *BaseCELListener) EnterBytes(ctx *BytesContext) {}
-
-// ExitBytes is called when production Bytes is exited.
-func (s *BaseCELListener) ExitBytes(ctx *BytesContext) {}
-
-// EnterBoolTrue is called when production BoolTrue is entered.
-func (s *BaseCELListener) EnterBoolTrue(ctx *BoolTrueContext) {}
-
-// ExitBoolTrue is called when production BoolTrue is exited.
-func (s *BaseCELListener) ExitBoolTrue(ctx *BoolTrueContext) {}
-
-// EnterBoolFalse is called when production BoolFalse is entered.
-func (s *BaseCELListener) EnterBoolFalse(ctx *BoolFalseContext) {}
-
-// ExitBoolFalse is called when production BoolFalse is exited.
-func (s *BaseCELListener) ExitBoolFalse(ctx *BoolFalseContext) {}
-
-// EnterNull is called when production Null is entered.
-func (s *BaseCELListener) EnterNull(ctx *NullContext) {}
-
-// ExitNull is called when production Null is exited.
-func (s *BaseCELListener) ExitNull(ctx *NullContext) {}
diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_base_visitor.go b/vendor/github.com/google/cel-go/parser/gen/cel_base_visitor.go
deleted file mode 100644
index b2c0783d3..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/cel_base_visitor.go
+++ /dev/null
@@ -1,141 +0,0 @@
-// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT.
-
-package gen // CEL
-import "github.com/antlr4-go/antlr/v4"
-
-
-type BaseCELVisitor struct {
- *antlr.BaseParseTreeVisitor
-}
-
-func (v *BaseCELVisitor) VisitStart(ctx *StartContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitExpr(ctx *ExprContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitConditionalOr(ctx *ConditionalOrContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitConditionalAnd(ctx *ConditionalAndContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitRelation(ctx *RelationContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitCalc(ctx *CalcContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitMemberExpr(ctx *MemberExprContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitLogicalNot(ctx *LogicalNotContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitNegate(ctx *NegateContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitMemberCall(ctx *MemberCallContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitSelect(ctx *SelectContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitPrimaryExpr(ctx *PrimaryExprContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitIndex(ctx *IndexContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitIdentOrGlobalCall(ctx *IdentOrGlobalCallContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitNested(ctx *NestedContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitCreateList(ctx *CreateListContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitCreateStruct(ctx *CreateStructContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitCreateMessage(ctx *CreateMessageContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitConstantLiteral(ctx *ConstantLiteralContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitExprList(ctx *ExprListContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitListInit(ctx *ListInitContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitFieldInitializerList(ctx *FieldInitializerListContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitOptField(ctx *OptFieldContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitMapInitializerList(ctx *MapInitializerListContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitOptExpr(ctx *OptExprContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitInt(ctx *IntContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitUint(ctx *UintContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitDouble(ctx *DoubleContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitString(ctx *StringContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitBytes(ctx *BytesContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitBoolTrue(ctx *BoolTrueContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitBoolFalse(ctx *BoolFalseContext) interface{} {
- return v.VisitChildren(ctx)
-}
-
-func (v *BaseCELVisitor) VisitNull(ctx *NullContext) interface{} {
- return v.VisitChildren(ctx)
-}
diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_lexer.go b/vendor/github.com/google/cel-go/parser/gen/cel_lexer.go
deleted file mode 100644
index e026cc46f..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/cel_lexer.go
+++ /dev/null
@@ -1,344 +0,0 @@
-// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT.
-
-package gen
-import (
- "fmt"
- "sync"
- "unicode"
- "github.com/antlr4-go/antlr/v4"
-)
-// Suppress unused import error
-var _ = fmt.Printf
-var _ = sync.Once{}
-var _ = unicode.IsLetter
-
-
-type CELLexer struct {
- *antlr.BaseLexer
- channelNames []string
- modeNames []string
- // TODO: EOF string
-}
-
-var CELLexerLexerStaticData struct {
- once sync.Once
- serializedATN []int32
- ChannelNames []string
- ModeNames []string
- LiteralNames []string
- SymbolicNames []string
- RuleNames []string
- PredictionContextCache *antlr.PredictionContextCache
- atn *antlr.ATN
- decisionToDFA []*antlr.DFA
-}
-
-func cellexerLexerInit() {
- staticData := &CELLexerLexerStaticData
- staticData.ChannelNames = []string{
- "DEFAULT_TOKEN_CHANNEL", "HIDDEN",
- }
- staticData.ModeNames = []string{
- "DEFAULT_MODE",
- }
- staticData.LiteralNames = []string{
- "", "'=='", "'!='", "'in'", "'<'", "'<='", "'>='", "'>'", "'&&'", "'||'",
- "'['", "']'", "'{'", "'}'", "'('", "')'", "'.'", "','", "'-'", "'!'",
- "'?'", "':'", "'+'", "'*'", "'/'", "'%'", "'true'", "'false'", "'null'",
- }
- staticData.SymbolicNames = []string{
- "", "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS",
- "GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE",
- "RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK",
- "COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE",
- "NUL", "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT",
- "STRING", "BYTES", "IDENTIFIER",
- }
- staticData.RuleNames = []string{
- "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS",
- "GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE",
- "RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK",
- "COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE",
- "NUL", "BACKSLASH", "LETTER", "DIGIT", "EXPONENT", "HEXDIGIT", "RAW",
- "ESC_SEQ", "ESC_CHAR_SEQ", "ESC_OCT_SEQ", "ESC_BYTE_SEQ", "ESC_UNI_SEQ",
- "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", "STRING",
- "BYTES", "IDENTIFIER",
- }
- staticData.PredictionContextCache = antlr.NewPredictionContextCache()
- staticData.serializedATN = []int32{
- 4, 0, 36, 423, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
- 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
- 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
- 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
- 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
- 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
- 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
- 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7,
- 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46,
- 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4,
- 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8,
- 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13,
- 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1,
- 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24,
- 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1,
- 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29,
- 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 177, 8, 31, 1, 31, 4, 31, 180, 8, 31,
- 11, 31, 12, 31, 181, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1,
- 34, 3, 34, 192, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36,
- 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1,
- 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38,
- 1, 38, 1, 38, 1, 38, 3, 38, 225, 8, 38, 1, 39, 4, 39, 228, 8, 39, 11, 39,
- 12, 39, 229, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 238, 8, 40,
- 10, 40, 12, 40, 241, 9, 40, 1, 40, 1, 40, 1, 41, 4, 41, 246, 8, 41, 11,
- 41, 12, 41, 247, 1, 41, 1, 41, 4, 41, 252, 8, 41, 11, 41, 12, 41, 253,
- 1, 41, 3, 41, 257, 8, 41, 1, 41, 4, 41, 260, 8, 41, 11, 41, 12, 41, 261,
- 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 268, 8, 41, 11, 41, 12, 41, 269, 1,
- 41, 3, 41, 273, 8, 41, 3, 41, 275, 8, 41, 1, 42, 4, 42, 278, 8, 42, 11,
- 42, 12, 42, 279, 1, 42, 1, 42, 1, 42, 1, 42, 4, 42, 286, 8, 42, 11, 42,
- 12, 42, 287, 3, 42, 290, 8, 42, 1, 43, 4, 43, 293, 8, 43, 11, 43, 12, 43,
- 294, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 4, 43, 303, 8, 43, 11, 43,
- 12, 43, 304, 1, 43, 1, 43, 3, 43, 309, 8, 43, 1, 44, 1, 44, 1, 44, 5, 44,
- 314, 8, 44, 10, 44, 12, 44, 317, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5,
- 44, 323, 8, 44, 10, 44, 12, 44, 326, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44,
- 1, 44, 1, 44, 1, 44, 5, 44, 335, 8, 44, 10, 44, 12, 44, 338, 9, 44, 1,
- 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 349,
- 8, 44, 10, 44, 12, 44, 352, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1,
- 44, 5, 44, 360, 8, 44, 10, 44, 12, 44, 363, 9, 44, 1, 44, 1, 44, 1, 44,
- 1, 44, 1, 44, 5, 44, 370, 8, 44, 10, 44, 12, 44, 373, 9, 44, 1, 44, 1,
- 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 383, 8, 44, 10, 44,
- 12, 44, 386, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1,
- 44, 1, 44, 1, 44, 5, 44, 398, 8, 44, 10, 44, 12, 44, 401, 9, 44, 1, 44,
- 1, 44, 1, 44, 1, 44, 3, 44, 407, 8, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1,
- 46, 3, 46, 414, 8, 46, 1, 46, 1, 46, 1, 46, 5, 46, 419, 8, 46, 10, 46,
- 12, 46, 422, 9, 46, 4, 336, 350, 384, 399, 0, 47, 1, 1, 3, 2, 5, 3, 7,
- 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27,
- 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45,
- 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 0, 59, 0, 61, 0, 63, 0,
- 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 29, 81, 30, 83, 31,
- 85, 32, 87, 33, 89, 34, 91, 35, 93, 36, 1, 0, 16, 2, 0, 65, 90, 97, 122,
- 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 3, 0, 48, 57, 65, 70, 97,
- 102, 2, 0, 82, 82, 114, 114, 10, 0, 34, 34, 39, 39, 63, 63, 92, 92, 96,
- 98, 102, 102, 110, 110, 114, 114, 116, 116, 118, 118, 2, 0, 88, 88, 120,
- 120, 3, 0, 9, 10, 12, 13, 32, 32, 1, 0, 10, 10, 2, 0, 85, 85, 117, 117,
- 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 4, 0, 10, 10, 13, 13, 39, 39, 92,
- 92, 1, 0, 92, 92, 3, 0, 10, 10, 13, 13, 34, 34, 3, 0, 10, 10, 13, 13, 39,
- 39, 2, 0, 66, 66, 98, 98, 456, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5,
- 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13,
- 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0,
- 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0,
- 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0,
- 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0,
- 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1,
- 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81,
- 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0,
- 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0,
- 3, 98, 1, 0, 0, 0, 5, 101, 1, 0, 0, 0, 7, 104, 1, 0, 0, 0, 9, 106, 1, 0,
- 0, 0, 11, 109, 1, 0, 0, 0, 13, 112, 1, 0, 0, 0, 15, 114, 1, 0, 0, 0, 17,
- 117, 1, 0, 0, 0, 19, 120, 1, 0, 0, 0, 21, 122, 1, 0, 0, 0, 23, 124, 1,
- 0, 0, 0, 25, 126, 1, 0, 0, 0, 27, 128, 1, 0, 0, 0, 29, 130, 1, 0, 0, 0,
- 31, 132, 1, 0, 0, 0, 33, 134, 1, 0, 0, 0, 35, 136, 1, 0, 0, 0, 37, 138,
- 1, 0, 0, 0, 39, 140, 1, 0, 0, 0, 41, 142, 1, 0, 0, 0, 43, 144, 1, 0, 0,
- 0, 45, 146, 1, 0, 0, 0, 47, 148, 1, 0, 0, 0, 49, 150, 1, 0, 0, 0, 51, 152,
- 1, 0, 0, 0, 53, 157, 1, 0, 0, 0, 55, 163, 1, 0, 0, 0, 57, 168, 1, 0, 0,
- 0, 59, 170, 1, 0, 0, 0, 61, 172, 1, 0, 0, 0, 63, 174, 1, 0, 0, 0, 65, 183,
- 1, 0, 0, 0, 67, 185, 1, 0, 0, 0, 69, 191, 1, 0, 0, 0, 71, 193, 1, 0, 0,
- 0, 73, 196, 1, 0, 0, 0, 75, 201, 1, 0, 0, 0, 77, 224, 1, 0, 0, 0, 79, 227,
- 1, 0, 0, 0, 81, 233, 1, 0, 0, 0, 83, 274, 1, 0, 0, 0, 85, 289, 1, 0, 0,
- 0, 87, 308, 1, 0, 0, 0, 89, 406, 1, 0, 0, 0, 91, 408, 1, 0, 0, 0, 93, 413,
- 1, 0, 0, 0, 95, 96, 5, 61, 0, 0, 96, 97, 5, 61, 0, 0, 97, 2, 1, 0, 0, 0,
- 98, 99, 5, 33, 0, 0, 99, 100, 5, 61, 0, 0, 100, 4, 1, 0, 0, 0, 101, 102,
- 5, 105, 0, 0, 102, 103, 5, 110, 0, 0, 103, 6, 1, 0, 0, 0, 104, 105, 5,
- 60, 0, 0, 105, 8, 1, 0, 0, 0, 106, 107, 5, 60, 0, 0, 107, 108, 5, 61, 0,
- 0, 108, 10, 1, 0, 0, 0, 109, 110, 5, 62, 0, 0, 110, 111, 5, 61, 0, 0, 111,
- 12, 1, 0, 0, 0, 112, 113, 5, 62, 0, 0, 113, 14, 1, 0, 0, 0, 114, 115, 5,
- 38, 0, 0, 115, 116, 5, 38, 0, 0, 116, 16, 1, 0, 0, 0, 117, 118, 5, 124,
- 0, 0, 118, 119, 5, 124, 0, 0, 119, 18, 1, 0, 0, 0, 120, 121, 5, 91, 0,
- 0, 121, 20, 1, 0, 0, 0, 122, 123, 5, 93, 0, 0, 123, 22, 1, 0, 0, 0, 124,
- 125, 5, 123, 0, 0, 125, 24, 1, 0, 0, 0, 126, 127, 5, 125, 0, 0, 127, 26,
- 1, 0, 0, 0, 128, 129, 5, 40, 0, 0, 129, 28, 1, 0, 0, 0, 130, 131, 5, 41,
- 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 5, 46, 0, 0, 133, 32, 1, 0, 0, 0,
- 134, 135, 5, 44, 0, 0, 135, 34, 1, 0, 0, 0, 136, 137, 5, 45, 0, 0, 137,
- 36, 1, 0, 0, 0, 138, 139, 5, 33, 0, 0, 139, 38, 1, 0, 0, 0, 140, 141, 5,
- 63, 0, 0, 141, 40, 1, 0, 0, 0, 142, 143, 5, 58, 0, 0, 143, 42, 1, 0, 0,
- 0, 144, 145, 5, 43, 0, 0, 145, 44, 1, 0, 0, 0, 146, 147, 5, 42, 0, 0, 147,
- 46, 1, 0, 0, 0, 148, 149, 5, 47, 0, 0, 149, 48, 1, 0, 0, 0, 150, 151, 5,
- 37, 0, 0, 151, 50, 1, 0, 0, 0, 152, 153, 5, 116, 0, 0, 153, 154, 5, 114,
- 0, 0, 154, 155, 5, 117, 0, 0, 155, 156, 5, 101, 0, 0, 156, 52, 1, 0, 0,
- 0, 157, 158, 5, 102, 0, 0, 158, 159, 5, 97, 0, 0, 159, 160, 5, 108, 0,
- 0, 160, 161, 5, 115, 0, 0, 161, 162, 5, 101, 0, 0, 162, 54, 1, 0, 0, 0,
- 163, 164, 5, 110, 0, 0, 164, 165, 5, 117, 0, 0, 165, 166, 5, 108, 0, 0,
- 166, 167, 5, 108, 0, 0, 167, 56, 1, 0, 0, 0, 168, 169, 5, 92, 0, 0, 169,
- 58, 1, 0, 0, 0, 170, 171, 7, 0, 0, 0, 171, 60, 1, 0, 0, 0, 172, 173, 2,
- 48, 57, 0, 173, 62, 1, 0, 0, 0, 174, 176, 7, 1, 0, 0, 175, 177, 7, 2, 0,
- 0, 176, 175, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 179, 1, 0, 0, 0, 178,
- 180, 3, 61, 30, 0, 179, 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 179,
- 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 64, 1, 0, 0, 0, 183, 184, 7, 3,
- 0, 0, 184, 66, 1, 0, 0, 0, 185, 186, 7, 4, 0, 0, 186, 68, 1, 0, 0, 0, 187,
- 192, 3, 71, 35, 0, 188, 192, 3, 75, 37, 0, 189, 192, 3, 77, 38, 0, 190,
- 192, 3, 73, 36, 0, 191, 187, 1, 0, 0, 0, 191, 188, 1, 0, 0, 0, 191, 189,
- 1, 0, 0, 0, 191, 190, 1, 0, 0, 0, 192, 70, 1, 0, 0, 0, 193, 194, 3, 57,
- 28, 0, 194, 195, 7, 5, 0, 0, 195, 72, 1, 0, 0, 0, 196, 197, 3, 57, 28,
- 0, 197, 198, 2, 48, 51, 0, 198, 199, 2, 48, 55, 0, 199, 200, 2, 48, 55,
- 0, 200, 74, 1, 0, 0, 0, 201, 202, 3, 57, 28, 0, 202, 203, 7, 6, 0, 0, 203,
- 204, 3, 65, 32, 0, 204, 205, 3, 65, 32, 0, 205, 76, 1, 0, 0, 0, 206, 207,
- 3, 57, 28, 0, 207, 208, 5, 117, 0, 0, 208, 209, 3, 65, 32, 0, 209, 210,
- 3, 65, 32, 0, 210, 211, 3, 65, 32, 0, 211, 212, 3, 65, 32, 0, 212, 225,
- 1, 0, 0, 0, 213, 214, 3, 57, 28, 0, 214, 215, 5, 85, 0, 0, 215, 216, 3,
- 65, 32, 0, 216, 217, 3, 65, 32, 0, 217, 218, 3, 65, 32, 0, 218, 219, 3,
- 65, 32, 0, 219, 220, 3, 65, 32, 0, 220, 221, 3, 65, 32, 0, 221, 222, 3,
- 65, 32, 0, 222, 223, 3, 65, 32, 0, 223, 225, 1, 0, 0, 0, 224, 206, 1, 0,
- 0, 0, 224, 213, 1, 0, 0, 0, 225, 78, 1, 0, 0, 0, 226, 228, 7, 7, 0, 0,
- 227, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 227, 1, 0, 0, 0, 229,
- 230, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 6, 39, 0, 0, 232, 80,
- 1, 0, 0, 0, 233, 234, 5, 47, 0, 0, 234, 235, 5, 47, 0, 0, 235, 239, 1,
- 0, 0, 0, 236, 238, 8, 8, 0, 0, 237, 236, 1, 0, 0, 0, 238, 241, 1, 0, 0,
- 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 242, 1, 0, 0, 0, 241,
- 239, 1, 0, 0, 0, 242, 243, 6, 40, 0, 0, 243, 82, 1, 0, 0, 0, 244, 246,
- 3, 61, 30, 0, 245, 244, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 245, 1,
- 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 5, 46, 0,
- 0, 250, 252, 3, 61, 30, 0, 251, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0,
- 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 256, 1, 0, 0, 0, 255,
- 257, 3, 63, 31, 0, 256, 255, 1, 0, 0, 0, 256, 257, 1, 0, 0, 0, 257, 275,
- 1, 0, 0, 0, 258, 260, 3, 61, 30, 0, 259, 258, 1, 0, 0, 0, 260, 261, 1,
- 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 1, 0, 0,
- 0, 263, 264, 3, 63, 31, 0, 264, 275, 1, 0, 0, 0, 265, 267, 5, 46, 0, 0,
- 266, 268, 3, 61, 30, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269,
- 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 272, 1, 0, 0, 0, 271, 273,
- 3, 63, 31, 0, 272, 271, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 275, 1,
- 0, 0, 0, 274, 245, 1, 0, 0, 0, 274, 259, 1, 0, 0, 0, 274, 265, 1, 0, 0,
- 0, 275, 84, 1, 0, 0, 0, 276, 278, 3, 61, 30, 0, 277, 276, 1, 0, 0, 0, 278,
- 279, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 290,
- 1, 0, 0, 0, 281, 282, 5, 48, 0, 0, 282, 283, 5, 120, 0, 0, 283, 285, 1,
- 0, 0, 0, 284, 286, 3, 65, 32, 0, 285, 284, 1, 0, 0, 0, 286, 287, 1, 0,
- 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 290, 1, 0, 0, 0,
- 289, 277, 1, 0, 0, 0, 289, 281, 1, 0, 0, 0, 290, 86, 1, 0, 0, 0, 291, 293,
- 3, 61, 30, 0, 292, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 292, 1,
- 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 1, 0, 0, 0, 296, 297, 7, 9, 0,
- 0, 297, 309, 1, 0, 0, 0, 298, 299, 5, 48, 0, 0, 299, 300, 5, 120, 0, 0,
- 300, 302, 1, 0, 0, 0, 301, 303, 3, 65, 32, 0, 302, 301, 1, 0, 0, 0, 303,
- 304, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306,
- 1, 0, 0, 0, 306, 307, 7, 9, 0, 0, 307, 309, 1, 0, 0, 0, 308, 292, 1, 0,
- 0, 0, 308, 298, 1, 0, 0, 0, 309, 88, 1, 0, 0, 0, 310, 315, 5, 34, 0, 0,
- 311, 314, 3, 69, 34, 0, 312, 314, 8, 10, 0, 0, 313, 311, 1, 0, 0, 0, 313,
- 312, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316,
- 1, 0, 0, 0, 316, 318, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 407, 5, 34,
- 0, 0, 319, 324, 5, 39, 0, 0, 320, 323, 3, 69, 34, 0, 321, 323, 8, 11, 0,
- 0, 322, 320, 1, 0, 0, 0, 322, 321, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324,
- 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 327, 1, 0, 0, 0, 326, 324,
- 1, 0, 0, 0, 327, 407, 5, 39, 0, 0, 328, 329, 5, 34, 0, 0, 329, 330, 5,
- 34, 0, 0, 330, 331, 5, 34, 0, 0, 331, 336, 1, 0, 0, 0, 332, 335, 3, 69,
- 34, 0, 333, 335, 8, 12, 0, 0, 334, 332, 1, 0, 0, 0, 334, 333, 1, 0, 0,
- 0, 335, 338, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 337,
- 339, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 339, 340, 5, 34, 0, 0, 340, 341,
- 5, 34, 0, 0, 341, 407, 5, 34, 0, 0, 342, 343, 5, 39, 0, 0, 343, 344, 5,
- 39, 0, 0, 344, 345, 5, 39, 0, 0, 345, 350, 1, 0, 0, 0, 346, 349, 3, 69,
- 34, 0, 347, 349, 8, 12, 0, 0, 348, 346, 1, 0, 0, 0, 348, 347, 1, 0, 0,
- 0, 349, 352, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351,
- 353, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 354, 5, 39, 0, 0, 354, 355,
- 5, 39, 0, 0, 355, 407, 5, 39, 0, 0, 356, 357, 3, 67, 33, 0, 357, 361, 5,
- 34, 0, 0, 358, 360, 8, 13, 0, 0, 359, 358, 1, 0, 0, 0, 360, 363, 1, 0,
- 0, 0, 361, 359, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 364, 1, 0, 0, 0,
- 363, 361, 1, 0, 0, 0, 364, 365, 5, 34, 0, 0, 365, 407, 1, 0, 0, 0, 366,
- 367, 3, 67, 33, 0, 367, 371, 5, 39, 0, 0, 368, 370, 8, 14, 0, 0, 369, 368,
- 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 372, 1, 0,
- 0, 0, 372, 374, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 375, 5, 39, 0, 0,
- 375, 407, 1, 0, 0, 0, 376, 377, 3, 67, 33, 0, 377, 378, 5, 34, 0, 0, 378,
- 379, 5, 34, 0, 0, 379, 380, 5, 34, 0, 0, 380, 384, 1, 0, 0, 0, 381, 383,
- 9, 0, 0, 0, 382, 381, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 385, 1, 0,
- 0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0,
- 387, 388, 5, 34, 0, 0, 388, 389, 5, 34, 0, 0, 389, 390, 5, 34, 0, 0, 390,
- 407, 1, 0, 0, 0, 391, 392, 3, 67, 33, 0, 392, 393, 5, 39, 0, 0, 393, 394,
- 5, 39, 0, 0, 394, 395, 5, 39, 0, 0, 395, 399, 1, 0, 0, 0, 396, 398, 9,
- 0, 0, 0, 397, 396, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 400, 1, 0, 0,
- 0, 399, 397, 1, 0, 0, 0, 400, 402, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402,
- 403, 5, 39, 0, 0, 403, 404, 5, 39, 0, 0, 404, 405, 5, 39, 0, 0, 405, 407,
- 1, 0, 0, 0, 406, 310, 1, 0, 0, 0, 406, 319, 1, 0, 0, 0, 406, 328, 1, 0,
- 0, 0, 406, 342, 1, 0, 0, 0, 406, 356, 1, 0, 0, 0, 406, 366, 1, 0, 0, 0,
- 406, 376, 1, 0, 0, 0, 406, 391, 1, 0, 0, 0, 407, 90, 1, 0, 0, 0, 408, 409,
- 7, 15, 0, 0, 409, 410, 3, 89, 44, 0, 410, 92, 1, 0, 0, 0, 411, 414, 3,
- 59, 29, 0, 412, 414, 5, 95, 0, 0, 413, 411, 1, 0, 0, 0, 413, 412, 1, 0,
- 0, 0, 414, 420, 1, 0, 0, 0, 415, 419, 3, 59, 29, 0, 416, 419, 3, 61, 30,
- 0, 417, 419, 5, 95, 0, 0, 418, 415, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418,
- 417, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421,
- 1, 0, 0, 0, 421, 94, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 36, 0, 176, 181,
- 191, 224, 229, 239, 247, 253, 256, 261, 269, 272, 274, 279, 287, 289, 294,
- 304, 308, 313, 315, 322, 324, 334, 336, 348, 350, 361, 371, 384, 399, 406,
- 413, 418, 420, 1, 0, 1, 0,
-}
- deserializer := antlr.NewATNDeserializer(nil)
- staticData.atn = deserializer.Deserialize(staticData.serializedATN)
- atn := staticData.atn
- staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
- decisionToDFA := staticData.decisionToDFA
- for index, state := range atn.DecisionToState {
- decisionToDFA[index] = antlr.NewDFA(state, index)
- }
-}
-
-// CELLexerInit initializes any static state used to implement CELLexer. By default the
-// static state used to implement the lexer is lazily initialized during the first call to
-// NewCELLexer(). You can call this function if you wish to initialize the static state ahead
-// of time.
-func CELLexerInit() {
- staticData := &CELLexerLexerStaticData
- staticData.once.Do(cellexerLexerInit)
-}
-
-// NewCELLexer produces a new lexer instance for the optional input antlr.CharStream.
-func NewCELLexer(input antlr.CharStream) *CELLexer {
- CELLexerInit()
- l := new(CELLexer)
- l.BaseLexer = antlr.NewBaseLexer(input)
- staticData := &CELLexerLexerStaticData
- l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache)
- l.channelNames = staticData.ChannelNames
- l.modeNames = staticData.ModeNames
- l.RuleNames = staticData.RuleNames
- l.LiteralNames = staticData.LiteralNames
- l.SymbolicNames = staticData.SymbolicNames
- l.GrammarFileName = "CEL.g4"
- // TODO: l.EOF = antlr.TokenEOF
-
- return l
-}
-
-// CELLexer tokens.
-const (
- CELLexerEQUALS = 1
- CELLexerNOT_EQUALS = 2
- CELLexerIN = 3
- CELLexerLESS = 4
- CELLexerLESS_EQUALS = 5
- CELLexerGREATER_EQUALS = 6
- CELLexerGREATER = 7
- CELLexerLOGICAL_AND = 8
- CELLexerLOGICAL_OR = 9
- CELLexerLBRACKET = 10
- CELLexerRPRACKET = 11
- CELLexerLBRACE = 12
- CELLexerRBRACE = 13
- CELLexerLPAREN = 14
- CELLexerRPAREN = 15
- CELLexerDOT = 16
- CELLexerCOMMA = 17
- CELLexerMINUS = 18
- CELLexerEXCLAM = 19
- CELLexerQUESTIONMARK = 20
- CELLexerCOLON = 21
- CELLexerPLUS = 22
- CELLexerSTAR = 23
- CELLexerSLASH = 24
- CELLexerPERCENT = 25
- CELLexerCEL_TRUE = 26
- CELLexerCEL_FALSE = 27
- CELLexerNUL = 28
- CELLexerWHITESPACE = 29
- CELLexerCOMMENT = 30
- CELLexerNUM_FLOAT = 31
- CELLexerNUM_INT = 32
- CELLexerNUM_UINT = 33
- CELLexerSTRING = 34
- CELLexerBYTES = 35
- CELLexerIDENTIFIER = 36
-)
-
diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_listener.go b/vendor/github.com/google/cel-go/parser/gen/cel_listener.go
deleted file mode 100644
index 22dc99789..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/cel_listener.go
+++ /dev/null
@@ -1,208 +0,0 @@
-// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT.
-
-package gen // CEL
-import "github.com/antlr4-go/antlr/v4"
-
-
-// CELListener is a complete listener for a parse tree produced by CELParser.
-type CELListener interface {
- antlr.ParseTreeListener
-
- // EnterStart is called when entering the start production.
- EnterStart(c *StartContext)
-
- // EnterExpr is called when entering the expr production.
- EnterExpr(c *ExprContext)
-
- // EnterConditionalOr is called when entering the conditionalOr production.
- EnterConditionalOr(c *ConditionalOrContext)
-
- // EnterConditionalAnd is called when entering the conditionalAnd production.
- EnterConditionalAnd(c *ConditionalAndContext)
-
- // EnterRelation is called when entering the relation production.
- EnterRelation(c *RelationContext)
-
- // EnterCalc is called when entering the calc production.
- EnterCalc(c *CalcContext)
-
- // EnterMemberExpr is called when entering the MemberExpr production.
- EnterMemberExpr(c *MemberExprContext)
-
- // EnterLogicalNot is called when entering the LogicalNot production.
- EnterLogicalNot(c *LogicalNotContext)
-
- // EnterNegate is called when entering the Negate production.
- EnterNegate(c *NegateContext)
-
- // EnterMemberCall is called when entering the MemberCall production.
- EnterMemberCall(c *MemberCallContext)
-
- // EnterSelect is called when entering the Select production.
- EnterSelect(c *SelectContext)
-
- // EnterPrimaryExpr is called when entering the PrimaryExpr production.
- EnterPrimaryExpr(c *PrimaryExprContext)
-
- // EnterIndex is called when entering the Index production.
- EnterIndex(c *IndexContext)
-
- // EnterIdentOrGlobalCall is called when entering the IdentOrGlobalCall production.
- EnterIdentOrGlobalCall(c *IdentOrGlobalCallContext)
-
- // EnterNested is called when entering the Nested production.
- EnterNested(c *NestedContext)
-
- // EnterCreateList is called when entering the CreateList production.
- EnterCreateList(c *CreateListContext)
-
- // EnterCreateStruct is called when entering the CreateStruct production.
- EnterCreateStruct(c *CreateStructContext)
-
- // EnterCreateMessage is called when entering the CreateMessage production.
- EnterCreateMessage(c *CreateMessageContext)
-
- // EnterConstantLiteral is called when entering the ConstantLiteral production.
- EnterConstantLiteral(c *ConstantLiteralContext)
-
- // EnterExprList is called when entering the exprList production.
- EnterExprList(c *ExprListContext)
-
- // EnterListInit is called when entering the listInit production.
- EnterListInit(c *ListInitContext)
-
- // EnterFieldInitializerList is called when entering the fieldInitializerList production.
- EnterFieldInitializerList(c *FieldInitializerListContext)
-
- // EnterOptField is called when entering the optField production.
- EnterOptField(c *OptFieldContext)
-
- // EnterMapInitializerList is called when entering the mapInitializerList production.
- EnterMapInitializerList(c *MapInitializerListContext)
-
- // EnterOptExpr is called when entering the optExpr production.
- EnterOptExpr(c *OptExprContext)
-
- // EnterInt is called when entering the Int production.
- EnterInt(c *IntContext)
-
- // EnterUint is called when entering the Uint production.
- EnterUint(c *UintContext)
-
- // EnterDouble is called when entering the Double production.
- EnterDouble(c *DoubleContext)
-
- // EnterString is called when entering the String production.
- EnterString(c *StringContext)
-
- // EnterBytes is called when entering the Bytes production.
- EnterBytes(c *BytesContext)
-
- // EnterBoolTrue is called when entering the BoolTrue production.
- EnterBoolTrue(c *BoolTrueContext)
-
- // EnterBoolFalse is called when entering the BoolFalse production.
- EnterBoolFalse(c *BoolFalseContext)
-
- // EnterNull is called when entering the Null production.
- EnterNull(c *NullContext)
-
- // ExitStart is called when exiting the start production.
- ExitStart(c *StartContext)
-
- // ExitExpr is called when exiting the expr production.
- ExitExpr(c *ExprContext)
-
- // ExitConditionalOr is called when exiting the conditionalOr production.
- ExitConditionalOr(c *ConditionalOrContext)
-
- // ExitConditionalAnd is called when exiting the conditionalAnd production.
- ExitConditionalAnd(c *ConditionalAndContext)
-
- // ExitRelation is called when exiting the relation production.
- ExitRelation(c *RelationContext)
-
- // ExitCalc is called when exiting the calc production.
- ExitCalc(c *CalcContext)
-
- // ExitMemberExpr is called when exiting the MemberExpr production.
- ExitMemberExpr(c *MemberExprContext)
-
- // ExitLogicalNot is called when exiting the LogicalNot production.
- ExitLogicalNot(c *LogicalNotContext)
-
- // ExitNegate is called when exiting the Negate production.
- ExitNegate(c *NegateContext)
-
- // ExitMemberCall is called when exiting the MemberCall production.
- ExitMemberCall(c *MemberCallContext)
-
- // ExitSelect is called when exiting the Select production.
- ExitSelect(c *SelectContext)
-
- // ExitPrimaryExpr is called when exiting the PrimaryExpr production.
- ExitPrimaryExpr(c *PrimaryExprContext)
-
- // ExitIndex is called when exiting the Index production.
- ExitIndex(c *IndexContext)
-
- // ExitIdentOrGlobalCall is called when exiting the IdentOrGlobalCall production.
- ExitIdentOrGlobalCall(c *IdentOrGlobalCallContext)
-
- // ExitNested is called when exiting the Nested production.
- ExitNested(c *NestedContext)
-
- // ExitCreateList is called when exiting the CreateList production.
- ExitCreateList(c *CreateListContext)
-
- // ExitCreateStruct is called when exiting the CreateStruct production.
- ExitCreateStruct(c *CreateStructContext)
-
- // ExitCreateMessage is called when exiting the CreateMessage production.
- ExitCreateMessage(c *CreateMessageContext)
-
- // ExitConstantLiteral is called when exiting the ConstantLiteral production.
- ExitConstantLiteral(c *ConstantLiteralContext)
-
- // ExitExprList is called when exiting the exprList production.
- ExitExprList(c *ExprListContext)
-
- // ExitListInit is called when exiting the listInit production.
- ExitListInit(c *ListInitContext)
-
- // ExitFieldInitializerList is called when exiting the fieldInitializerList production.
- ExitFieldInitializerList(c *FieldInitializerListContext)
-
- // ExitOptField is called when exiting the optField production.
- ExitOptField(c *OptFieldContext)
-
- // ExitMapInitializerList is called when exiting the mapInitializerList production.
- ExitMapInitializerList(c *MapInitializerListContext)
-
- // ExitOptExpr is called when exiting the optExpr production.
- ExitOptExpr(c *OptExprContext)
-
- // ExitInt is called when exiting the Int production.
- ExitInt(c *IntContext)
-
- // ExitUint is called when exiting the Uint production.
- ExitUint(c *UintContext)
-
- // ExitDouble is called when exiting the Double production.
- ExitDouble(c *DoubleContext)
-
- // ExitString is called when exiting the String production.
- ExitString(c *StringContext)
-
- // ExitBytes is called when exiting the Bytes production.
- ExitBytes(c *BytesContext)
-
- // ExitBoolTrue is called when exiting the BoolTrue production.
- ExitBoolTrue(c *BoolTrueContext)
-
- // ExitBoolFalse is called when exiting the BoolFalse production.
- ExitBoolFalse(c *BoolFalseContext)
-
- // ExitNull is called when exiting the Null production.
- ExitNull(c *NullContext)
-}
diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_parser.go b/vendor/github.com/google/cel-go/parser/gen/cel_parser.go
deleted file mode 100644
index 35334af61..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/cel_parser.go
+++ /dev/null
@@ -1,6274 +0,0 @@
-// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT.
-
-package gen // CEL
-import (
- "fmt"
- "strconv"
- "sync"
-
- "github.com/antlr4-go/antlr/v4"
-)
-
-// Suppress unused import errors
-var _ = fmt.Printf
-var _ = strconv.Itoa
-var _ = sync.Once{}
-
-
-type CELParser struct {
- *antlr.BaseParser
-}
-
-var CELParserStaticData struct {
- once sync.Once
- serializedATN []int32
- LiteralNames []string
- SymbolicNames []string
- RuleNames []string
- PredictionContextCache *antlr.PredictionContextCache
- atn *antlr.ATN
- decisionToDFA []*antlr.DFA
-}
-
-func celParserInit() {
- staticData := &CELParserStaticData
- staticData.LiteralNames = []string{
- "", "'=='", "'!='", "'in'", "'<'", "'<='", "'>='", "'>'", "'&&'", "'||'",
- "'['", "']'", "'{'", "'}'", "'('", "')'", "'.'", "','", "'-'", "'!'",
- "'?'", "':'", "'+'", "'*'", "'/'", "'%'", "'true'", "'false'", "'null'",
- }
- staticData.SymbolicNames = []string{
- "", "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS",
- "GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE",
- "RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK",
- "COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE",
- "NUL", "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT",
- "STRING", "BYTES", "IDENTIFIER",
- }
- staticData.RuleNames = []string{
- "start", "expr", "conditionalOr", "conditionalAnd", "relation", "calc",
- "unary", "member", "primary", "exprList", "listInit", "fieldInitializerList",
- "optField", "mapInitializerList", "optExpr", "literal",
- }
- staticData.PredictionContextCache = antlr.NewPredictionContextCache()
- staticData.serializedATN = []int32{
- 4, 1, 36, 251, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
- 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7,
- 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15,
- 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 42, 8, 1, 1,
- 2, 1, 2, 1, 2, 5, 2, 47, 8, 2, 10, 2, 12, 2, 50, 9, 2, 1, 3, 1, 3, 1, 3,
- 5, 3, 55, 8, 3, 10, 3, 12, 3, 58, 9, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1,
- 4, 5, 4, 66, 8, 4, 10, 4, 12, 4, 69, 9, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5,
- 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 80, 8, 5, 10, 5, 12, 5, 83, 9, 5, 1, 6, 1,
- 6, 4, 6, 87, 8, 6, 11, 6, 12, 6, 88, 1, 6, 1, 6, 4, 6, 93, 8, 6, 11, 6,
- 12, 6, 94, 1, 6, 3, 6, 98, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3,
- 7, 106, 8, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 114, 8, 7, 1, 7,
- 1, 7, 1, 7, 1, 7, 3, 7, 120, 8, 7, 1, 7, 1, 7, 1, 7, 5, 7, 125, 8, 7, 10,
- 7, 12, 7, 128, 9, 7, 1, 8, 3, 8, 131, 8, 8, 1, 8, 1, 8, 1, 8, 3, 8, 136,
- 8, 8, 1, 8, 3, 8, 139, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8,
- 147, 8, 8, 1, 8, 3, 8, 150, 8, 8, 1, 8, 1, 8, 1, 8, 3, 8, 155, 8, 8, 1,
- 8, 3, 8, 158, 8, 8, 1, 8, 1, 8, 3, 8, 162, 8, 8, 1, 8, 1, 8, 1, 8, 5, 8,
- 167, 8, 8, 10, 8, 12, 8, 170, 9, 8, 1, 8, 1, 8, 3, 8, 174, 8, 8, 1, 8,
- 3, 8, 177, 8, 8, 1, 8, 1, 8, 3, 8, 181, 8, 8, 1, 9, 1, 9, 1, 9, 5, 9, 186,
- 8, 9, 10, 9, 12, 9, 189, 9, 9, 1, 10, 1, 10, 1, 10, 5, 10, 194, 8, 10,
- 10, 10, 12, 10, 197, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1,
- 11, 1, 11, 5, 11, 207, 8, 11, 10, 11, 12, 11, 210, 9, 11, 1, 12, 3, 12,
- 213, 8, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1,
- 13, 1, 13, 5, 13, 225, 8, 13, 10, 13, 12, 13, 228, 9, 13, 1, 14, 3, 14,
- 231, 8, 14, 1, 14, 1, 14, 1, 15, 3, 15, 236, 8, 15, 1, 15, 1, 15, 1, 15,
- 3, 15, 241, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 249,
- 8, 15, 1, 15, 0, 3, 8, 10, 14, 16, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20,
- 22, 24, 26, 28, 30, 0, 3, 1, 0, 1, 7, 1, 0, 23, 25, 2, 0, 18, 18, 22, 22,
- 281, 0, 32, 1, 0, 0, 0, 2, 35, 1, 0, 0, 0, 4, 43, 1, 0, 0, 0, 6, 51, 1,
- 0, 0, 0, 8, 59, 1, 0, 0, 0, 10, 70, 1, 0, 0, 0, 12, 97, 1, 0, 0, 0, 14,
- 99, 1, 0, 0, 0, 16, 180, 1, 0, 0, 0, 18, 182, 1, 0, 0, 0, 20, 190, 1, 0,
- 0, 0, 22, 198, 1, 0, 0, 0, 24, 212, 1, 0, 0, 0, 26, 216, 1, 0, 0, 0, 28,
- 230, 1, 0, 0, 0, 30, 248, 1, 0, 0, 0, 32, 33, 3, 2, 1, 0, 33, 34, 5, 0,
- 0, 1, 34, 1, 1, 0, 0, 0, 35, 41, 3, 4, 2, 0, 36, 37, 5, 20, 0, 0, 37, 38,
- 3, 4, 2, 0, 38, 39, 5, 21, 0, 0, 39, 40, 3, 2, 1, 0, 40, 42, 1, 0, 0, 0,
- 41, 36, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 3, 1, 0, 0, 0, 43, 48, 3, 6,
- 3, 0, 44, 45, 5, 9, 0, 0, 45, 47, 3, 6, 3, 0, 46, 44, 1, 0, 0, 0, 47, 50,
- 1, 0, 0, 0, 48, 46, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 5, 1, 0, 0, 0,
- 50, 48, 1, 0, 0, 0, 51, 56, 3, 8, 4, 0, 52, 53, 5, 8, 0, 0, 53, 55, 3,
- 8, 4, 0, 54, 52, 1, 0, 0, 0, 55, 58, 1, 0, 0, 0, 56, 54, 1, 0, 0, 0, 56,
- 57, 1, 0, 0, 0, 57, 7, 1, 0, 0, 0, 58, 56, 1, 0, 0, 0, 59, 60, 6, 4, -1,
- 0, 60, 61, 3, 10, 5, 0, 61, 67, 1, 0, 0, 0, 62, 63, 10, 1, 0, 0, 63, 64,
- 7, 0, 0, 0, 64, 66, 3, 8, 4, 2, 65, 62, 1, 0, 0, 0, 66, 69, 1, 0, 0, 0,
- 67, 65, 1, 0, 0, 0, 67, 68, 1, 0, 0, 0, 68, 9, 1, 0, 0, 0, 69, 67, 1, 0,
- 0, 0, 70, 71, 6, 5, -1, 0, 71, 72, 3, 12, 6, 0, 72, 81, 1, 0, 0, 0, 73,
- 74, 10, 2, 0, 0, 74, 75, 7, 1, 0, 0, 75, 80, 3, 10, 5, 3, 76, 77, 10, 1,
- 0, 0, 77, 78, 7, 2, 0, 0, 78, 80, 3, 10, 5, 2, 79, 73, 1, 0, 0, 0, 79,
- 76, 1, 0, 0, 0, 80, 83, 1, 0, 0, 0, 81, 79, 1, 0, 0, 0, 81, 82, 1, 0, 0,
- 0, 82, 11, 1, 0, 0, 0, 83, 81, 1, 0, 0, 0, 84, 98, 3, 14, 7, 0, 85, 87,
- 5, 19, 0, 0, 86, 85, 1, 0, 0, 0, 87, 88, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0,
- 88, 89, 1, 0, 0, 0, 89, 90, 1, 0, 0, 0, 90, 98, 3, 14, 7, 0, 91, 93, 5,
- 18, 0, 0, 92, 91, 1, 0, 0, 0, 93, 94, 1, 0, 0, 0, 94, 92, 1, 0, 0, 0, 94,
- 95, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 98, 3, 14, 7, 0, 97, 84, 1, 0,
- 0, 0, 97, 86, 1, 0, 0, 0, 97, 92, 1, 0, 0, 0, 98, 13, 1, 0, 0, 0, 99, 100,
- 6, 7, -1, 0, 100, 101, 3, 16, 8, 0, 101, 126, 1, 0, 0, 0, 102, 103, 10,
- 3, 0, 0, 103, 105, 5, 16, 0, 0, 104, 106, 5, 20, 0, 0, 105, 104, 1, 0,
- 0, 0, 105, 106, 1, 0, 0, 0, 106, 107, 1, 0, 0, 0, 107, 125, 5, 36, 0, 0,
- 108, 109, 10, 2, 0, 0, 109, 110, 5, 16, 0, 0, 110, 111, 5, 36, 0, 0, 111,
- 113, 5, 14, 0, 0, 112, 114, 3, 18, 9, 0, 113, 112, 1, 0, 0, 0, 113, 114,
- 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115, 125, 5, 15, 0, 0, 116, 117, 10,
- 1, 0, 0, 117, 119, 5, 10, 0, 0, 118, 120, 5, 20, 0, 0, 119, 118, 1, 0,
- 0, 0, 119, 120, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 3, 2, 1, 0,
- 122, 123, 5, 11, 0, 0, 123, 125, 1, 0, 0, 0, 124, 102, 1, 0, 0, 0, 124,
- 108, 1, 0, 0, 0, 124, 116, 1, 0, 0, 0, 125, 128, 1, 0, 0, 0, 126, 124,
- 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 15, 1, 0, 0, 0, 128, 126, 1, 0,
- 0, 0, 129, 131, 5, 16, 0, 0, 130, 129, 1, 0, 0, 0, 130, 131, 1, 0, 0, 0,
- 131, 132, 1, 0, 0, 0, 132, 138, 5, 36, 0, 0, 133, 135, 5, 14, 0, 0, 134,
- 136, 3, 18, 9, 0, 135, 134, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 137,
- 1, 0, 0, 0, 137, 139, 5, 15, 0, 0, 138, 133, 1, 0, 0, 0, 138, 139, 1, 0,
- 0, 0, 139, 181, 1, 0, 0, 0, 140, 141, 5, 14, 0, 0, 141, 142, 3, 2, 1, 0,
- 142, 143, 5, 15, 0, 0, 143, 181, 1, 0, 0, 0, 144, 146, 5, 10, 0, 0, 145,
- 147, 3, 20, 10, 0, 146, 145, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 149,
- 1, 0, 0, 0, 148, 150, 5, 17, 0, 0, 149, 148, 1, 0, 0, 0, 149, 150, 1, 0,
- 0, 0, 150, 151, 1, 0, 0, 0, 151, 181, 5, 11, 0, 0, 152, 154, 5, 12, 0,
- 0, 153, 155, 3, 26, 13, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0,
- 155, 157, 1, 0, 0, 0, 156, 158, 5, 17, 0, 0, 157, 156, 1, 0, 0, 0, 157,
- 158, 1, 0, 0, 0, 158, 159, 1, 0, 0, 0, 159, 181, 5, 13, 0, 0, 160, 162,
- 5, 16, 0, 0, 161, 160, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 1, 0,
- 0, 0, 163, 168, 5, 36, 0, 0, 164, 165, 5, 16, 0, 0, 165, 167, 5, 36, 0,
- 0, 166, 164, 1, 0, 0, 0, 167, 170, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 168,
- 169, 1, 0, 0, 0, 169, 171, 1, 0, 0, 0, 170, 168, 1, 0, 0, 0, 171, 173,
- 5, 12, 0, 0, 172, 174, 3, 22, 11, 0, 173, 172, 1, 0, 0, 0, 173, 174, 1,
- 0, 0, 0, 174, 176, 1, 0, 0, 0, 175, 177, 5, 17, 0, 0, 176, 175, 1, 0, 0,
- 0, 176, 177, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 181, 5, 13, 0, 0, 179,
- 181, 3, 30, 15, 0, 180, 130, 1, 0, 0, 0, 180, 140, 1, 0, 0, 0, 180, 144,
- 1, 0, 0, 0, 180, 152, 1, 0, 0, 0, 180, 161, 1, 0, 0, 0, 180, 179, 1, 0,
- 0, 0, 181, 17, 1, 0, 0, 0, 182, 187, 3, 2, 1, 0, 183, 184, 5, 17, 0, 0,
- 184, 186, 3, 2, 1, 0, 185, 183, 1, 0, 0, 0, 186, 189, 1, 0, 0, 0, 187,
- 185, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 19, 1, 0, 0, 0, 189, 187, 1,
- 0, 0, 0, 190, 195, 3, 28, 14, 0, 191, 192, 5, 17, 0, 0, 192, 194, 3, 28,
- 14, 0, 193, 191, 1, 0, 0, 0, 194, 197, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0,
- 195, 196, 1, 0, 0, 0, 196, 21, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 199,
- 3, 24, 12, 0, 199, 200, 5, 21, 0, 0, 200, 208, 3, 2, 1, 0, 201, 202, 5,
- 17, 0, 0, 202, 203, 3, 24, 12, 0, 203, 204, 5, 21, 0, 0, 204, 205, 3, 2,
- 1, 0, 205, 207, 1, 0, 0, 0, 206, 201, 1, 0, 0, 0, 207, 210, 1, 0, 0, 0,
- 208, 206, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 23, 1, 0, 0, 0, 210, 208,
- 1, 0, 0, 0, 211, 213, 5, 20, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0,
- 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 36, 0, 0, 215, 25, 1, 0, 0, 0,
- 216, 217, 3, 28, 14, 0, 217, 218, 5, 21, 0, 0, 218, 226, 3, 2, 1, 0, 219,
- 220, 5, 17, 0, 0, 220, 221, 3, 28, 14, 0, 221, 222, 5, 21, 0, 0, 222, 223,
- 3, 2, 1, 0, 223, 225, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 225, 228, 1, 0,
- 0, 0, 226, 224, 1, 0, 0, 0, 226, 227, 1, 0, 0, 0, 227, 27, 1, 0, 0, 0,
- 228, 226, 1, 0, 0, 0, 229, 231, 5, 20, 0, 0, 230, 229, 1, 0, 0, 0, 230,
- 231, 1, 0, 0, 0, 231, 232, 1, 0, 0, 0, 232, 233, 3, 2, 1, 0, 233, 29, 1,
- 0, 0, 0, 234, 236, 5, 18, 0, 0, 235, 234, 1, 0, 0, 0, 235, 236, 1, 0, 0,
- 0, 236, 237, 1, 0, 0, 0, 237, 249, 5, 32, 0, 0, 238, 249, 5, 33, 0, 0,
- 239, 241, 5, 18, 0, 0, 240, 239, 1, 0, 0, 0, 240, 241, 1, 0, 0, 0, 241,
- 242, 1, 0, 0, 0, 242, 249, 5, 31, 0, 0, 243, 249, 5, 34, 0, 0, 244, 249,
- 5, 35, 0, 0, 245, 249, 5, 26, 0, 0, 246, 249, 5, 27, 0, 0, 247, 249, 5,
- 28, 0, 0, 248, 235, 1, 0, 0, 0, 248, 238, 1, 0, 0, 0, 248, 240, 1, 0, 0,
- 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 248, 245, 1, 0, 0, 0, 248,
- 246, 1, 0, 0, 0, 248, 247, 1, 0, 0, 0, 249, 31, 1, 0, 0, 0, 35, 41, 48,
- 56, 67, 79, 81, 88, 94, 97, 105, 113, 119, 124, 126, 130, 135, 138, 146,
- 149, 154, 157, 161, 168, 173, 176, 180, 187, 195, 208, 212, 226, 230, 235,
- 240, 248,
-}
- deserializer := antlr.NewATNDeserializer(nil)
- staticData.atn = deserializer.Deserialize(staticData.serializedATN)
- atn := staticData.atn
- staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
- decisionToDFA := staticData.decisionToDFA
- for index, state := range atn.DecisionToState {
- decisionToDFA[index] = antlr.NewDFA(state, index)
- }
-}
-
-// CELParserInit initializes any static state used to implement CELParser. By default the
-// static state used to implement the parser is lazily initialized during the first call to
-// NewCELParser(). You can call this function if you wish to initialize the static state ahead
-// of time.
-func CELParserInit() {
- staticData := &CELParserStaticData
- staticData.once.Do(celParserInit)
-}
-
-// NewCELParser produces a new parser instance for the optional input antlr.TokenStream.
-func NewCELParser(input antlr.TokenStream) *CELParser {
- CELParserInit()
- this := new(CELParser)
- this.BaseParser = antlr.NewBaseParser(input)
- staticData := &CELParserStaticData
- this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache)
- this.RuleNames = staticData.RuleNames
- this.LiteralNames = staticData.LiteralNames
- this.SymbolicNames = staticData.SymbolicNames
- this.GrammarFileName = "CEL.g4"
-
- return this
-}
-
-
-// CELParser tokens.
-const (
- CELParserEOF = antlr.TokenEOF
- CELParserEQUALS = 1
- CELParserNOT_EQUALS = 2
- CELParserIN = 3
- CELParserLESS = 4
- CELParserLESS_EQUALS = 5
- CELParserGREATER_EQUALS = 6
- CELParserGREATER = 7
- CELParserLOGICAL_AND = 8
- CELParserLOGICAL_OR = 9
- CELParserLBRACKET = 10
- CELParserRPRACKET = 11
- CELParserLBRACE = 12
- CELParserRBRACE = 13
- CELParserLPAREN = 14
- CELParserRPAREN = 15
- CELParserDOT = 16
- CELParserCOMMA = 17
- CELParserMINUS = 18
- CELParserEXCLAM = 19
- CELParserQUESTIONMARK = 20
- CELParserCOLON = 21
- CELParserPLUS = 22
- CELParserSTAR = 23
- CELParserSLASH = 24
- CELParserPERCENT = 25
- CELParserCEL_TRUE = 26
- CELParserCEL_FALSE = 27
- CELParserNUL = 28
- CELParserWHITESPACE = 29
- CELParserCOMMENT = 30
- CELParserNUM_FLOAT = 31
- CELParserNUM_INT = 32
- CELParserNUM_UINT = 33
- CELParserSTRING = 34
- CELParserBYTES = 35
- CELParserIDENTIFIER = 36
-)
-
-// CELParser rules.
-const (
- CELParserRULE_start = 0
- CELParserRULE_expr = 1
- CELParserRULE_conditionalOr = 2
- CELParserRULE_conditionalAnd = 3
- CELParserRULE_relation = 4
- CELParserRULE_calc = 5
- CELParserRULE_unary = 6
- CELParserRULE_member = 7
- CELParserRULE_primary = 8
- CELParserRULE_exprList = 9
- CELParserRULE_listInit = 10
- CELParserRULE_fieldInitializerList = 11
- CELParserRULE_optField = 12
- CELParserRULE_mapInitializerList = 13
- CELParserRULE_optExpr = 14
- CELParserRULE_literal = 15
-)
-
-// IStartContext is an interface to support dynamic dispatch.
-type IStartContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetE returns the e rule contexts.
- GetE() IExprContext
-
-
- // SetE sets the e rule contexts.
- SetE(IExprContext)
-
-
- // Getter signatures
- EOF() antlr.TerminalNode
- Expr() IExprContext
-
- // IsStartContext differentiates from other interfaces.
- IsStartContext()
-}
-
-type StartContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- e IExprContext
-}
-
-func NewEmptyStartContext() *StartContext {
- var p = new(StartContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_start
- return p
-}
-
-func InitEmptyStartContext(p *StartContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_start
-}
-
-func (*StartContext) IsStartContext() {}
-
-func NewStartContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *StartContext {
- var p = new(StartContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_start
-
- return p
-}
-
-func (s *StartContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *StartContext) GetE() IExprContext { return s.e }
-
-
-func (s *StartContext) SetE(v IExprContext) { s.e = v }
-
-
-func (s *StartContext) EOF() antlr.TerminalNode {
- return s.GetToken(CELParserEOF, 0)
-}
-
-func (s *StartContext) Expr() IExprContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprContext)
-}
-
-func (s *StartContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *StartContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *StartContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterStart(s)
- }
-}
-
-func (s *StartContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitStart(s)
- }
-}
-
-func (s *StartContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitStart(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) Start_() (localctx IStartContext) {
- localctx = NewStartContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 0, CELParserRULE_start)
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(32)
-
- var _x = p.Expr()
-
-
- localctx.(*StartContext).e = _x
- }
- {
- p.SetState(33)
- p.Match(CELParserEOF)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IExprContext is an interface to support dynamic dispatch.
-type IExprContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetOp returns the op token.
- GetOp() antlr.Token
-
-
- // SetOp sets the op token.
- SetOp(antlr.Token)
-
-
- // GetE returns the e rule contexts.
- GetE() IConditionalOrContext
-
- // GetE1 returns the e1 rule contexts.
- GetE1() IConditionalOrContext
-
- // GetE2 returns the e2 rule contexts.
- GetE2() IExprContext
-
-
- // SetE sets the e rule contexts.
- SetE(IConditionalOrContext)
-
- // SetE1 sets the e1 rule contexts.
- SetE1(IConditionalOrContext)
-
- // SetE2 sets the e2 rule contexts.
- SetE2(IExprContext)
-
-
- // Getter signatures
- AllConditionalOr() []IConditionalOrContext
- ConditionalOr(i int) IConditionalOrContext
- COLON() antlr.TerminalNode
- QUESTIONMARK() antlr.TerminalNode
- Expr() IExprContext
-
- // IsExprContext differentiates from other interfaces.
- IsExprContext()
-}
-
-type ExprContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- e IConditionalOrContext
- op antlr.Token
- e1 IConditionalOrContext
- e2 IExprContext
-}
-
-func NewEmptyExprContext() *ExprContext {
- var p = new(ExprContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_expr
- return p
-}
-
-func InitEmptyExprContext(p *ExprContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_expr
-}
-
-func (*ExprContext) IsExprContext() {}
-
-func NewExprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExprContext {
- var p = new(ExprContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_expr
-
- return p
-}
-
-func (s *ExprContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *ExprContext) GetOp() antlr.Token { return s.op }
-
-
-func (s *ExprContext) SetOp(v antlr.Token) { s.op = v }
-
-
-func (s *ExprContext) GetE() IConditionalOrContext { return s.e }
-
-func (s *ExprContext) GetE1() IConditionalOrContext { return s.e1 }
-
-func (s *ExprContext) GetE2() IExprContext { return s.e2 }
-
-
-func (s *ExprContext) SetE(v IConditionalOrContext) { s.e = v }
-
-func (s *ExprContext) SetE1(v IConditionalOrContext) { s.e1 = v }
-
-func (s *ExprContext) SetE2(v IExprContext) { s.e2 = v }
-
-
-func (s *ExprContext) AllConditionalOr() []IConditionalOrContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IConditionalOrContext); ok {
- len++
- }
- }
-
- tst := make([]IConditionalOrContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IConditionalOrContext); ok {
- tst[i] = t.(IConditionalOrContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *ExprContext) ConditionalOr(i int) IConditionalOrContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IConditionalOrContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IConditionalOrContext)
-}
-
-func (s *ExprContext) COLON() antlr.TerminalNode {
- return s.GetToken(CELParserCOLON, 0)
-}
-
-func (s *ExprContext) QUESTIONMARK() antlr.TerminalNode {
- return s.GetToken(CELParserQUESTIONMARK, 0)
-}
-
-func (s *ExprContext) Expr() IExprContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprContext)
-}
-
-func (s *ExprContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *ExprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *ExprContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterExpr(s)
- }
-}
-
-func (s *ExprContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitExpr(s)
- }
-}
-
-func (s *ExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitExpr(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) Expr() (localctx IExprContext) {
- localctx = NewExprContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 2, CELParserRULE_expr)
- var _la int
-
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(35)
-
- var _x = p.ConditionalOr()
-
-
- localctx.(*ExprContext).e = _x
- }
- p.SetState(41)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserQUESTIONMARK {
- {
- p.SetState(36)
-
- var _m = p.Match(CELParserQUESTIONMARK)
-
- localctx.(*ExprContext).op = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- {
- p.SetState(37)
-
- var _x = p.ConditionalOr()
-
-
- localctx.(*ExprContext).e1 = _x
- }
- {
- p.SetState(38)
- p.Match(CELParserCOLON)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- {
- p.SetState(39)
-
- var _x = p.Expr()
-
-
- localctx.(*ExprContext).e2 = _x
- }
-
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IConditionalOrContext is an interface to support dynamic dispatch.
-type IConditionalOrContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetS9 returns the s9 token.
- GetS9() antlr.Token
-
-
- // SetS9 sets the s9 token.
- SetS9(antlr.Token)
-
-
- // GetOps returns the ops token list.
- GetOps() []antlr.Token
-
-
- // SetOps sets the ops token list.
- SetOps([]antlr.Token)
-
-
- // GetE returns the e rule contexts.
- GetE() IConditionalAndContext
-
- // Get_conditionalAnd returns the _conditionalAnd rule contexts.
- Get_conditionalAnd() IConditionalAndContext
-
-
- // SetE sets the e rule contexts.
- SetE(IConditionalAndContext)
-
- // Set_conditionalAnd sets the _conditionalAnd rule contexts.
- Set_conditionalAnd(IConditionalAndContext)
-
-
- // GetE1 returns the e1 rule context list.
- GetE1() []IConditionalAndContext
-
-
- // SetE1 sets the e1 rule context list.
- SetE1([]IConditionalAndContext)
-
-
- // Getter signatures
- AllConditionalAnd() []IConditionalAndContext
- ConditionalAnd(i int) IConditionalAndContext
- AllLOGICAL_OR() []antlr.TerminalNode
- LOGICAL_OR(i int) antlr.TerminalNode
-
- // IsConditionalOrContext differentiates from other interfaces.
- IsConditionalOrContext()
-}
-
-type ConditionalOrContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- e IConditionalAndContext
- s9 antlr.Token
- ops []antlr.Token
- _conditionalAnd IConditionalAndContext
- e1 []IConditionalAndContext
-}
-
-func NewEmptyConditionalOrContext() *ConditionalOrContext {
- var p = new(ConditionalOrContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_conditionalOr
- return p
-}
-
-func InitEmptyConditionalOrContext(p *ConditionalOrContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_conditionalOr
-}
-
-func (*ConditionalOrContext) IsConditionalOrContext() {}
-
-func NewConditionalOrContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ConditionalOrContext {
- var p = new(ConditionalOrContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_conditionalOr
-
- return p
-}
-
-func (s *ConditionalOrContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *ConditionalOrContext) GetS9() antlr.Token { return s.s9 }
-
-
-func (s *ConditionalOrContext) SetS9(v antlr.Token) { s.s9 = v }
-
-
-func (s *ConditionalOrContext) GetOps() []antlr.Token { return s.ops }
-
-
-func (s *ConditionalOrContext) SetOps(v []antlr.Token) { s.ops = v }
-
-
-func (s *ConditionalOrContext) GetE() IConditionalAndContext { return s.e }
-
-func (s *ConditionalOrContext) Get_conditionalAnd() IConditionalAndContext { return s._conditionalAnd }
-
-
-func (s *ConditionalOrContext) SetE(v IConditionalAndContext) { s.e = v }
-
-func (s *ConditionalOrContext) Set_conditionalAnd(v IConditionalAndContext) { s._conditionalAnd = v }
-
-
-func (s *ConditionalOrContext) GetE1() []IConditionalAndContext { return s.e1 }
-
-
-func (s *ConditionalOrContext) SetE1(v []IConditionalAndContext) { s.e1 = v }
-
-
-func (s *ConditionalOrContext) AllConditionalAnd() []IConditionalAndContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IConditionalAndContext); ok {
- len++
- }
- }
-
- tst := make([]IConditionalAndContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IConditionalAndContext); ok {
- tst[i] = t.(IConditionalAndContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *ConditionalOrContext) ConditionalAnd(i int) IConditionalAndContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IConditionalAndContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IConditionalAndContext)
-}
-
-func (s *ConditionalOrContext) AllLOGICAL_OR() []antlr.TerminalNode {
- return s.GetTokens(CELParserLOGICAL_OR)
-}
-
-func (s *ConditionalOrContext) LOGICAL_OR(i int) antlr.TerminalNode {
- return s.GetToken(CELParserLOGICAL_OR, i)
-}
-
-func (s *ConditionalOrContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *ConditionalOrContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *ConditionalOrContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterConditionalOr(s)
- }
-}
-
-func (s *ConditionalOrContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitConditionalOr(s)
- }
-}
-
-func (s *ConditionalOrContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitConditionalOr(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) ConditionalOr() (localctx IConditionalOrContext) {
- localctx = NewConditionalOrContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 4, CELParserRULE_conditionalOr)
- var _la int
-
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(43)
-
- var _x = p.ConditionalAnd()
-
-
- localctx.(*ConditionalOrContext).e = _x
- }
- p.SetState(48)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- for _la == CELParserLOGICAL_OR {
- {
- p.SetState(44)
-
- var _m = p.Match(CELParserLOGICAL_OR)
-
- localctx.(*ConditionalOrContext).s9 = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*ConditionalOrContext).ops = append(localctx.(*ConditionalOrContext).ops, localctx.(*ConditionalOrContext).s9)
- {
- p.SetState(45)
-
- var _x = p.ConditionalAnd()
-
-
- localctx.(*ConditionalOrContext)._conditionalAnd = _x
- }
- localctx.(*ConditionalOrContext).e1 = append(localctx.(*ConditionalOrContext).e1, localctx.(*ConditionalOrContext)._conditionalAnd)
-
-
- p.SetState(50)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IConditionalAndContext is an interface to support dynamic dispatch.
-type IConditionalAndContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetS8 returns the s8 token.
- GetS8() antlr.Token
-
-
- // SetS8 sets the s8 token.
- SetS8(antlr.Token)
-
-
- // GetOps returns the ops token list.
- GetOps() []antlr.Token
-
-
- // SetOps sets the ops token list.
- SetOps([]antlr.Token)
-
-
- // GetE returns the e rule contexts.
- GetE() IRelationContext
-
- // Get_relation returns the _relation rule contexts.
- Get_relation() IRelationContext
-
-
- // SetE sets the e rule contexts.
- SetE(IRelationContext)
-
- // Set_relation sets the _relation rule contexts.
- Set_relation(IRelationContext)
-
-
- // GetE1 returns the e1 rule context list.
- GetE1() []IRelationContext
-
-
- // SetE1 sets the e1 rule context list.
- SetE1([]IRelationContext)
-
-
- // Getter signatures
- AllRelation() []IRelationContext
- Relation(i int) IRelationContext
- AllLOGICAL_AND() []antlr.TerminalNode
- LOGICAL_AND(i int) antlr.TerminalNode
-
- // IsConditionalAndContext differentiates from other interfaces.
- IsConditionalAndContext()
-}
-
-type ConditionalAndContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- e IRelationContext
- s8 antlr.Token
- ops []antlr.Token
- _relation IRelationContext
- e1 []IRelationContext
-}
-
-func NewEmptyConditionalAndContext() *ConditionalAndContext {
- var p = new(ConditionalAndContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_conditionalAnd
- return p
-}
-
-func InitEmptyConditionalAndContext(p *ConditionalAndContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_conditionalAnd
-}
-
-func (*ConditionalAndContext) IsConditionalAndContext() {}
-
-func NewConditionalAndContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ConditionalAndContext {
- var p = new(ConditionalAndContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_conditionalAnd
-
- return p
-}
-
-func (s *ConditionalAndContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *ConditionalAndContext) GetS8() antlr.Token { return s.s8 }
-
-
-func (s *ConditionalAndContext) SetS8(v antlr.Token) { s.s8 = v }
-
-
-func (s *ConditionalAndContext) GetOps() []antlr.Token { return s.ops }
-
-
-func (s *ConditionalAndContext) SetOps(v []antlr.Token) { s.ops = v }
-
-
-func (s *ConditionalAndContext) GetE() IRelationContext { return s.e }
-
-func (s *ConditionalAndContext) Get_relation() IRelationContext { return s._relation }
-
-
-func (s *ConditionalAndContext) SetE(v IRelationContext) { s.e = v }
-
-func (s *ConditionalAndContext) Set_relation(v IRelationContext) { s._relation = v }
-
-
-func (s *ConditionalAndContext) GetE1() []IRelationContext { return s.e1 }
-
-
-func (s *ConditionalAndContext) SetE1(v []IRelationContext) { s.e1 = v }
-
-
-func (s *ConditionalAndContext) AllRelation() []IRelationContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IRelationContext); ok {
- len++
- }
- }
-
- tst := make([]IRelationContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IRelationContext); ok {
- tst[i] = t.(IRelationContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *ConditionalAndContext) Relation(i int) IRelationContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IRelationContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IRelationContext)
-}
-
-func (s *ConditionalAndContext) AllLOGICAL_AND() []antlr.TerminalNode {
- return s.GetTokens(CELParserLOGICAL_AND)
-}
-
-func (s *ConditionalAndContext) LOGICAL_AND(i int) antlr.TerminalNode {
- return s.GetToken(CELParserLOGICAL_AND, i)
-}
-
-func (s *ConditionalAndContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *ConditionalAndContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *ConditionalAndContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterConditionalAnd(s)
- }
-}
-
-func (s *ConditionalAndContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitConditionalAnd(s)
- }
-}
-
-func (s *ConditionalAndContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitConditionalAnd(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) ConditionalAnd() (localctx IConditionalAndContext) {
- localctx = NewConditionalAndContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 6, CELParserRULE_conditionalAnd)
- var _la int
-
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(51)
-
- var _x = p.relation(0)
-
- localctx.(*ConditionalAndContext).e = _x
- }
- p.SetState(56)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- for _la == CELParserLOGICAL_AND {
- {
- p.SetState(52)
-
- var _m = p.Match(CELParserLOGICAL_AND)
-
- localctx.(*ConditionalAndContext).s8 = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*ConditionalAndContext).ops = append(localctx.(*ConditionalAndContext).ops, localctx.(*ConditionalAndContext).s8)
- {
- p.SetState(53)
-
- var _x = p.relation(0)
-
- localctx.(*ConditionalAndContext)._relation = _x
- }
- localctx.(*ConditionalAndContext).e1 = append(localctx.(*ConditionalAndContext).e1, localctx.(*ConditionalAndContext)._relation)
-
-
- p.SetState(58)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IRelationContext is an interface to support dynamic dispatch.
-type IRelationContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetOp returns the op token.
- GetOp() antlr.Token
-
-
- // SetOp sets the op token.
- SetOp(antlr.Token)
-
-
- // Getter signatures
- Calc() ICalcContext
- AllRelation() []IRelationContext
- Relation(i int) IRelationContext
- LESS() antlr.TerminalNode
- LESS_EQUALS() antlr.TerminalNode
- GREATER_EQUALS() antlr.TerminalNode
- GREATER() antlr.TerminalNode
- EQUALS() antlr.TerminalNode
- NOT_EQUALS() antlr.TerminalNode
- IN() antlr.TerminalNode
-
- // IsRelationContext differentiates from other interfaces.
- IsRelationContext()
-}
-
-type RelationContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- op antlr.Token
-}
-
-func NewEmptyRelationContext() *RelationContext {
- var p = new(RelationContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_relation
- return p
-}
-
-func InitEmptyRelationContext(p *RelationContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_relation
-}
-
-func (*RelationContext) IsRelationContext() {}
-
-func NewRelationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *RelationContext {
- var p = new(RelationContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_relation
-
- return p
-}
-
-func (s *RelationContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *RelationContext) GetOp() antlr.Token { return s.op }
-
-
-func (s *RelationContext) SetOp(v antlr.Token) { s.op = v }
-
-
-func (s *RelationContext) Calc() ICalcContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(ICalcContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(ICalcContext)
-}
-
-func (s *RelationContext) AllRelation() []IRelationContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IRelationContext); ok {
- len++
- }
- }
-
- tst := make([]IRelationContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IRelationContext); ok {
- tst[i] = t.(IRelationContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *RelationContext) Relation(i int) IRelationContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IRelationContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IRelationContext)
-}
-
-func (s *RelationContext) LESS() antlr.TerminalNode {
- return s.GetToken(CELParserLESS, 0)
-}
-
-func (s *RelationContext) LESS_EQUALS() antlr.TerminalNode {
- return s.GetToken(CELParserLESS_EQUALS, 0)
-}
-
-func (s *RelationContext) GREATER_EQUALS() antlr.TerminalNode {
- return s.GetToken(CELParserGREATER_EQUALS, 0)
-}
-
-func (s *RelationContext) GREATER() antlr.TerminalNode {
- return s.GetToken(CELParserGREATER, 0)
-}
-
-func (s *RelationContext) EQUALS() antlr.TerminalNode {
- return s.GetToken(CELParserEQUALS, 0)
-}
-
-func (s *RelationContext) NOT_EQUALS() antlr.TerminalNode {
- return s.GetToken(CELParserNOT_EQUALS, 0)
-}
-
-func (s *RelationContext) IN() antlr.TerminalNode {
- return s.GetToken(CELParserIN, 0)
-}
-
-func (s *RelationContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *RelationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *RelationContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterRelation(s)
- }
-}
-
-func (s *RelationContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitRelation(s)
- }
-}
-
-func (s *RelationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitRelation(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-
-func (p *CELParser) Relation() (localctx IRelationContext) {
- return p.relation(0)
-}
-
-func (p *CELParser) relation(_p int) (localctx IRelationContext) {
- var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext()
-
- _parentState := p.GetState()
- localctx = NewRelationContext(p, p.GetParserRuleContext(), _parentState)
- var _prevctx IRelationContext = localctx
- var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning.
- _startState := 8
- p.EnterRecursionRule(localctx, 8, CELParserRULE_relation, _p)
- var _la int
-
- var _alt int
-
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(60)
- p.calc(0)
- }
-
- p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1))
- p.SetState(67)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 3, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
- if _alt == 1 {
- if p.GetParseListeners() != nil {
- p.TriggerExitRuleEvent()
- }
- _prevctx = localctx
- localctx = NewRelationContext(p, _parentctx, _parentState)
- p.PushNewRecursionContext(localctx, _startState, CELParserRULE_relation)
- p.SetState(62)
-
- if !(p.Precpred(p.GetParserRuleContext(), 1)) {
- p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", ""))
- goto errorExit
- }
- {
- p.SetState(63)
-
- var _lt = p.GetTokenStream().LT(1)
-
- localctx.(*RelationContext).op = _lt
-
- _la = p.GetTokenStream().LA(1)
-
- if !(((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 254) != 0)) {
- var _ri = p.GetErrorHandler().RecoverInline(p)
-
- localctx.(*RelationContext).op = _ri
- } else {
- p.GetErrorHandler().ReportMatch(p)
- p.Consume()
- }
- }
- {
- p.SetState(64)
- p.relation(2)
- }
-
-
- }
- p.SetState(69)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 3, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- }
-
-
-
- errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.UnrollRecursionContexts(_parentctx)
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// ICalcContext is an interface to support dynamic dispatch.
-type ICalcContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetOp returns the op token.
- GetOp() antlr.Token
-
-
- // SetOp sets the op token.
- SetOp(antlr.Token)
-
-
- // Getter signatures
- Unary() IUnaryContext
- AllCalc() []ICalcContext
- Calc(i int) ICalcContext
- STAR() antlr.TerminalNode
- SLASH() antlr.TerminalNode
- PERCENT() antlr.TerminalNode
- PLUS() antlr.TerminalNode
- MINUS() antlr.TerminalNode
-
- // IsCalcContext differentiates from other interfaces.
- IsCalcContext()
-}
-
-type CalcContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- op antlr.Token
-}
-
-func NewEmptyCalcContext() *CalcContext {
- var p = new(CalcContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_calc
- return p
-}
-
-func InitEmptyCalcContext(p *CalcContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_calc
-}
-
-func (*CalcContext) IsCalcContext() {}
-
-func NewCalcContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *CalcContext {
- var p = new(CalcContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_calc
-
- return p
-}
-
-func (s *CalcContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *CalcContext) GetOp() antlr.Token { return s.op }
-
-
-func (s *CalcContext) SetOp(v antlr.Token) { s.op = v }
-
-
-func (s *CalcContext) Unary() IUnaryContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IUnaryContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IUnaryContext)
-}
-
-func (s *CalcContext) AllCalc() []ICalcContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(ICalcContext); ok {
- len++
- }
- }
-
- tst := make([]ICalcContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(ICalcContext); ok {
- tst[i] = t.(ICalcContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *CalcContext) Calc(i int) ICalcContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(ICalcContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(ICalcContext)
-}
-
-func (s *CalcContext) STAR() antlr.TerminalNode {
- return s.GetToken(CELParserSTAR, 0)
-}
-
-func (s *CalcContext) SLASH() antlr.TerminalNode {
- return s.GetToken(CELParserSLASH, 0)
-}
-
-func (s *CalcContext) PERCENT() antlr.TerminalNode {
- return s.GetToken(CELParserPERCENT, 0)
-}
-
-func (s *CalcContext) PLUS() antlr.TerminalNode {
- return s.GetToken(CELParserPLUS, 0)
-}
-
-func (s *CalcContext) MINUS() antlr.TerminalNode {
- return s.GetToken(CELParserMINUS, 0)
-}
-
-func (s *CalcContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *CalcContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *CalcContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterCalc(s)
- }
-}
-
-func (s *CalcContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitCalc(s)
- }
-}
-
-func (s *CalcContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitCalc(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-
-func (p *CELParser) Calc() (localctx ICalcContext) {
- return p.calc(0)
-}
-
-func (p *CELParser) calc(_p int) (localctx ICalcContext) {
- var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext()
-
- _parentState := p.GetState()
- localctx = NewCalcContext(p, p.GetParserRuleContext(), _parentState)
- var _prevctx ICalcContext = localctx
- var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning.
- _startState := 10
- p.EnterRecursionRule(localctx, 10, CELParserRULE_calc, _p)
- var _la int
-
- var _alt int
-
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(71)
- p.Unary()
- }
-
- p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1))
- p.SetState(81)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 5, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
- if _alt == 1 {
- if p.GetParseListeners() != nil {
- p.TriggerExitRuleEvent()
- }
- _prevctx = localctx
- p.SetState(79)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
-
- switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 4, p.GetParserRuleContext()) {
- case 1:
- localctx = NewCalcContext(p, _parentctx, _parentState)
- p.PushNewRecursionContext(localctx, _startState, CELParserRULE_calc)
- p.SetState(73)
-
- if !(p.Precpred(p.GetParserRuleContext(), 2)) {
- p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", ""))
- goto errorExit
- }
- {
- p.SetState(74)
-
- var _lt = p.GetTokenStream().LT(1)
-
- localctx.(*CalcContext).op = _lt
-
- _la = p.GetTokenStream().LA(1)
-
- if !(((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 58720256) != 0)) {
- var _ri = p.GetErrorHandler().RecoverInline(p)
-
- localctx.(*CalcContext).op = _ri
- } else {
- p.GetErrorHandler().ReportMatch(p)
- p.Consume()
- }
- }
- {
- p.SetState(75)
- p.calc(3)
- }
-
-
- case 2:
- localctx = NewCalcContext(p, _parentctx, _parentState)
- p.PushNewRecursionContext(localctx, _startState, CELParserRULE_calc)
- p.SetState(76)
-
- if !(p.Precpred(p.GetParserRuleContext(), 1)) {
- p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", ""))
- goto errorExit
- }
- {
- p.SetState(77)
-
- var _lt = p.GetTokenStream().LT(1)
-
- localctx.(*CalcContext).op = _lt
-
- _la = p.GetTokenStream().LA(1)
-
- if !(_la == CELParserMINUS || _la == CELParserPLUS) {
- var _ri = p.GetErrorHandler().RecoverInline(p)
-
- localctx.(*CalcContext).op = _ri
- } else {
- p.GetErrorHandler().ReportMatch(p)
- p.Consume()
- }
- }
- {
- p.SetState(78)
- p.calc(2)
- }
-
- case antlr.ATNInvalidAltNumber:
- goto errorExit
- }
-
- }
- p.SetState(83)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 5, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- }
-
-
-
- errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.UnrollRecursionContexts(_parentctx)
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IUnaryContext is an interface to support dynamic dispatch.
-type IUnaryContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
- // IsUnaryContext differentiates from other interfaces.
- IsUnaryContext()
-}
-
-type UnaryContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
-}
-
-func NewEmptyUnaryContext() *UnaryContext {
- var p = new(UnaryContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_unary
- return p
-}
-
-func InitEmptyUnaryContext(p *UnaryContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_unary
-}
-
-func (*UnaryContext) IsUnaryContext() {}
-
-func NewUnaryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *UnaryContext {
- var p = new(UnaryContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_unary
-
- return p
-}
-
-func (s *UnaryContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *UnaryContext) CopyAll(ctx *UnaryContext) {
- s.CopyFrom(&ctx.BaseParserRuleContext)
-}
-
-func (s *UnaryContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *UnaryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-
-
-type LogicalNotContext struct {
- UnaryContext
- s19 antlr.Token
- ops []antlr.Token
-}
-
-func NewLogicalNotContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LogicalNotContext {
- var p = new(LogicalNotContext)
-
- InitEmptyUnaryContext(&p.UnaryContext)
- p.parser = parser
- p.CopyAll(ctx.(*UnaryContext))
-
- return p
-}
-
-
-func (s *LogicalNotContext) GetS19() antlr.Token { return s.s19 }
-
-
-func (s *LogicalNotContext) SetS19(v antlr.Token) { s.s19 = v }
-
-
-func (s *LogicalNotContext) GetOps() []antlr.Token { return s.ops }
-
-
-func (s *LogicalNotContext) SetOps(v []antlr.Token) { s.ops = v }
-
-func (s *LogicalNotContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *LogicalNotContext) Member() IMemberContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IMemberContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IMemberContext)
-}
-
-func (s *LogicalNotContext) AllEXCLAM() []antlr.TerminalNode {
- return s.GetTokens(CELParserEXCLAM)
-}
-
-func (s *LogicalNotContext) EXCLAM(i int) antlr.TerminalNode {
- return s.GetToken(CELParserEXCLAM, i)
-}
-
-
-func (s *LogicalNotContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterLogicalNot(s)
- }
-}
-
-func (s *LogicalNotContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitLogicalNot(s)
- }
-}
-
-func (s *LogicalNotContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitLogicalNot(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type MemberExprContext struct {
- UnaryContext
-}
-
-func NewMemberExprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *MemberExprContext {
- var p = new(MemberExprContext)
-
- InitEmptyUnaryContext(&p.UnaryContext)
- p.parser = parser
- p.CopyAll(ctx.(*UnaryContext))
-
- return p
-}
-
-func (s *MemberExprContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *MemberExprContext) Member() IMemberContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IMemberContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IMemberContext)
-}
-
-
-func (s *MemberExprContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterMemberExpr(s)
- }
-}
-
-func (s *MemberExprContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitMemberExpr(s)
- }
-}
-
-func (s *MemberExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitMemberExpr(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type NegateContext struct {
- UnaryContext
- s18 antlr.Token
- ops []antlr.Token
-}
-
-func NewNegateContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *NegateContext {
- var p = new(NegateContext)
-
- InitEmptyUnaryContext(&p.UnaryContext)
- p.parser = parser
- p.CopyAll(ctx.(*UnaryContext))
-
- return p
-}
-
-
-func (s *NegateContext) GetS18() antlr.Token { return s.s18 }
-
-
-func (s *NegateContext) SetS18(v antlr.Token) { s.s18 = v }
-
-
-func (s *NegateContext) GetOps() []antlr.Token { return s.ops }
-
-
-func (s *NegateContext) SetOps(v []antlr.Token) { s.ops = v }
-
-func (s *NegateContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *NegateContext) Member() IMemberContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IMemberContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IMemberContext)
-}
-
-func (s *NegateContext) AllMINUS() []antlr.TerminalNode {
- return s.GetTokens(CELParserMINUS)
-}
-
-func (s *NegateContext) MINUS(i int) antlr.TerminalNode {
- return s.GetToken(CELParserMINUS, i)
-}
-
-
-func (s *NegateContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterNegate(s)
- }
-}
-
-func (s *NegateContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitNegate(s)
- }
-}
-
-func (s *NegateContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitNegate(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-func (p *CELParser) Unary() (localctx IUnaryContext) {
- localctx = NewUnaryContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 12, CELParserRULE_unary)
- var _la int
-
- var _alt int
-
- p.SetState(97)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
-
- switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 8, p.GetParserRuleContext()) {
- case 1:
- localctx = NewMemberExprContext(p, localctx)
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(84)
- p.member(0)
- }
-
-
- case 2:
- localctx = NewLogicalNotContext(p, localctx)
- p.EnterOuterAlt(localctx, 2)
- p.SetState(86)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- for ok := true; ok; ok = _la == CELParserEXCLAM {
- {
- p.SetState(85)
-
- var _m = p.Match(CELParserEXCLAM)
-
- localctx.(*LogicalNotContext).s19 = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*LogicalNotContext).ops = append(localctx.(*LogicalNotContext).ops, localctx.(*LogicalNotContext).s19)
-
-
- p.SetState(88)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
- }
- {
- p.SetState(90)
- p.member(0)
- }
-
-
- case 3:
- localctx = NewNegateContext(p, localctx)
- p.EnterOuterAlt(localctx, 3)
- p.SetState(92)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = 1
- for ok := true; ok; ok = _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
- switch _alt {
- case 1:
- {
- p.SetState(91)
-
- var _m = p.Match(CELParserMINUS)
-
- localctx.(*NegateContext).s18 = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*NegateContext).ops = append(localctx.(*NegateContext).ops, localctx.(*NegateContext).s18)
-
-
-
-
- default:
- p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
- goto errorExit
- }
-
- p.SetState(94)
- p.GetErrorHandler().Sync(p)
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 7, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- }
- {
- p.SetState(96)
- p.member(0)
- }
-
- case antlr.ATNInvalidAltNumber:
- goto errorExit
- }
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IMemberContext is an interface to support dynamic dispatch.
-type IMemberContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
- // IsMemberContext differentiates from other interfaces.
- IsMemberContext()
-}
-
-type MemberContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
-}
-
-func NewEmptyMemberContext() *MemberContext {
- var p = new(MemberContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_member
- return p
-}
-
-func InitEmptyMemberContext(p *MemberContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_member
-}
-
-func (*MemberContext) IsMemberContext() {}
-
-func NewMemberContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *MemberContext {
- var p = new(MemberContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_member
-
- return p
-}
-
-func (s *MemberContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *MemberContext) CopyAll(ctx *MemberContext) {
- s.CopyFrom(&ctx.BaseParserRuleContext)
-}
-
-func (s *MemberContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *MemberContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-
-
-
-type MemberCallContext struct {
- MemberContext
- op antlr.Token
- id antlr.Token
- open antlr.Token
- args IExprListContext
-}
-
-func NewMemberCallContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *MemberCallContext {
- var p = new(MemberCallContext)
-
- InitEmptyMemberContext(&p.MemberContext)
- p.parser = parser
- p.CopyAll(ctx.(*MemberContext))
-
- return p
-}
-
-
-func (s *MemberCallContext) GetOp() antlr.Token { return s.op }
-
-func (s *MemberCallContext) GetId() antlr.Token { return s.id }
-
-func (s *MemberCallContext) GetOpen() antlr.Token { return s.open }
-
-
-func (s *MemberCallContext) SetOp(v antlr.Token) { s.op = v }
-
-func (s *MemberCallContext) SetId(v antlr.Token) { s.id = v }
-
-func (s *MemberCallContext) SetOpen(v antlr.Token) { s.open = v }
-
-
-func (s *MemberCallContext) GetArgs() IExprListContext { return s.args }
-
-
-func (s *MemberCallContext) SetArgs(v IExprListContext) { s.args = v }
-
-func (s *MemberCallContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *MemberCallContext) Member() IMemberContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IMemberContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IMemberContext)
-}
-
-func (s *MemberCallContext) RPAREN() antlr.TerminalNode {
- return s.GetToken(CELParserRPAREN, 0)
-}
-
-func (s *MemberCallContext) DOT() antlr.TerminalNode {
- return s.GetToken(CELParserDOT, 0)
-}
-
-func (s *MemberCallContext) IDENTIFIER() antlr.TerminalNode {
- return s.GetToken(CELParserIDENTIFIER, 0)
-}
-
-func (s *MemberCallContext) LPAREN() antlr.TerminalNode {
- return s.GetToken(CELParserLPAREN, 0)
-}
-
-func (s *MemberCallContext) ExprList() IExprListContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprListContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprListContext)
-}
-
-
-func (s *MemberCallContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterMemberCall(s)
- }
-}
-
-func (s *MemberCallContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitMemberCall(s)
- }
-}
-
-func (s *MemberCallContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitMemberCall(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type SelectContext struct {
- MemberContext
- op antlr.Token
- opt antlr.Token
- id antlr.Token
-}
-
-func NewSelectContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *SelectContext {
- var p = new(SelectContext)
-
- InitEmptyMemberContext(&p.MemberContext)
- p.parser = parser
- p.CopyAll(ctx.(*MemberContext))
-
- return p
-}
-
-
-func (s *SelectContext) GetOp() antlr.Token { return s.op }
-
-func (s *SelectContext) GetOpt() antlr.Token { return s.opt }
-
-func (s *SelectContext) GetId() antlr.Token { return s.id }
-
-
-func (s *SelectContext) SetOp(v antlr.Token) { s.op = v }
-
-func (s *SelectContext) SetOpt(v antlr.Token) { s.opt = v }
-
-func (s *SelectContext) SetId(v antlr.Token) { s.id = v }
-
-func (s *SelectContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *SelectContext) Member() IMemberContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IMemberContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IMemberContext)
-}
-
-func (s *SelectContext) DOT() antlr.TerminalNode {
- return s.GetToken(CELParserDOT, 0)
-}
-
-func (s *SelectContext) IDENTIFIER() antlr.TerminalNode {
- return s.GetToken(CELParserIDENTIFIER, 0)
-}
-
-func (s *SelectContext) QUESTIONMARK() antlr.TerminalNode {
- return s.GetToken(CELParserQUESTIONMARK, 0)
-}
-
-
-func (s *SelectContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterSelect(s)
- }
-}
-
-func (s *SelectContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitSelect(s)
- }
-}
-
-func (s *SelectContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitSelect(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type PrimaryExprContext struct {
- MemberContext
-}
-
-func NewPrimaryExprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *PrimaryExprContext {
- var p = new(PrimaryExprContext)
-
- InitEmptyMemberContext(&p.MemberContext)
- p.parser = parser
- p.CopyAll(ctx.(*MemberContext))
-
- return p
-}
-
-func (s *PrimaryExprContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *PrimaryExprContext) Primary() IPrimaryContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IPrimaryContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IPrimaryContext)
-}
-
-
-func (s *PrimaryExprContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterPrimaryExpr(s)
- }
-}
-
-func (s *PrimaryExprContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitPrimaryExpr(s)
- }
-}
-
-func (s *PrimaryExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitPrimaryExpr(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type IndexContext struct {
- MemberContext
- op antlr.Token
- opt antlr.Token
- index IExprContext
-}
-
-func NewIndexContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *IndexContext {
- var p = new(IndexContext)
-
- InitEmptyMemberContext(&p.MemberContext)
- p.parser = parser
- p.CopyAll(ctx.(*MemberContext))
-
- return p
-}
-
-
-func (s *IndexContext) GetOp() antlr.Token { return s.op }
-
-func (s *IndexContext) GetOpt() antlr.Token { return s.opt }
-
-
-func (s *IndexContext) SetOp(v antlr.Token) { s.op = v }
-
-func (s *IndexContext) SetOpt(v antlr.Token) { s.opt = v }
-
-
-func (s *IndexContext) GetIndex() IExprContext { return s.index }
-
-
-func (s *IndexContext) SetIndex(v IExprContext) { s.index = v }
-
-func (s *IndexContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *IndexContext) Member() IMemberContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IMemberContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IMemberContext)
-}
-
-func (s *IndexContext) RPRACKET() antlr.TerminalNode {
- return s.GetToken(CELParserRPRACKET, 0)
-}
-
-func (s *IndexContext) LBRACKET() antlr.TerminalNode {
- return s.GetToken(CELParserLBRACKET, 0)
-}
-
-func (s *IndexContext) Expr() IExprContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprContext)
-}
-
-func (s *IndexContext) QUESTIONMARK() antlr.TerminalNode {
- return s.GetToken(CELParserQUESTIONMARK, 0)
-}
-
-
-func (s *IndexContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterIndex(s)
- }
-}
-
-func (s *IndexContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitIndex(s)
- }
-}
-
-func (s *IndexContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitIndex(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-func (p *CELParser) Member() (localctx IMemberContext) {
- return p.member(0)
-}
-
-func (p *CELParser) member(_p int) (localctx IMemberContext) {
- var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext()
-
- _parentState := p.GetState()
- localctx = NewMemberContext(p, p.GetParserRuleContext(), _parentState)
- var _prevctx IMemberContext = localctx
- var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning.
- _startState := 14
- p.EnterRecursionRule(localctx, 14, CELParserRULE_member, _p)
- var _la int
-
- var _alt int
-
- p.EnterOuterAlt(localctx, 1)
- localctx = NewPrimaryExprContext(p, localctx)
- p.SetParserRuleContext(localctx)
- _prevctx = localctx
-
- {
- p.SetState(100)
- p.Primary()
- }
-
- p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1))
- p.SetState(126)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 13, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
- if _alt == 1 {
- if p.GetParseListeners() != nil {
- p.TriggerExitRuleEvent()
- }
- _prevctx = localctx
- p.SetState(124)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
-
- switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 12, p.GetParserRuleContext()) {
- case 1:
- localctx = NewSelectContext(p, NewMemberContext(p, _parentctx, _parentState))
- p.PushNewRecursionContext(localctx, _startState, CELParserRULE_member)
- p.SetState(102)
-
- if !(p.Precpred(p.GetParserRuleContext(), 3)) {
- p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 3)", ""))
- goto errorExit
- }
- {
- p.SetState(103)
-
- var _m = p.Match(CELParserDOT)
-
- localctx.(*SelectContext).op = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- p.SetState(105)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserQUESTIONMARK {
- {
- p.SetState(104)
-
- var _m = p.Match(CELParserQUESTIONMARK)
-
- localctx.(*SelectContext).opt = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(107)
-
- var _m = p.Match(CELParserIDENTIFIER)
-
- localctx.(*SelectContext).id = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 2:
- localctx = NewMemberCallContext(p, NewMemberContext(p, _parentctx, _parentState))
- p.PushNewRecursionContext(localctx, _startState, CELParserRULE_member)
- p.SetState(108)
-
- if !(p.Precpred(p.GetParserRuleContext(), 2)) {
- p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", ""))
- goto errorExit
- }
- {
- p.SetState(109)
-
- var _m = p.Match(CELParserDOT)
-
- localctx.(*MemberCallContext).op = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- {
- p.SetState(110)
-
- var _m = p.Match(CELParserIDENTIFIER)
-
- localctx.(*MemberCallContext).id = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- {
- p.SetState(111)
-
- var _m = p.Match(CELParserLPAREN)
-
- localctx.(*MemberCallContext).open = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- p.SetState(113)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if ((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 135762105344) != 0) {
- {
- p.SetState(112)
-
- var _x = p.ExprList()
-
-
- localctx.(*MemberCallContext).args = _x
- }
-
- }
- {
- p.SetState(115)
- p.Match(CELParserRPAREN)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 3:
- localctx = NewIndexContext(p, NewMemberContext(p, _parentctx, _parentState))
- p.PushNewRecursionContext(localctx, _startState, CELParserRULE_member)
- p.SetState(116)
-
- if !(p.Precpred(p.GetParserRuleContext(), 1)) {
- p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", ""))
- goto errorExit
- }
- {
- p.SetState(117)
-
- var _m = p.Match(CELParserLBRACKET)
-
- localctx.(*IndexContext).op = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- p.SetState(119)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserQUESTIONMARK {
- {
- p.SetState(118)
-
- var _m = p.Match(CELParserQUESTIONMARK)
-
- localctx.(*IndexContext).opt = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(121)
-
- var _x = p.Expr()
-
-
- localctx.(*IndexContext).index = _x
- }
- {
- p.SetState(122)
- p.Match(CELParserRPRACKET)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- case antlr.ATNInvalidAltNumber:
- goto errorExit
- }
-
- }
- p.SetState(128)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 13, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- }
-
-
-
- errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.UnrollRecursionContexts(_parentctx)
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IPrimaryContext is an interface to support dynamic dispatch.
-type IPrimaryContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
- // IsPrimaryContext differentiates from other interfaces.
- IsPrimaryContext()
-}
-
-type PrimaryContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
-}
-
-func NewEmptyPrimaryContext() *PrimaryContext {
- var p = new(PrimaryContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_primary
- return p
-}
-
-func InitEmptyPrimaryContext(p *PrimaryContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_primary
-}
-
-func (*PrimaryContext) IsPrimaryContext() {}
-
-func NewPrimaryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *PrimaryContext {
- var p = new(PrimaryContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_primary
-
- return p
-}
-
-func (s *PrimaryContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *PrimaryContext) CopyAll(ctx *PrimaryContext) {
- s.CopyFrom(&ctx.BaseParserRuleContext)
-}
-
-func (s *PrimaryContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *PrimaryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-
-
-type CreateListContext struct {
- PrimaryContext
- op antlr.Token
- elems IListInitContext
-}
-
-func NewCreateListContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *CreateListContext {
- var p = new(CreateListContext)
-
- InitEmptyPrimaryContext(&p.PrimaryContext)
- p.parser = parser
- p.CopyAll(ctx.(*PrimaryContext))
-
- return p
-}
-
-
-func (s *CreateListContext) GetOp() antlr.Token { return s.op }
-
-
-func (s *CreateListContext) SetOp(v antlr.Token) { s.op = v }
-
-
-func (s *CreateListContext) GetElems() IListInitContext { return s.elems }
-
-
-func (s *CreateListContext) SetElems(v IListInitContext) { s.elems = v }
-
-func (s *CreateListContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *CreateListContext) RPRACKET() antlr.TerminalNode {
- return s.GetToken(CELParserRPRACKET, 0)
-}
-
-func (s *CreateListContext) LBRACKET() antlr.TerminalNode {
- return s.GetToken(CELParserLBRACKET, 0)
-}
-
-func (s *CreateListContext) COMMA() antlr.TerminalNode {
- return s.GetToken(CELParserCOMMA, 0)
-}
-
-func (s *CreateListContext) ListInit() IListInitContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IListInitContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IListInitContext)
-}
-
-
-func (s *CreateListContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterCreateList(s)
- }
-}
-
-func (s *CreateListContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitCreateList(s)
- }
-}
-
-func (s *CreateListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitCreateList(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type CreateStructContext struct {
- PrimaryContext
- op antlr.Token
- entries IMapInitializerListContext
-}
-
-func NewCreateStructContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *CreateStructContext {
- var p = new(CreateStructContext)
-
- InitEmptyPrimaryContext(&p.PrimaryContext)
- p.parser = parser
- p.CopyAll(ctx.(*PrimaryContext))
-
- return p
-}
-
-
-func (s *CreateStructContext) GetOp() antlr.Token { return s.op }
-
-
-func (s *CreateStructContext) SetOp(v antlr.Token) { s.op = v }
-
-
-func (s *CreateStructContext) GetEntries() IMapInitializerListContext { return s.entries }
-
-
-func (s *CreateStructContext) SetEntries(v IMapInitializerListContext) { s.entries = v }
-
-func (s *CreateStructContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *CreateStructContext) RBRACE() antlr.TerminalNode {
- return s.GetToken(CELParserRBRACE, 0)
-}
-
-func (s *CreateStructContext) LBRACE() antlr.TerminalNode {
- return s.GetToken(CELParserLBRACE, 0)
-}
-
-func (s *CreateStructContext) COMMA() antlr.TerminalNode {
- return s.GetToken(CELParserCOMMA, 0)
-}
-
-func (s *CreateStructContext) MapInitializerList() IMapInitializerListContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IMapInitializerListContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IMapInitializerListContext)
-}
-
-
-func (s *CreateStructContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterCreateStruct(s)
- }
-}
-
-func (s *CreateStructContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitCreateStruct(s)
- }
-}
-
-func (s *CreateStructContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitCreateStruct(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type ConstantLiteralContext struct {
- PrimaryContext
-}
-
-func NewConstantLiteralContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ConstantLiteralContext {
- var p = new(ConstantLiteralContext)
-
- InitEmptyPrimaryContext(&p.PrimaryContext)
- p.parser = parser
- p.CopyAll(ctx.(*PrimaryContext))
-
- return p
-}
-
-func (s *ConstantLiteralContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *ConstantLiteralContext) Literal() ILiteralContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(ILiteralContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(ILiteralContext)
-}
-
-
-func (s *ConstantLiteralContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterConstantLiteral(s)
- }
-}
-
-func (s *ConstantLiteralContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitConstantLiteral(s)
- }
-}
-
-func (s *ConstantLiteralContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitConstantLiteral(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type NestedContext struct {
- PrimaryContext
- e IExprContext
-}
-
-func NewNestedContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *NestedContext {
- var p = new(NestedContext)
-
- InitEmptyPrimaryContext(&p.PrimaryContext)
- p.parser = parser
- p.CopyAll(ctx.(*PrimaryContext))
-
- return p
-}
-
-
-func (s *NestedContext) GetE() IExprContext { return s.e }
-
-
-func (s *NestedContext) SetE(v IExprContext) { s.e = v }
-
-func (s *NestedContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *NestedContext) LPAREN() antlr.TerminalNode {
- return s.GetToken(CELParserLPAREN, 0)
-}
-
-func (s *NestedContext) RPAREN() antlr.TerminalNode {
- return s.GetToken(CELParserRPAREN, 0)
-}
-
-func (s *NestedContext) Expr() IExprContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprContext)
-}
-
-
-func (s *NestedContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterNested(s)
- }
-}
-
-func (s *NestedContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitNested(s)
- }
-}
-
-func (s *NestedContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitNested(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type CreateMessageContext struct {
- PrimaryContext
- leadingDot antlr.Token
- _IDENTIFIER antlr.Token
- ids []antlr.Token
- s16 antlr.Token
- ops []antlr.Token
- op antlr.Token
- entries IFieldInitializerListContext
-}
-
-func NewCreateMessageContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *CreateMessageContext {
- var p = new(CreateMessageContext)
-
- InitEmptyPrimaryContext(&p.PrimaryContext)
- p.parser = parser
- p.CopyAll(ctx.(*PrimaryContext))
-
- return p
-}
-
-
-func (s *CreateMessageContext) GetLeadingDot() antlr.Token { return s.leadingDot }
-
-func (s *CreateMessageContext) Get_IDENTIFIER() antlr.Token { return s._IDENTIFIER }
-
-func (s *CreateMessageContext) GetS16() antlr.Token { return s.s16 }
-
-func (s *CreateMessageContext) GetOp() antlr.Token { return s.op }
-
-
-func (s *CreateMessageContext) SetLeadingDot(v antlr.Token) { s.leadingDot = v }
-
-func (s *CreateMessageContext) Set_IDENTIFIER(v antlr.Token) { s._IDENTIFIER = v }
-
-func (s *CreateMessageContext) SetS16(v antlr.Token) { s.s16 = v }
-
-func (s *CreateMessageContext) SetOp(v antlr.Token) { s.op = v }
-
-
-func (s *CreateMessageContext) GetIds() []antlr.Token { return s.ids }
-
-func (s *CreateMessageContext) GetOps() []antlr.Token { return s.ops }
-
-
-func (s *CreateMessageContext) SetIds(v []antlr.Token) { s.ids = v }
-
-func (s *CreateMessageContext) SetOps(v []antlr.Token) { s.ops = v }
-
-
-func (s *CreateMessageContext) GetEntries() IFieldInitializerListContext { return s.entries }
-
-
-func (s *CreateMessageContext) SetEntries(v IFieldInitializerListContext) { s.entries = v }
-
-func (s *CreateMessageContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *CreateMessageContext) RBRACE() antlr.TerminalNode {
- return s.GetToken(CELParserRBRACE, 0)
-}
-
-func (s *CreateMessageContext) AllIDENTIFIER() []antlr.TerminalNode {
- return s.GetTokens(CELParserIDENTIFIER)
-}
-
-func (s *CreateMessageContext) IDENTIFIER(i int) antlr.TerminalNode {
- return s.GetToken(CELParserIDENTIFIER, i)
-}
-
-func (s *CreateMessageContext) LBRACE() antlr.TerminalNode {
- return s.GetToken(CELParserLBRACE, 0)
-}
-
-func (s *CreateMessageContext) COMMA() antlr.TerminalNode {
- return s.GetToken(CELParserCOMMA, 0)
-}
-
-func (s *CreateMessageContext) AllDOT() []antlr.TerminalNode {
- return s.GetTokens(CELParserDOT)
-}
-
-func (s *CreateMessageContext) DOT(i int) antlr.TerminalNode {
- return s.GetToken(CELParserDOT, i)
-}
-
-func (s *CreateMessageContext) FieldInitializerList() IFieldInitializerListContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IFieldInitializerListContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IFieldInitializerListContext)
-}
-
-
-func (s *CreateMessageContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterCreateMessage(s)
- }
-}
-
-func (s *CreateMessageContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitCreateMessage(s)
- }
-}
-
-func (s *CreateMessageContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitCreateMessage(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type IdentOrGlobalCallContext struct {
- PrimaryContext
- leadingDot antlr.Token
- id antlr.Token
- op antlr.Token
- args IExprListContext
-}
-
-func NewIdentOrGlobalCallContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *IdentOrGlobalCallContext {
- var p = new(IdentOrGlobalCallContext)
-
- InitEmptyPrimaryContext(&p.PrimaryContext)
- p.parser = parser
- p.CopyAll(ctx.(*PrimaryContext))
-
- return p
-}
-
-
-func (s *IdentOrGlobalCallContext) GetLeadingDot() antlr.Token { return s.leadingDot }
-
-func (s *IdentOrGlobalCallContext) GetId() antlr.Token { return s.id }
-
-func (s *IdentOrGlobalCallContext) GetOp() antlr.Token { return s.op }
-
-
-func (s *IdentOrGlobalCallContext) SetLeadingDot(v antlr.Token) { s.leadingDot = v }
-
-func (s *IdentOrGlobalCallContext) SetId(v antlr.Token) { s.id = v }
-
-func (s *IdentOrGlobalCallContext) SetOp(v antlr.Token) { s.op = v }
-
-
-func (s *IdentOrGlobalCallContext) GetArgs() IExprListContext { return s.args }
-
-
-func (s *IdentOrGlobalCallContext) SetArgs(v IExprListContext) { s.args = v }
-
-func (s *IdentOrGlobalCallContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *IdentOrGlobalCallContext) IDENTIFIER() antlr.TerminalNode {
- return s.GetToken(CELParserIDENTIFIER, 0)
-}
-
-func (s *IdentOrGlobalCallContext) RPAREN() antlr.TerminalNode {
- return s.GetToken(CELParserRPAREN, 0)
-}
-
-func (s *IdentOrGlobalCallContext) DOT() antlr.TerminalNode {
- return s.GetToken(CELParserDOT, 0)
-}
-
-func (s *IdentOrGlobalCallContext) LPAREN() antlr.TerminalNode {
- return s.GetToken(CELParserLPAREN, 0)
-}
-
-func (s *IdentOrGlobalCallContext) ExprList() IExprListContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprListContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprListContext)
-}
-
-
-func (s *IdentOrGlobalCallContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterIdentOrGlobalCall(s)
- }
-}
-
-func (s *IdentOrGlobalCallContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitIdentOrGlobalCall(s)
- }
-}
-
-func (s *IdentOrGlobalCallContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitIdentOrGlobalCall(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-func (p *CELParser) Primary() (localctx IPrimaryContext) {
- localctx = NewPrimaryContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 16, CELParserRULE_primary)
- var _la int
-
- p.SetState(180)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
-
- switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 25, p.GetParserRuleContext()) {
- case 1:
- localctx = NewIdentOrGlobalCallContext(p, localctx)
- p.EnterOuterAlt(localctx, 1)
- p.SetState(130)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserDOT {
- {
- p.SetState(129)
-
- var _m = p.Match(CELParserDOT)
-
- localctx.(*IdentOrGlobalCallContext).leadingDot = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(132)
-
- var _m = p.Match(CELParserIDENTIFIER)
-
- localctx.(*IdentOrGlobalCallContext).id = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- p.SetState(138)
- p.GetErrorHandler().Sync(p)
-
-
- if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 16, p.GetParserRuleContext()) == 1 {
- {
- p.SetState(133)
-
- var _m = p.Match(CELParserLPAREN)
-
- localctx.(*IdentOrGlobalCallContext).op = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- p.SetState(135)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if ((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 135762105344) != 0) {
- {
- p.SetState(134)
-
- var _x = p.ExprList()
-
-
- localctx.(*IdentOrGlobalCallContext).args = _x
- }
-
- }
- {
- p.SetState(137)
- p.Match(CELParserRPAREN)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- } else if p.HasError() { // JIM
- goto errorExit
- }
-
-
- case 2:
- localctx = NewNestedContext(p, localctx)
- p.EnterOuterAlt(localctx, 2)
- {
- p.SetState(140)
- p.Match(CELParserLPAREN)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- {
- p.SetState(141)
-
- var _x = p.Expr()
-
-
- localctx.(*NestedContext).e = _x
- }
- {
- p.SetState(142)
- p.Match(CELParserRPAREN)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 3:
- localctx = NewCreateListContext(p, localctx)
- p.EnterOuterAlt(localctx, 3)
- {
- p.SetState(144)
-
- var _m = p.Match(CELParserLBRACKET)
-
- localctx.(*CreateListContext).op = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- p.SetState(146)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if ((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 135763153920) != 0) {
- {
- p.SetState(145)
-
- var _x = p.ListInit()
-
-
- localctx.(*CreateListContext).elems = _x
- }
-
- }
- p.SetState(149)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserCOMMA {
- {
- p.SetState(148)
- p.Match(CELParserCOMMA)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(151)
- p.Match(CELParserRPRACKET)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 4:
- localctx = NewCreateStructContext(p, localctx)
- p.EnterOuterAlt(localctx, 4)
- {
- p.SetState(152)
-
- var _m = p.Match(CELParserLBRACE)
-
- localctx.(*CreateStructContext).op = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- p.SetState(154)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if ((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 135763153920) != 0) {
- {
- p.SetState(153)
-
- var _x = p.MapInitializerList()
-
-
- localctx.(*CreateStructContext).entries = _x
- }
-
- }
- p.SetState(157)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserCOMMA {
- {
- p.SetState(156)
- p.Match(CELParserCOMMA)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(159)
- p.Match(CELParserRBRACE)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 5:
- localctx = NewCreateMessageContext(p, localctx)
- p.EnterOuterAlt(localctx, 5)
- p.SetState(161)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserDOT {
- {
- p.SetState(160)
-
- var _m = p.Match(CELParserDOT)
-
- localctx.(*CreateMessageContext).leadingDot = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(163)
-
- var _m = p.Match(CELParserIDENTIFIER)
-
- localctx.(*CreateMessageContext)._IDENTIFIER = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*CreateMessageContext).ids = append(localctx.(*CreateMessageContext).ids, localctx.(*CreateMessageContext)._IDENTIFIER)
- p.SetState(168)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- for _la == CELParserDOT {
- {
- p.SetState(164)
-
- var _m = p.Match(CELParserDOT)
-
- localctx.(*CreateMessageContext).s16 = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*CreateMessageContext).ops = append(localctx.(*CreateMessageContext).ops, localctx.(*CreateMessageContext).s16)
- {
- p.SetState(165)
-
- var _m = p.Match(CELParserIDENTIFIER)
-
- localctx.(*CreateMessageContext)._IDENTIFIER = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*CreateMessageContext).ids = append(localctx.(*CreateMessageContext).ids, localctx.(*CreateMessageContext)._IDENTIFIER)
-
-
- p.SetState(170)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
- }
- {
- p.SetState(171)
-
- var _m = p.Match(CELParserLBRACE)
-
- localctx.(*CreateMessageContext).op = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- p.SetState(173)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserQUESTIONMARK || _la == CELParserIDENTIFIER {
- {
- p.SetState(172)
-
- var _x = p.FieldInitializerList()
-
-
- localctx.(*CreateMessageContext).entries = _x
- }
-
- }
- p.SetState(176)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserCOMMA {
- {
- p.SetState(175)
- p.Match(CELParserCOMMA)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(178)
- p.Match(CELParserRBRACE)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 6:
- localctx = NewConstantLiteralContext(p, localctx)
- p.EnterOuterAlt(localctx, 6)
- {
- p.SetState(179)
- p.Literal()
- }
-
- case antlr.ATNInvalidAltNumber:
- goto errorExit
- }
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IExprListContext is an interface to support dynamic dispatch.
-type IExprListContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // Get_expr returns the _expr rule contexts.
- Get_expr() IExprContext
-
-
- // Set_expr sets the _expr rule contexts.
- Set_expr(IExprContext)
-
-
- // GetE returns the e rule context list.
- GetE() []IExprContext
-
-
- // SetE sets the e rule context list.
- SetE([]IExprContext)
-
-
- // Getter signatures
- AllExpr() []IExprContext
- Expr(i int) IExprContext
- AllCOMMA() []antlr.TerminalNode
- COMMA(i int) antlr.TerminalNode
-
- // IsExprListContext differentiates from other interfaces.
- IsExprListContext()
-}
-
-type ExprListContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- _expr IExprContext
- e []IExprContext
-}
-
-func NewEmptyExprListContext() *ExprListContext {
- var p = new(ExprListContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_exprList
- return p
-}
-
-func InitEmptyExprListContext(p *ExprListContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_exprList
-}
-
-func (*ExprListContext) IsExprListContext() {}
-
-func NewExprListContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExprListContext {
- var p = new(ExprListContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_exprList
-
- return p
-}
-
-func (s *ExprListContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *ExprListContext) Get_expr() IExprContext { return s._expr }
-
-
-func (s *ExprListContext) Set_expr(v IExprContext) { s._expr = v }
-
-
-func (s *ExprListContext) GetE() []IExprContext { return s.e }
-
-
-func (s *ExprListContext) SetE(v []IExprContext) { s.e = v }
-
-
-func (s *ExprListContext) AllExpr() []IExprContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IExprContext); ok {
- len++
- }
- }
-
- tst := make([]IExprContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IExprContext); ok {
- tst[i] = t.(IExprContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *ExprListContext) Expr(i int) IExprContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprContext)
-}
-
-func (s *ExprListContext) AllCOMMA() []antlr.TerminalNode {
- return s.GetTokens(CELParserCOMMA)
-}
-
-func (s *ExprListContext) COMMA(i int) antlr.TerminalNode {
- return s.GetToken(CELParserCOMMA, i)
-}
-
-func (s *ExprListContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *ExprListContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *ExprListContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterExprList(s)
- }
-}
-
-func (s *ExprListContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitExprList(s)
- }
-}
-
-func (s *ExprListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitExprList(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) ExprList() (localctx IExprListContext) {
- localctx = NewExprListContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 18, CELParserRULE_exprList)
- var _la int
-
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(182)
-
- var _x = p.Expr()
-
-
- localctx.(*ExprListContext)._expr = _x
- }
- localctx.(*ExprListContext).e = append(localctx.(*ExprListContext).e, localctx.(*ExprListContext)._expr)
- p.SetState(187)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- for _la == CELParserCOMMA {
- {
- p.SetState(183)
- p.Match(CELParserCOMMA)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- {
- p.SetState(184)
-
- var _x = p.Expr()
-
-
- localctx.(*ExprListContext)._expr = _x
- }
- localctx.(*ExprListContext).e = append(localctx.(*ExprListContext).e, localctx.(*ExprListContext)._expr)
-
-
- p.SetState(189)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IListInitContext is an interface to support dynamic dispatch.
-type IListInitContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // Get_optExpr returns the _optExpr rule contexts.
- Get_optExpr() IOptExprContext
-
-
- // Set_optExpr sets the _optExpr rule contexts.
- Set_optExpr(IOptExprContext)
-
-
- // GetElems returns the elems rule context list.
- GetElems() []IOptExprContext
-
-
- // SetElems sets the elems rule context list.
- SetElems([]IOptExprContext)
-
-
- // Getter signatures
- AllOptExpr() []IOptExprContext
- OptExpr(i int) IOptExprContext
- AllCOMMA() []antlr.TerminalNode
- COMMA(i int) antlr.TerminalNode
-
- // IsListInitContext differentiates from other interfaces.
- IsListInitContext()
-}
-
-type ListInitContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- _optExpr IOptExprContext
- elems []IOptExprContext
-}
-
-func NewEmptyListInitContext() *ListInitContext {
- var p = new(ListInitContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_listInit
- return p
-}
-
-func InitEmptyListInitContext(p *ListInitContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_listInit
-}
-
-func (*ListInitContext) IsListInitContext() {}
-
-func NewListInitContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ListInitContext {
- var p = new(ListInitContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_listInit
-
- return p
-}
-
-func (s *ListInitContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *ListInitContext) Get_optExpr() IOptExprContext { return s._optExpr }
-
-
-func (s *ListInitContext) Set_optExpr(v IOptExprContext) { s._optExpr = v }
-
-
-func (s *ListInitContext) GetElems() []IOptExprContext { return s.elems }
-
-
-func (s *ListInitContext) SetElems(v []IOptExprContext) { s.elems = v }
-
-
-func (s *ListInitContext) AllOptExpr() []IOptExprContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IOptExprContext); ok {
- len++
- }
- }
-
- tst := make([]IOptExprContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IOptExprContext); ok {
- tst[i] = t.(IOptExprContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *ListInitContext) OptExpr(i int) IOptExprContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IOptExprContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IOptExprContext)
-}
-
-func (s *ListInitContext) AllCOMMA() []antlr.TerminalNode {
- return s.GetTokens(CELParserCOMMA)
-}
-
-func (s *ListInitContext) COMMA(i int) antlr.TerminalNode {
- return s.GetToken(CELParserCOMMA, i)
-}
-
-func (s *ListInitContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *ListInitContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *ListInitContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterListInit(s)
- }
-}
-
-func (s *ListInitContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitListInit(s)
- }
-}
-
-func (s *ListInitContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitListInit(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) ListInit() (localctx IListInitContext) {
- localctx = NewListInitContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 20, CELParserRULE_listInit)
- var _alt int
-
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(190)
-
- var _x = p.OptExpr()
-
-
- localctx.(*ListInitContext)._optExpr = _x
- }
- localctx.(*ListInitContext).elems = append(localctx.(*ListInitContext).elems, localctx.(*ListInitContext)._optExpr)
- p.SetState(195)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 27, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
- if _alt == 1 {
- {
- p.SetState(191)
- p.Match(CELParserCOMMA)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- {
- p.SetState(192)
-
- var _x = p.OptExpr()
-
-
- localctx.(*ListInitContext)._optExpr = _x
- }
- localctx.(*ListInitContext).elems = append(localctx.(*ListInitContext).elems, localctx.(*ListInitContext)._optExpr)
-
-
- }
- p.SetState(197)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 27, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IFieldInitializerListContext is an interface to support dynamic dispatch.
-type IFieldInitializerListContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetS21 returns the s21 token.
- GetS21() antlr.Token
-
-
- // SetS21 sets the s21 token.
- SetS21(antlr.Token)
-
-
- // GetCols returns the cols token list.
- GetCols() []antlr.Token
-
-
- // SetCols sets the cols token list.
- SetCols([]antlr.Token)
-
-
- // Get_optField returns the _optField rule contexts.
- Get_optField() IOptFieldContext
-
- // Get_expr returns the _expr rule contexts.
- Get_expr() IExprContext
-
-
- // Set_optField sets the _optField rule contexts.
- Set_optField(IOptFieldContext)
-
- // Set_expr sets the _expr rule contexts.
- Set_expr(IExprContext)
-
-
- // GetFields returns the fields rule context list.
- GetFields() []IOptFieldContext
-
- // GetValues returns the values rule context list.
- GetValues() []IExprContext
-
-
- // SetFields sets the fields rule context list.
- SetFields([]IOptFieldContext)
-
- // SetValues sets the values rule context list.
- SetValues([]IExprContext)
-
-
- // Getter signatures
- AllOptField() []IOptFieldContext
- OptField(i int) IOptFieldContext
- AllCOLON() []antlr.TerminalNode
- COLON(i int) antlr.TerminalNode
- AllExpr() []IExprContext
- Expr(i int) IExprContext
- AllCOMMA() []antlr.TerminalNode
- COMMA(i int) antlr.TerminalNode
-
- // IsFieldInitializerListContext differentiates from other interfaces.
- IsFieldInitializerListContext()
-}
-
-type FieldInitializerListContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- _optField IOptFieldContext
- fields []IOptFieldContext
- s21 antlr.Token
- cols []antlr.Token
- _expr IExprContext
- values []IExprContext
-}
-
-func NewEmptyFieldInitializerListContext() *FieldInitializerListContext {
- var p = new(FieldInitializerListContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_fieldInitializerList
- return p
-}
-
-func InitEmptyFieldInitializerListContext(p *FieldInitializerListContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_fieldInitializerList
-}
-
-func (*FieldInitializerListContext) IsFieldInitializerListContext() {}
-
-func NewFieldInitializerListContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FieldInitializerListContext {
- var p = new(FieldInitializerListContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_fieldInitializerList
-
- return p
-}
-
-func (s *FieldInitializerListContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *FieldInitializerListContext) GetS21() antlr.Token { return s.s21 }
-
-
-func (s *FieldInitializerListContext) SetS21(v antlr.Token) { s.s21 = v }
-
-
-func (s *FieldInitializerListContext) GetCols() []antlr.Token { return s.cols }
-
-
-func (s *FieldInitializerListContext) SetCols(v []antlr.Token) { s.cols = v }
-
-
-func (s *FieldInitializerListContext) Get_optField() IOptFieldContext { return s._optField }
-
-func (s *FieldInitializerListContext) Get_expr() IExprContext { return s._expr }
-
-
-func (s *FieldInitializerListContext) Set_optField(v IOptFieldContext) { s._optField = v }
-
-func (s *FieldInitializerListContext) Set_expr(v IExprContext) { s._expr = v }
-
-
-func (s *FieldInitializerListContext) GetFields() []IOptFieldContext { return s.fields }
-
-func (s *FieldInitializerListContext) GetValues() []IExprContext { return s.values }
-
-
-func (s *FieldInitializerListContext) SetFields(v []IOptFieldContext) { s.fields = v }
-
-func (s *FieldInitializerListContext) SetValues(v []IExprContext) { s.values = v }
-
-
-func (s *FieldInitializerListContext) AllOptField() []IOptFieldContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IOptFieldContext); ok {
- len++
- }
- }
-
- tst := make([]IOptFieldContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IOptFieldContext); ok {
- tst[i] = t.(IOptFieldContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *FieldInitializerListContext) OptField(i int) IOptFieldContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IOptFieldContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IOptFieldContext)
-}
-
-func (s *FieldInitializerListContext) AllCOLON() []antlr.TerminalNode {
- return s.GetTokens(CELParserCOLON)
-}
-
-func (s *FieldInitializerListContext) COLON(i int) antlr.TerminalNode {
- return s.GetToken(CELParserCOLON, i)
-}
-
-func (s *FieldInitializerListContext) AllExpr() []IExprContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IExprContext); ok {
- len++
- }
- }
-
- tst := make([]IExprContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IExprContext); ok {
- tst[i] = t.(IExprContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *FieldInitializerListContext) Expr(i int) IExprContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprContext)
-}
-
-func (s *FieldInitializerListContext) AllCOMMA() []antlr.TerminalNode {
- return s.GetTokens(CELParserCOMMA)
-}
-
-func (s *FieldInitializerListContext) COMMA(i int) antlr.TerminalNode {
- return s.GetToken(CELParserCOMMA, i)
-}
-
-func (s *FieldInitializerListContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *FieldInitializerListContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *FieldInitializerListContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterFieldInitializerList(s)
- }
-}
-
-func (s *FieldInitializerListContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitFieldInitializerList(s)
- }
-}
-
-func (s *FieldInitializerListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitFieldInitializerList(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) FieldInitializerList() (localctx IFieldInitializerListContext) {
- localctx = NewFieldInitializerListContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 22, CELParserRULE_fieldInitializerList)
- var _alt int
-
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(198)
-
- var _x = p.OptField()
-
-
- localctx.(*FieldInitializerListContext)._optField = _x
- }
- localctx.(*FieldInitializerListContext).fields = append(localctx.(*FieldInitializerListContext).fields, localctx.(*FieldInitializerListContext)._optField)
- {
- p.SetState(199)
-
- var _m = p.Match(CELParserCOLON)
-
- localctx.(*FieldInitializerListContext).s21 = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*FieldInitializerListContext).cols = append(localctx.(*FieldInitializerListContext).cols, localctx.(*FieldInitializerListContext).s21)
- {
- p.SetState(200)
-
- var _x = p.Expr()
-
-
- localctx.(*FieldInitializerListContext)._expr = _x
- }
- localctx.(*FieldInitializerListContext).values = append(localctx.(*FieldInitializerListContext).values, localctx.(*FieldInitializerListContext)._expr)
- p.SetState(208)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 28, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
- if _alt == 1 {
- {
- p.SetState(201)
- p.Match(CELParserCOMMA)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- {
- p.SetState(202)
-
- var _x = p.OptField()
-
-
- localctx.(*FieldInitializerListContext)._optField = _x
- }
- localctx.(*FieldInitializerListContext).fields = append(localctx.(*FieldInitializerListContext).fields, localctx.(*FieldInitializerListContext)._optField)
- {
- p.SetState(203)
-
- var _m = p.Match(CELParserCOLON)
-
- localctx.(*FieldInitializerListContext).s21 = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*FieldInitializerListContext).cols = append(localctx.(*FieldInitializerListContext).cols, localctx.(*FieldInitializerListContext).s21)
- {
- p.SetState(204)
-
- var _x = p.Expr()
-
-
- localctx.(*FieldInitializerListContext)._expr = _x
- }
- localctx.(*FieldInitializerListContext).values = append(localctx.(*FieldInitializerListContext).values, localctx.(*FieldInitializerListContext)._expr)
-
-
- }
- p.SetState(210)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 28, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IOptFieldContext is an interface to support dynamic dispatch.
-type IOptFieldContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetOpt returns the opt token.
- GetOpt() antlr.Token
-
-
- // SetOpt sets the opt token.
- SetOpt(antlr.Token)
-
-
- // Getter signatures
- IDENTIFIER() antlr.TerminalNode
- QUESTIONMARK() antlr.TerminalNode
-
- // IsOptFieldContext differentiates from other interfaces.
- IsOptFieldContext()
-}
-
-type OptFieldContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- opt antlr.Token
-}
-
-func NewEmptyOptFieldContext() *OptFieldContext {
- var p = new(OptFieldContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_optField
- return p
-}
-
-func InitEmptyOptFieldContext(p *OptFieldContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_optField
-}
-
-func (*OptFieldContext) IsOptFieldContext() {}
-
-func NewOptFieldContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *OptFieldContext {
- var p = new(OptFieldContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_optField
-
- return p
-}
-
-func (s *OptFieldContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *OptFieldContext) GetOpt() antlr.Token { return s.opt }
-
-
-func (s *OptFieldContext) SetOpt(v antlr.Token) { s.opt = v }
-
-
-func (s *OptFieldContext) IDENTIFIER() antlr.TerminalNode {
- return s.GetToken(CELParserIDENTIFIER, 0)
-}
-
-func (s *OptFieldContext) QUESTIONMARK() antlr.TerminalNode {
- return s.GetToken(CELParserQUESTIONMARK, 0)
-}
-
-func (s *OptFieldContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *OptFieldContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *OptFieldContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterOptField(s)
- }
-}
-
-func (s *OptFieldContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitOptField(s)
- }
-}
-
-func (s *OptFieldContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitOptField(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) OptField() (localctx IOptFieldContext) {
- localctx = NewOptFieldContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 24, CELParserRULE_optField)
- var _la int
-
- p.EnterOuterAlt(localctx, 1)
- p.SetState(212)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserQUESTIONMARK {
- {
- p.SetState(211)
-
- var _m = p.Match(CELParserQUESTIONMARK)
-
- localctx.(*OptFieldContext).opt = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(214)
- p.Match(CELParserIDENTIFIER)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IMapInitializerListContext is an interface to support dynamic dispatch.
-type IMapInitializerListContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetS21 returns the s21 token.
- GetS21() antlr.Token
-
-
- // SetS21 sets the s21 token.
- SetS21(antlr.Token)
-
-
- // GetCols returns the cols token list.
- GetCols() []antlr.Token
-
-
- // SetCols sets the cols token list.
- SetCols([]antlr.Token)
-
-
- // Get_optExpr returns the _optExpr rule contexts.
- Get_optExpr() IOptExprContext
-
- // Get_expr returns the _expr rule contexts.
- Get_expr() IExprContext
-
-
- // Set_optExpr sets the _optExpr rule contexts.
- Set_optExpr(IOptExprContext)
-
- // Set_expr sets the _expr rule contexts.
- Set_expr(IExprContext)
-
-
- // GetKeys returns the keys rule context list.
- GetKeys() []IOptExprContext
-
- // GetValues returns the values rule context list.
- GetValues() []IExprContext
-
-
- // SetKeys sets the keys rule context list.
- SetKeys([]IOptExprContext)
-
- // SetValues sets the values rule context list.
- SetValues([]IExprContext)
-
-
- // Getter signatures
- AllOptExpr() []IOptExprContext
- OptExpr(i int) IOptExprContext
- AllCOLON() []antlr.TerminalNode
- COLON(i int) antlr.TerminalNode
- AllExpr() []IExprContext
- Expr(i int) IExprContext
- AllCOMMA() []antlr.TerminalNode
- COMMA(i int) antlr.TerminalNode
-
- // IsMapInitializerListContext differentiates from other interfaces.
- IsMapInitializerListContext()
-}
-
-type MapInitializerListContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- _optExpr IOptExprContext
- keys []IOptExprContext
- s21 antlr.Token
- cols []antlr.Token
- _expr IExprContext
- values []IExprContext
-}
-
-func NewEmptyMapInitializerListContext() *MapInitializerListContext {
- var p = new(MapInitializerListContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_mapInitializerList
- return p
-}
-
-func InitEmptyMapInitializerListContext(p *MapInitializerListContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_mapInitializerList
-}
-
-func (*MapInitializerListContext) IsMapInitializerListContext() {}
-
-func NewMapInitializerListContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *MapInitializerListContext {
- var p = new(MapInitializerListContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_mapInitializerList
-
- return p
-}
-
-func (s *MapInitializerListContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *MapInitializerListContext) GetS21() antlr.Token { return s.s21 }
-
-
-func (s *MapInitializerListContext) SetS21(v antlr.Token) { s.s21 = v }
-
-
-func (s *MapInitializerListContext) GetCols() []antlr.Token { return s.cols }
-
-
-func (s *MapInitializerListContext) SetCols(v []antlr.Token) { s.cols = v }
-
-
-func (s *MapInitializerListContext) Get_optExpr() IOptExprContext { return s._optExpr }
-
-func (s *MapInitializerListContext) Get_expr() IExprContext { return s._expr }
-
-
-func (s *MapInitializerListContext) Set_optExpr(v IOptExprContext) { s._optExpr = v }
-
-func (s *MapInitializerListContext) Set_expr(v IExprContext) { s._expr = v }
-
-
-func (s *MapInitializerListContext) GetKeys() []IOptExprContext { return s.keys }
-
-func (s *MapInitializerListContext) GetValues() []IExprContext { return s.values }
-
-
-func (s *MapInitializerListContext) SetKeys(v []IOptExprContext) { s.keys = v }
-
-func (s *MapInitializerListContext) SetValues(v []IExprContext) { s.values = v }
-
-
-func (s *MapInitializerListContext) AllOptExpr() []IOptExprContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IOptExprContext); ok {
- len++
- }
- }
-
- tst := make([]IOptExprContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IOptExprContext); ok {
- tst[i] = t.(IOptExprContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *MapInitializerListContext) OptExpr(i int) IOptExprContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IOptExprContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IOptExprContext)
-}
-
-func (s *MapInitializerListContext) AllCOLON() []antlr.TerminalNode {
- return s.GetTokens(CELParserCOLON)
-}
-
-func (s *MapInitializerListContext) COLON(i int) antlr.TerminalNode {
- return s.GetToken(CELParserCOLON, i)
-}
-
-func (s *MapInitializerListContext) AllExpr() []IExprContext {
- children := s.GetChildren()
- len := 0
- for _, ctx := range children {
- if _, ok := ctx.(IExprContext); ok {
- len++
- }
- }
-
- tst := make([]IExprContext, len)
- i := 0
- for _, ctx := range children {
- if t, ok := ctx.(IExprContext); ok {
- tst[i] = t.(IExprContext)
- i++
- }
- }
-
- return tst
-}
-
-func (s *MapInitializerListContext) Expr(i int) IExprContext {
- var t antlr.RuleContext;
- j := 0
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprContext); ok {
- if j == i {
- t = ctx.(antlr.RuleContext);
- break
- }
- j++
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprContext)
-}
-
-func (s *MapInitializerListContext) AllCOMMA() []antlr.TerminalNode {
- return s.GetTokens(CELParserCOMMA)
-}
-
-func (s *MapInitializerListContext) COMMA(i int) antlr.TerminalNode {
- return s.GetToken(CELParserCOMMA, i)
-}
-
-func (s *MapInitializerListContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *MapInitializerListContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *MapInitializerListContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterMapInitializerList(s)
- }
-}
-
-func (s *MapInitializerListContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitMapInitializerList(s)
- }
-}
-
-func (s *MapInitializerListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitMapInitializerList(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) MapInitializerList() (localctx IMapInitializerListContext) {
- localctx = NewMapInitializerListContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 26, CELParserRULE_mapInitializerList)
- var _alt int
-
- p.EnterOuterAlt(localctx, 1)
- {
- p.SetState(216)
-
- var _x = p.OptExpr()
-
-
- localctx.(*MapInitializerListContext)._optExpr = _x
- }
- localctx.(*MapInitializerListContext).keys = append(localctx.(*MapInitializerListContext).keys, localctx.(*MapInitializerListContext)._optExpr)
- {
- p.SetState(217)
-
- var _m = p.Match(CELParserCOLON)
-
- localctx.(*MapInitializerListContext).s21 = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*MapInitializerListContext).cols = append(localctx.(*MapInitializerListContext).cols, localctx.(*MapInitializerListContext).s21)
- {
- p.SetState(218)
-
- var _x = p.Expr()
-
-
- localctx.(*MapInitializerListContext)._expr = _x
- }
- localctx.(*MapInitializerListContext).values = append(localctx.(*MapInitializerListContext).values, localctx.(*MapInitializerListContext)._expr)
- p.SetState(226)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 30, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
- if _alt == 1 {
- {
- p.SetState(219)
- p.Match(CELParserCOMMA)
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- {
- p.SetState(220)
-
- var _x = p.OptExpr()
-
-
- localctx.(*MapInitializerListContext)._optExpr = _x
- }
- localctx.(*MapInitializerListContext).keys = append(localctx.(*MapInitializerListContext).keys, localctx.(*MapInitializerListContext)._optExpr)
- {
- p.SetState(221)
-
- var _m = p.Match(CELParserCOLON)
-
- localctx.(*MapInitializerListContext).s21 = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
- localctx.(*MapInitializerListContext).cols = append(localctx.(*MapInitializerListContext).cols, localctx.(*MapInitializerListContext).s21)
- {
- p.SetState(222)
-
- var _x = p.Expr()
-
-
- localctx.(*MapInitializerListContext)._expr = _x
- }
- localctx.(*MapInitializerListContext).values = append(localctx.(*MapInitializerListContext).values, localctx.(*MapInitializerListContext)._expr)
-
-
- }
- p.SetState(228)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 30, p.GetParserRuleContext())
- if p.HasError() {
- goto errorExit
- }
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// IOptExprContext is an interface to support dynamic dispatch.
-type IOptExprContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
-
- // GetOpt returns the opt token.
- GetOpt() antlr.Token
-
-
- // SetOpt sets the opt token.
- SetOpt(antlr.Token)
-
-
- // GetE returns the e rule contexts.
- GetE() IExprContext
-
-
- // SetE sets the e rule contexts.
- SetE(IExprContext)
-
-
- // Getter signatures
- Expr() IExprContext
- QUESTIONMARK() antlr.TerminalNode
-
- // IsOptExprContext differentiates from other interfaces.
- IsOptExprContext()
-}
-
-type OptExprContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
- opt antlr.Token
- e IExprContext
-}
-
-func NewEmptyOptExprContext() *OptExprContext {
- var p = new(OptExprContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_optExpr
- return p
-}
-
-func InitEmptyOptExprContext(p *OptExprContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_optExpr
-}
-
-func (*OptExprContext) IsOptExprContext() {}
-
-func NewOptExprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *OptExprContext {
- var p = new(OptExprContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_optExpr
-
- return p
-}
-
-func (s *OptExprContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *OptExprContext) GetOpt() antlr.Token { return s.opt }
-
-
-func (s *OptExprContext) SetOpt(v antlr.Token) { s.opt = v }
-
-
-func (s *OptExprContext) GetE() IExprContext { return s.e }
-
-
-func (s *OptExprContext) SetE(v IExprContext) { s.e = v }
-
-
-func (s *OptExprContext) Expr() IExprContext {
- var t antlr.RuleContext;
- for _, ctx := range s.GetChildren() {
- if _, ok := ctx.(IExprContext); ok {
- t = ctx.(antlr.RuleContext);
- break
- }
- }
-
- if t == nil {
- return nil
- }
-
- return t.(IExprContext)
-}
-
-func (s *OptExprContext) QUESTIONMARK() antlr.TerminalNode {
- return s.GetToken(CELParserQUESTIONMARK, 0)
-}
-
-func (s *OptExprContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *OptExprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-func (s *OptExprContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterOptExpr(s)
- }
-}
-
-func (s *OptExprContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitOptExpr(s)
- }
-}
-
-func (s *OptExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitOptExpr(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-
-func (p *CELParser) OptExpr() (localctx IOptExprContext) {
- localctx = NewOptExprContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 28, CELParserRULE_optExpr)
- var _la int
-
- p.EnterOuterAlt(localctx, 1)
- p.SetState(230)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserQUESTIONMARK {
- {
- p.SetState(229)
-
- var _m = p.Match(CELParserQUESTIONMARK)
-
- localctx.(*OptExprContext).opt = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(232)
-
- var _x = p.Expr()
-
-
- localctx.(*OptExprContext).e = _x
- }
-
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-// ILiteralContext is an interface to support dynamic dispatch.
-type ILiteralContext interface {
- antlr.ParserRuleContext
-
- // GetParser returns the parser.
- GetParser() antlr.Parser
- // IsLiteralContext differentiates from other interfaces.
- IsLiteralContext()
-}
-
-type LiteralContext struct {
- antlr.BaseParserRuleContext
- parser antlr.Parser
-}
-
-func NewEmptyLiteralContext() *LiteralContext {
- var p = new(LiteralContext)
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_literal
- return p
-}
-
-func InitEmptyLiteralContext(p *LiteralContext) {
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
- p.RuleIndex = CELParserRULE_literal
-}
-
-func (*LiteralContext) IsLiteralContext() {}
-
-func NewLiteralContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *LiteralContext {
- var p = new(LiteralContext)
-
- antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
-
- p.parser = parser
- p.RuleIndex = CELParserRULE_literal
-
- return p
-}
-
-func (s *LiteralContext) GetParser() antlr.Parser { return s.parser }
-
-func (s *LiteralContext) CopyAll(ctx *LiteralContext) {
- s.CopyFrom(&ctx.BaseParserRuleContext)
-}
-
-func (s *LiteralContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *LiteralContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
- return antlr.TreesStringTree(s, ruleNames, recog)
-}
-
-
-
-
-type BytesContext struct {
- LiteralContext
- tok antlr.Token
-}
-
-func NewBytesContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *BytesContext {
- var p = new(BytesContext)
-
- InitEmptyLiteralContext(&p.LiteralContext)
- p.parser = parser
- p.CopyAll(ctx.(*LiteralContext))
-
- return p
-}
-
-
-func (s *BytesContext) GetTok() antlr.Token { return s.tok }
-
-
-func (s *BytesContext) SetTok(v antlr.Token) { s.tok = v }
-
-func (s *BytesContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *BytesContext) BYTES() antlr.TerminalNode {
- return s.GetToken(CELParserBYTES, 0)
-}
-
-
-func (s *BytesContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterBytes(s)
- }
-}
-
-func (s *BytesContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitBytes(s)
- }
-}
-
-func (s *BytesContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitBytes(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type UintContext struct {
- LiteralContext
- tok antlr.Token
-}
-
-func NewUintContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *UintContext {
- var p = new(UintContext)
-
- InitEmptyLiteralContext(&p.LiteralContext)
- p.parser = parser
- p.CopyAll(ctx.(*LiteralContext))
-
- return p
-}
-
-
-func (s *UintContext) GetTok() antlr.Token { return s.tok }
-
-
-func (s *UintContext) SetTok(v antlr.Token) { s.tok = v }
-
-func (s *UintContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *UintContext) NUM_UINT() antlr.TerminalNode {
- return s.GetToken(CELParserNUM_UINT, 0)
-}
-
-
-func (s *UintContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterUint(s)
- }
-}
-
-func (s *UintContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitUint(s)
- }
-}
-
-func (s *UintContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitUint(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type NullContext struct {
- LiteralContext
- tok antlr.Token
-}
-
-func NewNullContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *NullContext {
- var p = new(NullContext)
-
- InitEmptyLiteralContext(&p.LiteralContext)
- p.parser = parser
- p.CopyAll(ctx.(*LiteralContext))
-
- return p
-}
-
-
-func (s *NullContext) GetTok() antlr.Token { return s.tok }
-
-
-func (s *NullContext) SetTok(v antlr.Token) { s.tok = v }
-
-func (s *NullContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *NullContext) NUL() antlr.TerminalNode {
- return s.GetToken(CELParserNUL, 0)
-}
-
-
-func (s *NullContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterNull(s)
- }
-}
-
-func (s *NullContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitNull(s)
- }
-}
-
-func (s *NullContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitNull(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type BoolFalseContext struct {
- LiteralContext
- tok antlr.Token
-}
-
-func NewBoolFalseContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *BoolFalseContext {
- var p = new(BoolFalseContext)
-
- InitEmptyLiteralContext(&p.LiteralContext)
- p.parser = parser
- p.CopyAll(ctx.(*LiteralContext))
-
- return p
-}
-
-
-func (s *BoolFalseContext) GetTok() antlr.Token { return s.tok }
-
-
-func (s *BoolFalseContext) SetTok(v antlr.Token) { s.tok = v }
-
-func (s *BoolFalseContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *BoolFalseContext) CEL_FALSE() antlr.TerminalNode {
- return s.GetToken(CELParserCEL_FALSE, 0)
-}
-
-
-func (s *BoolFalseContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterBoolFalse(s)
- }
-}
-
-func (s *BoolFalseContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitBoolFalse(s)
- }
-}
-
-func (s *BoolFalseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitBoolFalse(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type StringContext struct {
- LiteralContext
- tok antlr.Token
-}
-
-func NewStringContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *StringContext {
- var p = new(StringContext)
-
- InitEmptyLiteralContext(&p.LiteralContext)
- p.parser = parser
- p.CopyAll(ctx.(*LiteralContext))
-
- return p
-}
-
-
-func (s *StringContext) GetTok() antlr.Token { return s.tok }
-
-
-func (s *StringContext) SetTok(v antlr.Token) { s.tok = v }
-
-func (s *StringContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *StringContext) STRING() antlr.TerminalNode {
- return s.GetToken(CELParserSTRING, 0)
-}
-
-
-func (s *StringContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterString(s)
- }
-}
-
-func (s *StringContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitString(s)
- }
-}
-
-func (s *StringContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitString(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type DoubleContext struct {
- LiteralContext
- sign antlr.Token
- tok antlr.Token
-}
-
-func NewDoubleContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *DoubleContext {
- var p = new(DoubleContext)
-
- InitEmptyLiteralContext(&p.LiteralContext)
- p.parser = parser
- p.CopyAll(ctx.(*LiteralContext))
-
- return p
-}
-
-
-func (s *DoubleContext) GetSign() antlr.Token { return s.sign }
-
-func (s *DoubleContext) GetTok() antlr.Token { return s.tok }
-
-
-func (s *DoubleContext) SetSign(v antlr.Token) { s.sign = v }
-
-func (s *DoubleContext) SetTok(v antlr.Token) { s.tok = v }
-
-func (s *DoubleContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *DoubleContext) NUM_FLOAT() antlr.TerminalNode {
- return s.GetToken(CELParserNUM_FLOAT, 0)
-}
-
-func (s *DoubleContext) MINUS() antlr.TerminalNode {
- return s.GetToken(CELParserMINUS, 0)
-}
-
-
-func (s *DoubleContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterDouble(s)
- }
-}
-
-func (s *DoubleContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitDouble(s)
- }
-}
-
-func (s *DoubleContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitDouble(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type BoolTrueContext struct {
- LiteralContext
- tok antlr.Token
-}
-
-func NewBoolTrueContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *BoolTrueContext {
- var p = new(BoolTrueContext)
-
- InitEmptyLiteralContext(&p.LiteralContext)
- p.parser = parser
- p.CopyAll(ctx.(*LiteralContext))
-
- return p
-}
-
-
-func (s *BoolTrueContext) GetTok() antlr.Token { return s.tok }
-
-
-func (s *BoolTrueContext) SetTok(v antlr.Token) { s.tok = v }
-
-func (s *BoolTrueContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *BoolTrueContext) CEL_TRUE() antlr.TerminalNode {
- return s.GetToken(CELParserCEL_TRUE, 0)
-}
-
-
-func (s *BoolTrueContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterBoolTrue(s)
- }
-}
-
-func (s *BoolTrueContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitBoolTrue(s)
- }
-}
-
-func (s *BoolTrueContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitBoolTrue(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-type IntContext struct {
- LiteralContext
- sign antlr.Token
- tok antlr.Token
-}
-
-func NewIntContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *IntContext {
- var p = new(IntContext)
-
- InitEmptyLiteralContext(&p.LiteralContext)
- p.parser = parser
- p.CopyAll(ctx.(*LiteralContext))
-
- return p
-}
-
-
-func (s *IntContext) GetSign() antlr.Token { return s.sign }
-
-func (s *IntContext) GetTok() antlr.Token { return s.tok }
-
-
-func (s *IntContext) SetSign(v antlr.Token) { s.sign = v }
-
-func (s *IntContext) SetTok(v antlr.Token) { s.tok = v }
-
-func (s *IntContext) GetRuleContext() antlr.RuleContext {
- return s
-}
-
-func (s *IntContext) NUM_INT() antlr.TerminalNode {
- return s.GetToken(CELParserNUM_INT, 0)
-}
-
-func (s *IntContext) MINUS() antlr.TerminalNode {
- return s.GetToken(CELParserMINUS, 0)
-}
-
-
-func (s *IntContext) EnterRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.EnterInt(s)
- }
-}
-
-func (s *IntContext) ExitRule(listener antlr.ParseTreeListener) {
- if listenerT, ok := listener.(CELListener); ok {
- listenerT.ExitInt(s)
- }
-}
-
-func (s *IntContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
- switch t := visitor.(type) {
- case CELVisitor:
- return t.VisitInt(s)
-
- default:
- return t.VisitChildren(s)
- }
-}
-
-
-
-func (p *CELParser) Literal() (localctx ILiteralContext) {
- localctx = NewLiteralContext(p, p.GetParserRuleContext(), p.GetState())
- p.EnterRule(localctx, 30, CELParserRULE_literal)
- var _la int
-
- p.SetState(248)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
-
- switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 34, p.GetParserRuleContext()) {
- case 1:
- localctx = NewIntContext(p, localctx)
- p.EnterOuterAlt(localctx, 1)
- p.SetState(235)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserMINUS {
- {
- p.SetState(234)
-
- var _m = p.Match(CELParserMINUS)
-
- localctx.(*IntContext).sign = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(237)
-
- var _m = p.Match(CELParserNUM_INT)
-
- localctx.(*IntContext).tok = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 2:
- localctx = NewUintContext(p, localctx)
- p.EnterOuterAlt(localctx, 2)
- {
- p.SetState(238)
-
- var _m = p.Match(CELParserNUM_UINT)
-
- localctx.(*UintContext).tok = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 3:
- localctx = NewDoubleContext(p, localctx)
- p.EnterOuterAlt(localctx, 3)
- p.SetState(240)
- p.GetErrorHandler().Sync(p)
- if p.HasError() {
- goto errorExit
- }
- _la = p.GetTokenStream().LA(1)
-
-
- if _la == CELParserMINUS {
- {
- p.SetState(239)
-
- var _m = p.Match(CELParserMINUS)
-
- localctx.(*DoubleContext).sign = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- }
- {
- p.SetState(242)
-
- var _m = p.Match(CELParserNUM_FLOAT)
-
- localctx.(*DoubleContext).tok = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 4:
- localctx = NewStringContext(p, localctx)
- p.EnterOuterAlt(localctx, 4)
- {
- p.SetState(243)
-
- var _m = p.Match(CELParserSTRING)
-
- localctx.(*StringContext).tok = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 5:
- localctx = NewBytesContext(p, localctx)
- p.EnterOuterAlt(localctx, 5)
- {
- p.SetState(244)
-
- var _m = p.Match(CELParserBYTES)
-
- localctx.(*BytesContext).tok = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 6:
- localctx = NewBoolTrueContext(p, localctx)
- p.EnterOuterAlt(localctx, 6)
- {
- p.SetState(245)
-
- var _m = p.Match(CELParserCEL_TRUE)
-
- localctx.(*BoolTrueContext).tok = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 7:
- localctx = NewBoolFalseContext(p, localctx)
- p.EnterOuterAlt(localctx, 7)
- {
- p.SetState(246)
-
- var _m = p.Match(CELParserCEL_FALSE)
-
- localctx.(*BoolFalseContext).tok = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
-
- case 8:
- localctx = NewNullContext(p, localctx)
- p.EnterOuterAlt(localctx, 8)
- {
- p.SetState(247)
-
- var _m = p.Match(CELParserNUL)
-
- localctx.(*NullContext).tok = _m
- if p.HasError() {
- // Recognition error - abort rule
- goto errorExit
- }
- }
-
- case antlr.ATNInvalidAltNumber:
- goto errorExit
- }
-
-
-errorExit:
- if p.HasError() {
- v := p.GetError()
- localctx.SetException(v)
- p.GetErrorHandler().ReportError(p, v)
- p.GetErrorHandler().Recover(p, v)
- p.SetError(nil)
- }
- p.ExitRule()
- return localctx
- goto errorExit // Trick to prevent compiler error if the label is not used
-}
-
-
-func (p *CELParser) Sempred(localctx antlr.RuleContext, ruleIndex, predIndex int) bool {
- switch ruleIndex {
- case 4:
- var t *RelationContext = nil
- if localctx != nil { t = localctx.(*RelationContext) }
- return p.Relation_Sempred(t, predIndex)
-
- case 5:
- var t *CalcContext = nil
- if localctx != nil { t = localctx.(*CalcContext) }
- return p.Calc_Sempred(t, predIndex)
-
- case 7:
- var t *MemberContext = nil
- if localctx != nil { t = localctx.(*MemberContext) }
- return p.Member_Sempred(t, predIndex)
-
-
- default:
- panic("No predicate with index: " + fmt.Sprint(ruleIndex))
- }
-}
-
-func (p *CELParser) Relation_Sempred(localctx antlr.RuleContext, predIndex int) bool {
- switch predIndex {
- case 0:
- return p.Precpred(p.GetParserRuleContext(), 1)
-
- default:
- panic("No predicate with index: " + fmt.Sprint(predIndex))
- }
-}
-
-func (p *CELParser) Calc_Sempred(localctx antlr.RuleContext, predIndex int) bool {
- switch predIndex {
- case 1:
- return p.Precpred(p.GetParserRuleContext(), 2)
-
- case 2:
- return p.Precpred(p.GetParserRuleContext(), 1)
-
- default:
- panic("No predicate with index: " + fmt.Sprint(predIndex))
- }
-}
-
-func (p *CELParser) Member_Sempred(localctx antlr.RuleContext, predIndex int) bool {
- switch predIndex {
- case 3:
- return p.Precpred(p.GetParserRuleContext(), 3)
-
- case 4:
- return p.Precpred(p.GetParserRuleContext(), 2)
-
- case 5:
- return p.Precpred(p.GetParserRuleContext(), 1)
-
- default:
- panic("No predicate with index: " + fmt.Sprint(predIndex))
- }
-}
-
diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_visitor.go b/vendor/github.com/google/cel-go/parser/gen/cel_visitor.go
deleted file mode 100644
index d2fbd563a..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/cel_visitor.go
+++ /dev/null
@@ -1,110 +0,0 @@
-// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT.
-
-package gen // CEL
-import "github.com/antlr4-go/antlr/v4"
-
-
-// A complete Visitor for a parse tree produced by CELParser.
-type CELVisitor interface {
- antlr.ParseTreeVisitor
-
- // Visit a parse tree produced by CELParser#start.
- VisitStart(ctx *StartContext) interface{}
-
- // Visit a parse tree produced by CELParser#expr.
- VisitExpr(ctx *ExprContext) interface{}
-
- // Visit a parse tree produced by CELParser#conditionalOr.
- VisitConditionalOr(ctx *ConditionalOrContext) interface{}
-
- // Visit a parse tree produced by CELParser#conditionalAnd.
- VisitConditionalAnd(ctx *ConditionalAndContext) interface{}
-
- // Visit a parse tree produced by CELParser#relation.
- VisitRelation(ctx *RelationContext) interface{}
-
- // Visit a parse tree produced by CELParser#calc.
- VisitCalc(ctx *CalcContext) interface{}
-
- // Visit a parse tree produced by CELParser#MemberExpr.
- VisitMemberExpr(ctx *MemberExprContext) interface{}
-
- // Visit a parse tree produced by CELParser#LogicalNot.
- VisitLogicalNot(ctx *LogicalNotContext) interface{}
-
- // Visit a parse tree produced by CELParser#Negate.
- VisitNegate(ctx *NegateContext) interface{}
-
- // Visit a parse tree produced by CELParser#MemberCall.
- VisitMemberCall(ctx *MemberCallContext) interface{}
-
- // Visit a parse tree produced by CELParser#Select.
- VisitSelect(ctx *SelectContext) interface{}
-
- // Visit a parse tree produced by CELParser#PrimaryExpr.
- VisitPrimaryExpr(ctx *PrimaryExprContext) interface{}
-
- // Visit a parse tree produced by CELParser#Index.
- VisitIndex(ctx *IndexContext) interface{}
-
- // Visit a parse tree produced by CELParser#IdentOrGlobalCall.
- VisitIdentOrGlobalCall(ctx *IdentOrGlobalCallContext) interface{}
-
- // Visit a parse tree produced by CELParser#Nested.
- VisitNested(ctx *NestedContext) interface{}
-
- // Visit a parse tree produced by CELParser#CreateList.
- VisitCreateList(ctx *CreateListContext) interface{}
-
- // Visit a parse tree produced by CELParser#CreateStruct.
- VisitCreateStruct(ctx *CreateStructContext) interface{}
-
- // Visit a parse tree produced by CELParser#CreateMessage.
- VisitCreateMessage(ctx *CreateMessageContext) interface{}
-
- // Visit a parse tree produced by CELParser#ConstantLiteral.
- VisitConstantLiteral(ctx *ConstantLiteralContext) interface{}
-
- // Visit a parse tree produced by CELParser#exprList.
- VisitExprList(ctx *ExprListContext) interface{}
-
- // Visit a parse tree produced by CELParser#listInit.
- VisitListInit(ctx *ListInitContext) interface{}
-
- // Visit a parse tree produced by CELParser#fieldInitializerList.
- VisitFieldInitializerList(ctx *FieldInitializerListContext) interface{}
-
- // Visit a parse tree produced by CELParser#optField.
- VisitOptField(ctx *OptFieldContext) interface{}
-
- // Visit a parse tree produced by CELParser#mapInitializerList.
- VisitMapInitializerList(ctx *MapInitializerListContext) interface{}
-
- // Visit a parse tree produced by CELParser#optExpr.
- VisitOptExpr(ctx *OptExprContext) interface{}
-
- // Visit a parse tree produced by CELParser#Int.
- VisitInt(ctx *IntContext) interface{}
-
- // Visit a parse tree produced by CELParser#Uint.
- VisitUint(ctx *UintContext) interface{}
-
- // Visit a parse tree produced by CELParser#Double.
- VisitDouble(ctx *DoubleContext) interface{}
-
- // Visit a parse tree produced by CELParser#String.
- VisitString(ctx *StringContext) interface{}
-
- // Visit a parse tree produced by CELParser#Bytes.
- VisitBytes(ctx *BytesContext) interface{}
-
- // Visit a parse tree produced by CELParser#BoolTrue.
- VisitBoolTrue(ctx *BoolTrueContext) interface{}
-
- // Visit a parse tree produced by CELParser#BoolFalse.
- VisitBoolFalse(ctx *BoolFalseContext) interface{}
-
- // Visit a parse tree produced by CELParser#Null.
- VisitNull(ctx *NullContext) interface{}
-
-}
\ No newline at end of file
diff --git a/vendor/github.com/google/cel-go/parser/gen/doc.go b/vendor/github.com/google/cel-go/parser/gen/doc.go
deleted file mode 100644
index 57edd4434..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/doc.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package gen contains all of the ANTLR-generated sources used by the cel-go parser.
-package gen
diff --git a/vendor/github.com/google/cel-go/parser/gen/generate.sh b/vendor/github.com/google/cel-go/parser/gen/generate.sh
deleted file mode 100644
index 27a9559f7..000000000
--- a/vendor/github.com/google/cel-go/parser/gen/generate.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash -eu
-#
-# Copyright 2018 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# To regenerate the CEL lexer/parser statically, do the following:
-# 1. Download the latest antlr tool from https://www.antlr.org/download.html
-# 2. Copy the downloaded jar to the gen directory. It will have a name
-#    like antlr-<version>-complete.jar.
-# 3. Modify the script below to refer to the current ANTLR version.
-# 4. Execute the generation script from the gen directory.
-# 5. Delete the jar and commit the regenerated sources.
-
-#!/bin/sh
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-# Generate AntLR artifacts.
-java -Xmx500M -cp ${DIR}/antlr-4.13.1-complete.jar org.antlr.v4.Tool \
- -Dlanguage=Go \
- -package gen \
- -o ${DIR} \
- -visitor ${DIR}/CEL.g4
-
diff --git a/vendor/github.com/google/cel-go/parser/helper.go b/vendor/github.com/google/cel-go/parser/helper.go
deleted file mode 100644
index 182ff034c..000000000
--- a/vendor/github.com/google/cel-go/parser/helper.go
+++ /dev/null
@@ -1,474 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package parser
-
-import (
- "sync"
-
- antlr "github.com/antlr4-go/antlr/v4"
-
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-type parserHelper struct {
- exprFactory ast.ExprFactory
- source common.Source
- sourceInfo *ast.SourceInfo
- nextID int64
-}
-
-func newParserHelper(source common.Source, fac ast.ExprFactory) *parserHelper {
- return &parserHelper{
- exprFactory: fac,
- source: source,
- sourceInfo: ast.NewSourceInfo(source),
- nextID: 1,
- }
-}
-
-func (p *parserHelper) getSourceInfo() *ast.SourceInfo {
- return p.sourceInfo
-}
-
-func (p *parserHelper) newLiteral(ctx any, value ref.Val) ast.Expr {
- return p.exprFactory.NewLiteral(p.newID(ctx), value)
-}
-
-func (p *parserHelper) newLiteralBool(ctx any, value bool) ast.Expr {
- return p.newLiteral(ctx, types.Bool(value))
-}
-
-func (p *parserHelper) newLiteralString(ctx any, value string) ast.Expr {
- return p.newLiteral(ctx, types.String(value))
-}
-
-func (p *parserHelper) newLiteralBytes(ctx any, value []byte) ast.Expr {
- return p.newLiteral(ctx, types.Bytes(value))
-}
-
-func (p *parserHelper) newLiteralInt(ctx any, value int64) ast.Expr {
- return p.newLiteral(ctx, types.Int(value))
-}
-
-func (p *parserHelper) newLiteralUint(ctx any, value uint64) ast.Expr {
- return p.newLiteral(ctx, types.Uint(value))
-}
-
-func (p *parserHelper) newLiteralDouble(ctx any, value float64) ast.Expr {
- return p.newLiteral(ctx, types.Double(value))
-}
-
-func (p *parserHelper) newIdent(ctx any, name string) ast.Expr {
- return p.exprFactory.NewIdent(p.newID(ctx), name)
-}
-
-func (p *parserHelper) newSelect(ctx any, operand ast.Expr, field string) ast.Expr {
- return p.exprFactory.NewSelect(p.newID(ctx), operand, field)
-}
-
-func (p *parserHelper) newPresenceTest(ctx any, operand ast.Expr, field string) ast.Expr {
- return p.exprFactory.NewPresenceTest(p.newID(ctx), operand, field)
-}
-
-func (p *parserHelper) newGlobalCall(ctx any, function string, args ...ast.Expr) ast.Expr {
- return p.exprFactory.NewCall(p.newID(ctx), function, args...)
-}
-
-func (p *parserHelper) newReceiverCall(ctx any, function string, target ast.Expr, args ...ast.Expr) ast.Expr {
- return p.exprFactory.NewMemberCall(p.newID(ctx), function, target, args...)
-}
-
-func (p *parserHelper) newList(ctx any, elements []ast.Expr, optionals ...int32) ast.Expr {
- return p.exprFactory.NewList(p.newID(ctx), elements, optionals)
-}
-
-func (p *parserHelper) newMap(ctx any, entries ...ast.EntryExpr) ast.Expr {
- return p.exprFactory.NewMap(p.newID(ctx), entries)
-}
-
-func (p *parserHelper) newMapEntry(entryID int64, key ast.Expr, value ast.Expr, optional bool) ast.EntryExpr {
- return p.exprFactory.NewMapEntry(entryID, key, value, optional)
-}
-
-func (p *parserHelper) newObject(ctx any, typeName string, fields ...ast.EntryExpr) ast.Expr {
- return p.exprFactory.NewStruct(p.newID(ctx), typeName, fields)
-}
-
-func (p *parserHelper) newObjectField(fieldID int64, field string, value ast.Expr, optional bool) ast.EntryExpr {
- return p.exprFactory.NewStructField(fieldID, field, value, optional)
-}
-
-func (p *parserHelper) newComprehension(ctx any,
- iterRange ast.Expr,
- iterVar string,
- accuVar string,
- accuInit ast.Expr,
- condition ast.Expr,
- step ast.Expr,
- result ast.Expr) ast.Expr {
- return p.exprFactory.NewComprehension(
- p.newID(ctx), iterRange, iterVar, accuVar, accuInit, condition, step, result)
-}
-
-func (p *parserHelper) newID(ctx any) int64 {
- if id, isID := ctx.(int64); isID {
- return id
- }
- return p.id(ctx)
-}
-
-func (p *parserHelper) newExpr(ctx any) ast.Expr {
- return p.exprFactory.NewUnspecifiedExpr(p.newID(ctx))
-}
-
-func (p *parserHelper) id(ctx any) int64 {
- var offset ast.OffsetRange
- switch c := ctx.(type) {
- case antlr.ParserRuleContext:
- start, stop := c.GetStart(), c.GetStop()
- if stop == nil {
- stop = start
- }
- offset.Start = p.sourceInfo.ComputeOffset(int32(start.GetLine()), int32(start.GetColumn()))
- offset.Stop = p.sourceInfo.ComputeOffset(int32(stop.GetLine()), int32(stop.GetColumn()))
- case antlr.Token:
- offset.Start = p.sourceInfo.ComputeOffset(int32(c.GetLine()), int32(c.GetColumn()))
- offset.Stop = offset.Start
- case common.Location:
- offset.Start = p.sourceInfo.ComputeOffset(int32(c.Line()), int32(c.Column()))
- offset.Stop = offset.Start
- case ast.OffsetRange:
- offset = c
- default:
- // This should only happen if the ctx is nil
- return -1
- }
- id := p.nextID
- p.sourceInfo.SetOffsetRange(id, offset)
- p.nextID++
- return id
-}
-
-func (p *parserHelper) getLocation(id int64) common.Location {
- return p.sourceInfo.GetStartLocation(id)
-}
-
-// buildMacroCallArg iterates the expression and returns a new expression
-// where all macros have been replaced by their IDs in MacroCalls
-func (p *parserHelper) buildMacroCallArg(expr ast.Expr) ast.Expr {
- if _, found := p.sourceInfo.GetMacroCall(expr.ID()); found {
- return p.exprFactory.NewUnspecifiedExpr(expr.ID())
- }
-
- switch expr.Kind() {
- case ast.CallKind:
- // Iterate the AST from `expr` recursively looking for macros. Because we are at most
- // starting from the top level macro, this recursion is bounded by the size of the AST. This
- // means that the depth check on the AST during parsing will catch recursion overflows
- // before we get to here.
- call := expr.AsCall()
- macroArgs := make([]ast.Expr, len(call.Args()))
- for index, arg := range call.Args() {
- macroArgs[index] = p.buildMacroCallArg(arg)
- }
- if !call.IsMemberFunction() {
- return p.exprFactory.NewCall(expr.ID(), call.FunctionName(), macroArgs...)
- }
- macroTarget := p.buildMacroCallArg(call.Target())
- return p.exprFactory.NewMemberCall(expr.ID(), call.FunctionName(), macroTarget, macroArgs...)
- case ast.ListKind:
- list := expr.AsList()
- macroListArgs := make([]ast.Expr, list.Size())
- for i, elem := range list.Elements() {
- macroListArgs[i] = p.buildMacroCallArg(elem)
- }
- return p.exprFactory.NewList(expr.ID(), macroListArgs, list.OptionalIndices())
- }
- return expr
-}
-
-// addMacroCall adds the macro to the MacroCalls map in source info. If a macro has args/subargs/target
-// that are macros, their ID will be stored instead for later self-lookups.
-func (p *parserHelper) addMacroCall(exprID int64, function string, target ast.Expr, args ...ast.Expr) {
- macroArgs := make([]ast.Expr, len(args))
- for index, arg := range args {
- macroArgs[index] = p.buildMacroCallArg(arg)
- }
- if target == nil {
- p.sourceInfo.SetMacroCall(exprID, p.exprFactory.NewCall(0, function, macroArgs...))
- return
- }
- macroTarget := target
- if _, found := p.sourceInfo.GetMacroCall(target.ID()); found {
- macroTarget = p.exprFactory.NewUnspecifiedExpr(target.ID())
- } else {
- macroTarget = p.buildMacroCallArg(target)
- }
- p.sourceInfo.SetMacroCall(exprID, p.exprFactory.NewMemberCall(0, function, macroTarget, macroArgs...))
-}
-
-// logicManager compacts logical trees into a more efficient structure which is semantically
-// equivalent to how the logic graph is constructed by the ANTLR parser.
-//
-// The purpose of the logicManager is to ensure a compact serialization format for the logical &&, ||
-// operators, which have a tendency to create long DAGs skewed in one direction. Since the
-// operators are commutative, re-ordering the terms *must not* affect the evaluation result.
-//
-// The logic manager will either render N chained && / || terms as a single logical call with
-// N arguments, or will rebalance the tree. Rebalancing the terms is a safe, if somewhat
-// controversial, choice as it alters the traditional order-of-execution assumptions present in most
-// expressions.
-type logicManager struct {
- exprFactory ast.ExprFactory
- function string
- terms []ast.Expr
- ops []int64
- variadicASTs bool
-}
-
-// newVariadicLogicManager creates a logic manager instance bound to a specific function and its first term.
-func newVariadicLogicManager(fac ast.ExprFactory, function string, term ast.Expr) *logicManager {
- return &logicManager{
- exprFactory: fac,
- function: function,
- terms: []ast.Expr{term},
- ops: []int64{},
- variadicASTs: true,
- }
-}
-
-// newBalancingLogicManager creates a logic manager instance bound to a specific function and its first term.
-func newBalancingLogicManager(fac ast.ExprFactory, function string, term ast.Expr) *logicManager {
- return &logicManager{
- exprFactory: fac,
- function: function,
- terms: []ast.Expr{term},
- ops: []int64{},
- variadicASTs: false,
- }
-}
-
-// addTerm adds an operation identifier and term to the set of terms to be balanced.
-func (l *logicManager) addTerm(op int64, term ast.Expr) {
- l.terms = append(l.terms, term)
- l.ops = append(l.ops, op)
-}
-
-// toExpr renders the logic graph into an Expr value, either balancing a tree of logical
-// operations or creating a variadic representation of the logical operator.
-func (l *logicManager) toExpr() ast.Expr {
- if len(l.terms) == 1 {
- return l.terms[0]
- }
- if l.variadicASTs {
- return l.exprFactory.NewCall(l.ops[0], l.function, l.terms...)
- }
- return l.balancedTree(0, len(l.ops)-1)
-}
-
-// balancedTree recursively balances the terms provided to a commutative operator.
-func (l *logicManager) balancedTree(lo, hi int) ast.Expr {
- mid := (lo + hi + 1) / 2
-
- var left ast.Expr
- if mid == lo {
- left = l.terms[mid]
- } else {
- left = l.balancedTree(lo, mid-1)
- }
-
- var right ast.Expr
- if mid == hi {
- right = l.terms[mid+1]
- } else {
- right = l.balancedTree(mid+1, hi)
- }
- return l.exprFactory.NewCall(l.ops[mid], l.function, left, right)
-}
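Editor's note: the midpoint split performed by balancedTree above is easier to see on a concrete input. The following standalone sketch is an illustration only (it is not part of the vendored file); it mirrors the same recursion over plain strings, and the function name balanced is invented for the example.

package main

import "fmt"

// balanced mirrors logicManager.balancedTree: it picks the midpoint operator
// position and recurses on each half, producing a roughly height-balanced
// binary tree instead of a left-skewed chain of || calls.
func balanced(terms []string, lo, hi int) string {
	mid := (lo + hi + 1) / 2

	var left string
	if mid == lo {
		left = terms[mid]
	} else {
		left = balanced(terms, lo, mid-1)
	}

	var right string
	if mid == hi {
		right = terms[mid+1]
	} else {
		right = balanced(terms, mid+1, hi)
	}
	return "(" + left + " || " + right + ")"
}

func main() {
	terms := []string{"a", "b", "c", "d"}
	// Three operators join four terms, so lo=0 and hi=len(terms)-2, just as
	// toExpr calls balancedTree(0, len(l.ops)-1).
	fmt.Println(balanced(terms, 0, len(terms)-2))
	// Output: ((a || b) || (c || d))
}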
-
-type exprHelper struct {
- *parserHelper
- id int64
-}
-
-func (e *exprHelper) nextMacroID() int64 {
- return e.parserHelper.id(e.parserHelper.getLocation(e.id))
-}
-
-// Copy implements the ExprHelper interface method by producing a copy of the input Expr value
-// with a fresh set of numeric identifiers for the Expr and all its descendants.
-func (e *exprHelper) Copy(expr ast.Expr) ast.Expr {
- offsetRange, _ := e.parserHelper.sourceInfo.GetOffsetRange(expr.ID())
- copyID := e.parserHelper.newID(offsetRange)
- switch expr.Kind() {
- case ast.LiteralKind:
- return e.exprFactory.NewLiteral(copyID, expr.AsLiteral())
- case ast.IdentKind:
- return e.exprFactory.NewIdent(copyID, expr.AsIdent())
- case ast.SelectKind:
- sel := expr.AsSelect()
- op := e.Copy(sel.Operand())
- if sel.IsTestOnly() {
- return e.exprFactory.NewPresenceTest(copyID, op, sel.FieldName())
- }
- return e.exprFactory.NewSelect(copyID, op, sel.FieldName())
- case ast.CallKind:
- call := expr.AsCall()
- args := call.Args()
- argsCopy := make([]ast.Expr, len(args))
- for i, arg := range args {
- argsCopy[i] = e.Copy(arg)
- }
- if !call.IsMemberFunction() {
- return e.exprFactory.NewCall(copyID, call.FunctionName(), argsCopy...)
- }
- return e.exprFactory.NewMemberCall(copyID, call.FunctionName(), e.Copy(call.Target()), argsCopy...)
- case ast.ListKind:
- list := expr.AsList()
- elems := list.Elements()
- elemsCopy := make([]ast.Expr, len(elems))
- for i, elem := range elems {
- elemsCopy[i] = e.Copy(elem)
- }
- return e.exprFactory.NewList(copyID, elemsCopy, list.OptionalIndices())
- case ast.MapKind:
- m := expr.AsMap()
- entries := m.Entries()
- entriesCopy := make([]ast.EntryExpr, len(entries))
- for i, en := range entries {
- entry := en.AsMapEntry()
- entryID := e.nextMacroID()
- entriesCopy[i] = e.exprFactory.NewMapEntry(entryID,
- e.Copy(entry.Key()), e.Copy(entry.Value()), entry.IsOptional())
- }
- return e.exprFactory.NewMap(copyID, entriesCopy)
- case ast.StructKind:
- s := expr.AsStruct()
- fields := s.Fields()
- fieldsCopy := make([]ast.EntryExpr, len(fields))
- for i, f := range fields {
- field := f.AsStructField()
- fieldID := e.nextMacroID()
- fieldsCopy[i] = e.exprFactory.NewStructField(fieldID,
- field.Name(), e.Copy(field.Value()), field.IsOptional())
- }
- return e.exprFactory.NewStruct(copyID, s.TypeName(), fieldsCopy)
- case ast.ComprehensionKind:
- compre := expr.AsComprehension()
- iterRange := e.Copy(compre.IterRange())
- accuInit := e.Copy(compre.AccuInit())
- cond := e.Copy(compre.LoopCondition())
- step := e.Copy(compre.LoopStep())
- result := e.Copy(compre.Result())
- return e.exprFactory.NewComprehension(copyID,
- iterRange, compre.IterVar(), compre.AccuVar(), accuInit, cond, step, result)
- }
- return e.exprFactory.NewUnspecifiedExpr(copyID)
-}
-
-// NewLiteral implements the ExprHelper interface method.
-func (e *exprHelper) NewLiteral(value ref.Val) ast.Expr {
- return e.exprFactory.NewLiteral(e.nextMacroID(), value)
-}
-
-// NewList implements the ExprHelper interface method.
-func (e *exprHelper) NewList(elems ...ast.Expr) ast.Expr {
- return e.exprFactory.NewList(e.nextMacroID(), elems, []int32{})
-}
-
-// NewMap implements the ExprHelper interface method.
-func (e *exprHelper) NewMap(entries ...ast.EntryExpr) ast.Expr {
- return e.exprFactory.NewMap(e.nextMacroID(), entries)
-}
-
-// NewMapEntry implements the ExprHelper interface method.
-func (e *exprHelper) NewMapEntry(key ast.Expr, val ast.Expr, optional bool) ast.EntryExpr {
- return e.exprFactory.NewMapEntry(e.nextMacroID(), key, val, optional)
-}
-
-// NewStruct implements the ExprHelper interface method.
-func (e *exprHelper) NewStruct(typeName string, fieldInits ...ast.EntryExpr) ast.Expr {
- return e.exprFactory.NewStruct(e.nextMacroID(), typeName, fieldInits)
-}
-
-// NewStructField implements the ExprHelper interface method.
-func (e *exprHelper) NewStructField(field string, init ast.Expr, optional bool) ast.EntryExpr {
- return e.exprFactory.NewStructField(e.nextMacroID(), field, init, optional)
-}
-
-// NewComprehension implements the ExprHelper interface method.
-func (e *exprHelper) NewComprehension(
- iterRange ast.Expr,
- iterVar string,
- accuVar string,
- accuInit ast.Expr,
- condition ast.Expr,
- step ast.Expr,
- result ast.Expr) ast.Expr {
- return e.exprFactory.NewComprehension(
- e.nextMacroID(), iterRange, iterVar, accuVar, accuInit, condition, step, result)
-}
-
-// NewIdent implements the ExprHelper interface method.
-func (e *exprHelper) NewIdent(name string) ast.Expr {
- return e.exprFactory.NewIdent(e.nextMacroID(), name)
-}
-
-// NewAccuIdent implements the ExprHelper interface method.
-func (e *exprHelper) NewAccuIdent() ast.Expr {
- return e.exprFactory.NewAccuIdent(e.nextMacroID())
-}
-
-// NewGlobalCall implements the ExprHelper interface method.
-func (e *exprHelper) NewCall(function string, args ...ast.Expr) ast.Expr {
- return e.exprFactory.NewCall(e.nextMacroID(), function, args...)
-}
-
-// NewMemberCall implements the ExprHelper interface method.
-func (e *exprHelper) NewMemberCall(function string, target ast.Expr, args ...ast.Expr) ast.Expr {
- return e.exprFactory.NewMemberCall(e.nextMacroID(), function, target, args...)
-}
-
-// NewPresenceTest implements the ExprHelper interface method.
-func (e *exprHelper) NewPresenceTest(operand ast.Expr, field string) ast.Expr {
- return e.exprFactory.NewPresenceTest(e.nextMacroID(), operand, field)
-}
-
-// NewSelect implements the ExprHelper interface method.
-func (e *exprHelper) NewSelect(operand ast.Expr, field string) ast.Expr {
- return e.exprFactory.NewSelect(e.nextMacroID(), operand, field)
-}
-
-// OffsetLocation implements the ExprHelper interface method.
-func (e *exprHelper) OffsetLocation(exprID int64) common.Location {
- return e.parserHelper.sourceInfo.GetStartLocation(exprID)
-}
-
-// NewError associates an error message with a given expression id, populating the source offset location of the error if possible.
-func (e *exprHelper) NewError(exprID int64, message string) *common.Error {
- return common.NewError(exprID, message, e.OffsetLocation(exprID))
-}
-
-var (
- // Thread-safe pool of ExprHelper values to minimize alloc overhead of ExprHelper creations.
- exprHelperPool = &sync.Pool{
- New: func() any {
- return &exprHelper{}
- },
- }
-)
diff --git a/vendor/github.com/google/cel-go/parser/input.go b/vendor/github.com/google/cel-go/parser/input.go
deleted file mode 100644
index 44792455d..000000000
--- a/vendor/github.com/google/cel-go/parser/input.go
+++ /dev/null
@@ -1,129 +0,0 @@
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package parser
-
-import (
- antlr "github.com/antlr4-go/antlr/v4"
-
- "github.com/google/cel-go/common/runes"
-)
-
-type charStream struct {
- buf runes.Buffer
- pos int
- src string
-}
-
-// Consume implements (antlr.CharStream).Consume.
-func (c *charStream) Consume() {
- if c.pos >= c.buf.Len() {
- panic("cannot consume EOF")
- }
- c.pos++
-}
-
-// LA implements (antlr.CharStream).LA.
-func (c *charStream) LA(offset int) int {
- if offset == 0 {
- return 0
- }
- if offset < 0 {
- offset++
- }
- pos := c.pos + offset - 1
- if pos < 0 || pos >= c.buf.Len() {
- return antlr.TokenEOF
- }
- return int(c.buf.Get(pos))
-}
-
-// LT mimics (*antlr.InputStream).LT.
-func (c *charStream) LT(offset int) int {
- return c.LA(offset)
-}
-
-// Mark implements (antlr.CharStream).Mark.
-func (c *charStream) Mark() int {
- return -1
-}
-
-// Release implements (antlr.CharStream).Release.
-func (c *charStream) Release(marker int) {}
-
-// Index implements (antlr.CharStream).Index.
-func (c *charStream) Index() int {
- return c.pos
-}
-
-// Seek implements (antlr.CharStream).Seek.
-func (c *charStream) Seek(index int) {
- if index <= c.pos {
- c.pos = index
- return
- }
- if index < c.buf.Len() {
- c.pos = index
- } else {
- c.pos = c.buf.Len()
- }
-}
-
-// Size implements (antlr.CharStream).Size.
-func (c *charStream) Size() int {
- return c.buf.Len()
-}
-
-// GetSourceName implements (antlr.CharStream).GetSourceName.
-func (c *charStream) GetSourceName() string {
- return c.src
-}
-
-// GetText implements (antlr.CharStream).GetText.
-func (c *charStream) GetText(start, stop int) string {
- if stop >= c.buf.Len() {
- stop = c.buf.Len() - 1
- }
- if start >= c.buf.Len() {
- return ""
- }
- return c.buf.Slice(start, stop+1)
-}
-
-// GetTextFromTokens implements (antlr.CharStream).GetTextFromTokens.
-func (c *charStream) GetTextFromTokens(start, stop antlr.Token) string {
- if start != nil && stop != nil {
- return c.GetText(start.GetTokenIndex(), stop.GetTokenIndex())
- }
- return ""
-}
-
-// GetTextFromInterval implements (antlr.CharStream).GetTextFromInterval.
-func (c *charStream) GetTextFromInterval(i antlr.Interval) string {
- return c.GetText(i.Start, i.Stop)
-}
-
-// String mimics (*antlr.InputStream).String.
-func (c *charStream) String() string {
- return c.buf.Slice(0, c.buf.Len())
-}
-
-var _ antlr.CharStream = &charStream{}
-
-func newCharStream(buf runes.Buffer, desc string) antlr.CharStream {
- return &charStream{
- buf: buf,
- src: desc,
- }
-}
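Editor's note: a short sketch (illustration only, not part of the vendored file) of the 1-based lookahead contract implemented by charStream above. It would live inside package parser since newCharStream is unexported; runes.NewBuffer is the same constructor parser.go uses to wrap source content.

package parser

import (
	"fmt"

	antlr "github.com/antlr4-go/antlr/v4"

	"github.com/google/cel-go/common/runes"
)

// exampleCharStreamUsage exercises LA and Consume on a two-rune input.
func exampleCharStreamUsage() {
	cs := newCharStream(runes.NewBuffer("ab"), "<test>")
	fmt.Println(cs.LA(1))                   // 97, the rune 'a' at the current position
	cs.Consume()                            // advance past 'a'
	fmt.Println(cs.LA(1))                   // 98, the rune 'b'
	cs.Consume()                            // advance past 'b'
	fmt.Println(cs.LA(1) == antlr.TokenEOF) // true, the input is exhausted
}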
diff --git a/vendor/github.com/google/cel-go/parser/macro.go b/vendor/github.com/google/cel-go/parser/macro.go
deleted file mode 100644
index 5b1775bed..000000000
--- a/vendor/github.com/google/cel-go/parser/macro.go
+++ /dev/null
@@ -1,406 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package parser
-
-import (
- "fmt"
-
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/common/types/ref"
-)
-
-// NewGlobalMacro creates a Macro for a global function with the specified arg count.
-func NewGlobalMacro(function string, argCount int, expander MacroExpander) Macro {
-	return &macro{
- function: function,
- argCount: argCount,
- expander: expander}
-}
-
-// NewReceiverMacro creates a Macro for a receiver function matching the specified arg count.
-func NewReceiverMacro(function string, argCount int, expander MacroExpander) Macro {
-	return &macro{
- function: function,
- argCount: argCount,
- expander: expander,
- receiverStyle: true}
-}
-
-// NewGlobalVarArgMacro creates a Macro for a global function with a variable arg count.
-func NewGlobalVarArgMacro(function string, expander MacroExpander) Macro {
-	return &macro{
- function: function,
- expander: expander,
- varArgStyle: true}
-}
-
-// NewReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg count.
-func NewReceiverVarArgMacro(function string, expander MacroExpander) Macro {
-	return &macro{
- function: function,
- expander: expander,
- receiverStyle: true,
- varArgStyle: true}
-}
-
-// Macro interface for describing the function signature to match and the MacroExpander to apply.
-//
-// Note: when a Macro should apply to multiple overloads (based on arg count) of a given function,
-// a Macro should be created per arg-count.
-type Macro interface {
- // Function name to match.
- Function() string
-
- // ArgCount for the function call.
- //
- // When the macro is a var-arg style macro, the return value will be zero, but the MacroKey
- // will contain a `*` where the arg count would have been.
- ArgCount() int
-
- // IsReceiverStyle returns true if the macro matches a receiver style call.
- IsReceiverStyle() bool
-
- // MacroKey returns the macro signatures accepted by this macro.
- //
-// Format: `<function>:<arg-count>:<is-receiver>`.
-//
-// When the macro is a var-arg style macro, the `arg-count` value is represented as a `*`.
- MacroKey() string
-
- // Expander returns the MacroExpander to apply when the macro key matches the parsed call
- // signature.
- Expander() MacroExpander
-}
-
-// Macro type which declares the function name and arg count expected for the
-// macro, as well as a macro expansion function.
-type macro struct {
- function string
- receiverStyle bool
- varArgStyle bool
- argCount int
- expander MacroExpander
-}
-
-// Function returns the macro's function name (i.e. the function whose syntax it mimics).
-func (m *macro) Function() string {
- return m.function
-}
-
-// ArgCount returns the number of arguments the macro expects.
-func (m *macro) ArgCount() int {
- return m.argCount
-}
-
-// IsReceiverStyle returns whether the macro is receiver style.
-func (m *macro) IsReceiverStyle() bool {
- return m.receiverStyle
-}
-
-// Expander implements the Macro interface method.
-func (m *macro) Expander() MacroExpander {
- return m.expander
-}
-
-// MacroKey implements the Macro interface method.
-func (m *macro) MacroKey() string {
- if m.varArgStyle {
- return makeVarArgMacroKey(m.function, m.receiverStyle)
- }
- return makeMacroKey(m.function, m.argCount, m.receiverStyle)
-}
-
-func makeMacroKey(name string, args int, receiverStyle bool) string {
- return fmt.Sprintf("%s:%d:%v", name, args, receiverStyle)
-}
-
-func makeVarArgMacroKey(name string, receiverStyle bool) string {
- return fmt.Sprintf("%s:*:%v", name, receiverStyle)
-}
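Editor's note: given the Sprintf formats above, the two key helpers produce strings of the following shape (illustration only, not part of the vendored file; the function would live inside package parser):

package parser

// macroKeyExamples shows the key shapes produced by the helpers above.
func macroKeyExamples() []string {
	return []string{
		makeMacroKey("has", 1, false),      // "has:1:false"
		makeVarArgMacroKey("exists", true), // "exists:*:true"
	}
}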
-
-// MacroExpander converts a call and its associated arguments into a new CEL abstract syntax tree.
-//
-// If the MacroExpander determines within the implementation that an expansion is not needed it may return
-// a nil Expr value to indicate a non-match. However, if an expansion is to be performed, but the arguments
-// are not well-formed, the result of the expansion will be an error.
-//
-// The MacroExpander accepts as arguments an ExprHelper as well as the arguments used in the function call
-// and produces as output an Expr ast node.
-//
-// Note: when the Macro.IsReceiverStyle() method returns false, the target argument will be nil.
-type MacroExpander func(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error)
-
-// ExprHelper assists with the creation of Expr values in a manner which is consistent
-// with the internal semantics and id generation behaviors of the parser and checker libraries.
-type ExprHelper interface {
- // Copy the input expression with a brand new set of identifiers.
- Copy(ast.Expr) ast.Expr
-
-	// NewLiteral creates an Expr value for a scalar literal value.
- NewLiteral(value ref.Val) ast.Expr
-
- // NewList creates a list literal instruction with an optional set of elements.
- NewList(elems ...ast.Expr) ast.Expr
-
- // NewMap creates a CreateStruct instruction for a map where the map is comprised of the
- // optional set of key, value entries.
- NewMap(entries ...ast.EntryExpr) ast.Expr
-
- // NewMapEntry creates a Map Entry for the key, value pair.
- NewMapEntry(key ast.Expr, val ast.Expr, optional bool) ast.EntryExpr
-
- // NewStruct creates a struct literal expression with an optional set of field initializers.
- NewStruct(typeName string, fieldInits ...ast.EntryExpr) ast.Expr
-
- // NewStructField creates a new struct field initializer from the field name and value.
- NewStructField(field string, init ast.Expr, optional bool) ast.EntryExpr
-
- // NewComprehension creates a new comprehension instruction.
- //
- // - iterRange represents the expression that resolves to a list or map where the elements or
- // keys (respectively) will be iterated over.
- // - iterVar is the iteration variable name.
- // - accuVar is the accumulation variable name, typically parser.AccumulatorName.
- // - accuInit is the initial expression whose value will be set for the accuVar prior to
- // folding.
- // - condition is the expression to test to determine whether to continue folding.
-	// - step is the expression to evaluate at the conclusion of a single fold iteration.
- // - result is the computation to evaluate at the conclusion of the fold.
- //
- // The accuVar should not shadow variable names that you would like to reference within the
- // environment in the step and condition expressions. Presently, the name __result__ is commonly
- // used by built-in macros but this may change in the future.
- NewComprehension(iterRange ast.Expr,
- iterVar string,
- accuVar string,
- accuInit ast.Expr,
- condition ast.Expr,
- step ast.Expr,
- result ast.Expr) ast.Expr
-
- // NewIdent creates an identifier Expr value.
- NewIdent(name string) ast.Expr
-
- // NewAccuIdent returns an accumulator identifier for use with comprehension results.
- NewAccuIdent() ast.Expr
-
- // NewCall creates a function call Expr value for a global (free) function.
- NewCall(function string, args ...ast.Expr) ast.Expr
-
- // NewMemberCall creates a function call Expr value for a receiver-style function.
- NewMemberCall(function string, target ast.Expr, args ...ast.Expr) ast.Expr
-
- // NewPresenceTest creates a Select TestOnly Expr value for modelling has() semantics.
- NewPresenceTest(operand ast.Expr, field string) ast.Expr
-
-	// NewSelect creates a field traversal Expr value.
- NewSelect(operand ast.Expr, field string) ast.Expr
-
- // OffsetLocation returns the Location of the expression identifier.
- OffsetLocation(exprID int64) common.Location
-
- // NewError associates an error message with a given expression id.
- NewError(exprID int64, message string) *common.Error
-}
-
-var (
- // HasMacro expands "has(m.f)" which tests the presence of a field, avoiding the need to
- // specify the field as a string.
- HasMacro = NewGlobalMacro(operators.Has, 1, MakeHas)
-
- // AllMacro expands "range.all(var, predicate)" into a comprehension which ensures that all
- // elements in the range satisfy the predicate.
- AllMacro = NewReceiverMacro(operators.All, 2, MakeAll)
-
- // ExistsMacro expands "range.exists(var, predicate)" into a comprehension which ensures that
- // some element in the range satisfies the predicate.
- ExistsMacro = NewReceiverMacro(operators.Exists, 2, MakeExists)
-
- // ExistsOneMacro expands "range.exists_one(var, predicate)", which is true if for exactly one
- // element in range the predicate holds.
- ExistsOneMacro = NewReceiverMacro(operators.ExistsOne, 2, MakeExistsOne)
-
- // MapMacro expands "range.map(var, function)" into a comprehension which applies the function
- // to each element in the range to produce a new list.
- MapMacro = NewReceiverMacro(operators.Map, 2, MakeMap)
-
- // MapFilterMacro expands "range.map(var, predicate, function)" into a comprehension which
- // first filters the elements in the range by the predicate, then applies the transform function
- // to produce a new list.
- MapFilterMacro = NewReceiverMacro(operators.Map, 3, MakeMap)
-
- // FilterMacro expands "range.filter(var, predicate)" into a comprehension which filters
- // elements in the range, producing a new list from the elements that satisfy the predicate.
- FilterMacro = NewReceiverMacro(operators.Filter, 2, MakeFilter)
-
- // AllMacros includes the list of all spec-supported macros.
- AllMacros = []Macro{
- HasMacro,
- AllMacro,
- ExistsMacro,
- ExistsOneMacro,
- MapMacro,
- MapFilterMacro,
- FilterMacro,
- }
-
- // NoMacros list.
- NoMacros = []Macro{}
-)
-
-// AccumulatorName is the traditional variable name assigned to the fold accumulator variable.
-const AccumulatorName = "__result__"
-
-type quantifierKind int
-
-const (
- quantifierAll quantifierKind = iota
- quantifierExists
- quantifierExistsOne
-)
-
-// MakeAll expands the input call arguments into a comprehension that returns true if all of the
-// elements in the range match the predicate expression:
-// <iterRange>.all(<iterVar>, <predicate>)
-func MakeAll(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
- return makeQuantifier(quantifierAll, eh, target, args)
-}
-
-// MakeExists expands the input call arguments into a comprehension that returns true if any of the
-// elements in the range match the predicate expression:
-// <iterRange>.exists(<iterVar>, <predicate>)
-func MakeExists(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
- return makeQuantifier(quantifierExists, eh, target, args)
-}
-
-// MakeExistsOne expands the input call arguments into a comprehension that returns true if, for
-// exactly one of the elements in the range, the predicate holds:
-// <iterRange>.exists_one(<iterVar>, <predicate>)
-func MakeExistsOne(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
- return makeQuantifier(quantifierExistsOne, eh, target, args)
-}
-
-// MakeMap expands the input call arguments into a comprehension that transforms each element in the
-// input to produce an output list.
-//
-// There are two call patterns supported by map:
-//
-// <iterRange>.map(<iterVar>, <transform>)
-// <iterRange>.map(<iterVar>, <predicate>, <transform>)
-//
-// In the second form, only the elements for which the predicate evaluates to true are
-// transformed.
-func MakeMap(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
- v, found := extractIdent(args[0])
- if !found {
- return nil, eh.NewError(args[0].ID(), "argument is not an identifier")
- }
-
- var fn ast.Expr
- var filter ast.Expr
-
- if len(args) == 3 {
- filter = args[1]
- fn = args[2]
- } else {
- filter = nil
- fn = args[1]
- }
-
- accuExpr := eh.NewAccuIdent()
- init := eh.NewList()
- condition := eh.NewLiteral(types.True)
- step := eh.NewCall(operators.Add, accuExpr, eh.NewList(fn))
-
- if filter != nil {
- step = eh.NewCall(operators.Conditional, filter, step, accuExpr)
- }
- return eh.NewComprehension(target, v, AccumulatorName, init, condition, step, accuExpr), nil
-}
-
-// MakeFilter expands the input call arguments into a comprehension which produces a list containing
-// only the elements that match the provided predicate expression:
-// <iterRange>.filter(<iterVar>, <predicate>)
-func MakeFilter(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
- v, found := extractIdent(args[0])
- if !found {
- return nil, eh.NewError(args[0].ID(), "argument is not an identifier")
- }
-
- filter := args[1]
- accuExpr := eh.NewAccuIdent()
- init := eh.NewList()
- condition := eh.NewLiteral(types.True)
- step := eh.NewCall(operators.Add, accuExpr, eh.NewList(args[0]))
- step = eh.NewCall(operators.Conditional, filter, step, accuExpr)
- return eh.NewComprehension(target, v, AccumulatorName, init, condition, step, accuExpr), nil
-}
-
-// MakeHas expands the input call arguments into a presence test, e.g. has(<operand>.field)
-func MakeHas(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
- if args[0].Kind() == ast.SelectKind {
- s := args[0].AsSelect()
- return eh.NewPresenceTest(s.Operand(), s.FieldName()), nil
- }
- return nil, eh.NewError(args[0].ID(), "invalid argument to has() macro")
-}
-
-func makeQuantifier(kind quantifierKind, eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
- v, found := extractIdent(args[0])
- if !found {
- return nil, eh.NewError(args[0].ID(), "argument must be a simple name")
- }
-
- var init ast.Expr
- var condition ast.Expr
- var step ast.Expr
- var result ast.Expr
- switch kind {
- case quantifierAll:
- init = eh.NewLiteral(types.True)
- condition = eh.NewCall(operators.NotStrictlyFalse, eh.NewAccuIdent())
- step = eh.NewCall(operators.LogicalAnd, eh.NewAccuIdent(), args[1])
- result = eh.NewAccuIdent()
- case quantifierExists:
- init = eh.NewLiteral(types.False)
- condition = eh.NewCall(
- operators.NotStrictlyFalse,
- eh.NewCall(operators.LogicalNot, eh.NewAccuIdent()))
- step = eh.NewCall(operators.LogicalOr, eh.NewAccuIdent(), args[1])
- result = eh.NewAccuIdent()
- case quantifierExistsOne:
- zeroExpr := eh.NewLiteral(types.Int(0))
- oneExpr := eh.NewLiteral(types.Int(1))
- init = zeroExpr
- condition = eh.NewLiteral(types.True)
- step = eh.NewCall(operators.Conditional, args[1],
- eh.NewCall(operators.Add, eh.NewAccuIdent(), oneExpr), eh.NewAccuIdent())
- result = eh.NewCall(operators.Equals, eh.NewAccuIdent(), oneExpr)
- default:
- return nil, eh.NewError(args[0].ID(), fmt.Sprintf("unrecognized quantifier '%v'", kind))
- }
- return eh.NewComprehension(target, v, AccumulatorName, init, condition, step, result), nil
-}
-
-func extractIdent(e ast.Expr) (string, bool) {
- switch e.Kind() {
- case ast.IdentKind:
- return e.AsIdent(), true
- }
- return "", false
-}
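Editor's note: the Macro constructors and the ExprHelper interface above are all that is needed to define a custom macro. The sketch below is an illustration only (not part of the vendored file); the macro name zero() is hypothetical, and the snippet assumes it lives inside package parser.

package parser

import (
	"github.com/google/cel-go/common"
	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/types"
)

// zeroMacro is a hypothetical global macro: zero() expands to the int literal 0.
var zeroMacro = NewGlobalMacro("zero", 0,
	func(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
		// target is nil for global (non-receiver) macros; args is empty for arg count 0.
		return eh.NewLiteral(types.Int(0)), nil
	})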
diff --git a/vendor/github.com/google/cel-go/parser/options.go b/vendor/github.com/google/cel-go/parser/options.go
deleted file mode 100644
index 61fc3adec..000000000
--- a/vendor/github.com/google/cel-go/parser/options.go
+++ /dev/null
@@ -1,140 +0,0 @@
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package parser
-
-import "fmt"
-
-type options struct {
- maxRecursionDepth int
- errorReportingLimit int
- errorRecoveryTokenLookaheadLimit int
- errorRecoveryLimit int
- expressionSizeCodePointLimit int
- macros map[string]Macro
- populateMacroCalls bool
- enableOptionalSyntax bool
- enableVariadicOperatorASTs bool
-}
-
-// Option configures the behavior of the parser.
-type Option func(*options) error
-
-// MaxRecursionDepth limits the maximum depth the parser will attempt to parse the expression before giving up.
-func MaxRecursionDepth(limit int) Option {
- return func(opts *options) error {
- if limit < -1 {
- return fmt.Errorf("max recursion depth must be greater than or equal to -1: %d", limit)
- }
- opts.maxRecursionDepth = limit
- return nil
- }
-}
-
-// ErrorRecoveryLookaheadTokenLimit limits the number of lexer tokens that may be considered during error recovery.
-//
-// Error recovery often involves looking ahead in the input to determine if there's a point at which parsing may
-// successfully resume. In some pathological cases, the parser can look through quite a large set of input, which
-// in turn generates a lot of back-tracking and performance degradation.
-//
-// The limit must be >= 1, and is recommended to be less than the default of 256.
-func ErrorRecoveryLookaheadTokenLimit(limit int) Option {
- return func(opts *options) error {
- if limit < 1 {
- return fmt.Errorf("error recovery lookahead token limit must be at least 1: %d", limit)
- }
- opts.errorRecoveryTokenLookaheadLimit = limit
- return nil
- }
-}
-
-// ErrorRecoveryLimit limits the number of attempts the parser will perform to recover from an error.
-func ErrorRecoveryLimit(limit int) Option {
- return func(opts *options) error {
- if limit < -1 {
- return fmt.Errorf("error recovery limit must be greater than or equal to -1: %d", limit)
- }
- opts.errorRecoveryLimit = limit
- return nil
- }
-}
-
-// ErrorReportingLimit limits the number of syntax error reports before terminating parsing.
-//
-// The limit must be at least 1. If unset, the limit will be 100.
-func ErrorReportingLimit(limit int) Option {
- return func(opts *options) error {
- if limit < 1 {
- return fmt.Errorf("error reporting limit must be at least 1: %d", limit)
- }
- opts.errorReportingLimit = limit
- return nil
- }
-}
-
-// ExpressionSizeCodePointLimit is an option which limits the maximum code point count of an
-// expression.
-func ExpressionSizeCodePointLimit(expressionSizeCodePointLimit int) Option {
- return func(opts *options) error {
- if expressionSizeCodePointLimit < -1 {
- return fmt.Errorf("expression size code point limit must be greater than or equal to -1: %d", expressionSizeCodePointLimit)
- }
- opts.expressionSizeCodePointLimit = expressionSizeCodePointLimit
- return nil
- }
-}
-
-// Macros adds the given macros to the parser.
-func Macros(macros ...Macro) Option {
- return func(opts *options) error {
- for _, m := range macros {
- if m != nil {
- if opts.macros == nil {
- opts.macros = make(map[string]Macro)
- }
- opts.macros[m.MacroKey()] = m
- }
- }
- return nil
- }
-}
-
-// PopulateMacroCalls ensures that the original call signatures replaced by expanded macros
-// are preserved in the `SourceInfo` of the parse result.
-func PopulateMacroCalls(populateMacroCalls bool) Option {
- return func(opts *options) error {
- opts.populateMacroCalls = populateMacroCalls
- return nil
- }
-}
-
-// EnableOptionalSyntax enables syntax for optional field and index selection.
-func EnableOptionalSyntax(optionalSyntax bool) Option {
- return func(opts *options) error {
- opts.enableOptionalSyntax = optionalSyntax
- return nil
- }
-}
-
-// EnableVariadicOperatorASTs enables a compact representation of chained like-kind commutative
-// operators. e.g. `a || b || c || d` -> `call(op='||', args=[a, b, c, d])`
-//
-// The benefit of enabling variadic operator ASTs is a more compact representation of deeply nested
-// logic graphs.
-func EnableVariadicOperatorASTs(varArgASTs bool) Option {
- return func(opts *options) error {
- opts.enableVariadicOperatorASTs = varArgASTs
- return nil
- }
-}
diff --git a/vendor/github.com/google/cel-go/parser/parser.go b/vendor/github.com/google/cel-go/parser/parser.go
deleted file mode 100644
index cb753df73..000000000
--- a/vendor/github.com/google/cel-go/parser/parser.go
+++ /dev/null
@@ -1,1008 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package parser declares an expression parser with support for macro
-// expansion.
-package parser
-
-import (
- "fmt"
- "regexp"
- "strconv"
- "strings"
-
- antlr "github.com/antlr4-go/antlr/v4"
-
- "github.com/google/cel-go/common"
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/runes"
- "github.com/google/cel-go/common/types"
- "github.com/google/cel-go/parser/gen"
-)
-
-// Parser encapsulates the context necessary to perform parsing for different expressions.
-type Parser struct {
- options
-}
-
-// NewParser builds and returns a new Parser using the provided options.
-func NewParser(opts ...Option) (*Parser, error) {
- p := &Parser{}
- for _, opt := range opts {
- if err := opt(&p.options); err != nil {
- return nil, err
- }
- }
- if p.errorReportingLimit == 0 {
- p.errorReportingLimit = 100
- }
- if p.maxRecursionDepth == 0 {
- p.maxRecursionDepth = 250
- }
- if p.maxRecursionDepth == -1 {
- p.maxRecursionDepth = int((^uint(0)) >> 1)
- }
- if p.errorRecoveryTokenLookaheadLimit == 0 {
- p.errorRecoveryTokenLookaheadLimit = 256
- }
- if p.errorRecoveryLimit == 0 {
- p.errorRecoveryLimit = 30
- }
- if p.errorRecoveryLimit == -1 {
- p.errorRecoveryLimit = int((^uint(0)) >> 1)
- }
- if p.expressionSizeCodePointLimit == 0 {
- p.expressionSizeCodePointLimit = 100_000
- }
- if p.expressionSizeCodePointLimit == -1 {
- p.expressionSizeCodePointLimit = int((^uint(0)) >> 1)
- }
- // Bool is false by default, so populateMacroCalls will be false by default
- return p, nil
-}
-
-// mustNewParser does the work of NewParser and panics if an error occurs.
-//
-// This function is only intended for internal use and is for backwards compatibility in Parse and
-// ParseWithMacros, where we know the options will not result in an error.
-func mustNewParser(opts ...Option) *Parser {
- p, err := NewParser(opts...)
- if err != nil {
- panic(err)
- }
- return p
-}
-
-// Parse parses the expression represented by source and returns the result.
-func (p *Parser) Parse(source common.Source) (*ast.AST, *common.Errors) {
- errs := common.NewErrors(source)
- fac := ast.NewExprFactory()
- impl := parser{
- errors: &parseErrors{errs},
- exprFactory: fac,
- helper: newParserHelper(source, fac),
- macros: p.macros,
- maxRecursionDepth: p.maxRecursionDepth,
- errorReportingLimit: p.errorReportingLimit,
- errorRecoveryLimit: p.errorRecoveryLimit,
- errorRecoveryLookaheadTokenLimit: p.errorRecoveryTokenLookaheadLimit,
- populateMacroCalls: p.populateMacroCalls,
- enableOptionalSyntax: p.enableOptionalSyntax,
- enableVariadicOperatorASTs: p.enableVariadicOperatorASTs,
- }
- buf, ok := source.(runes.Buffer)
- if !ok {
- buf = runes.NewBuffer(source.Content())
- }
- var out ast.Expr
- if buf.Len() > p.expressionSizeCodePointLimit {
- out = impl.reportError(common.NoLocation,
- "expression code point size exceeds limit: size: %d, limit %d",
- buf.Len(), p.expressionSizeCodePointLimit)
- } else {
- out = impl.parse(buf, source.Description())
- }
- return ast.NewAST(out, impl.helper.getSourceInfo()), errs
-}
-
-// reservedIds are not legal to use as variables. We exclude them post-parse, as they *are* valid
-// field names for protos, and it would complicate the grammar to distinguish the cases.
-var reservedIds = map[string]struct{}{
- "as": {},
- "break": {},
- "const": {},
- "continue": {},
- "else": {},
- "false": {},
- "for": {},
- "function": {},
- "if": {},
- "import": {},
- "in": {},
- "let": {},
- "loop": {},
- "package": {},
- "namespace": {},
- "null": {},
- "return": {},
- "true": {},
- "var": {},
- "void": {},
- "while": {},
-}
-
-// Parse converts a source input to a parsed expression.
-// This function calls ParseWithMacros with AllMacros.
-//
-// Deprecated: Use NewParser().Parse() instead.
-func Parse(source common.Source) (*ast.AST, *common.Errors) {
- return mustNewParser(Macros(AllMacros...)).Parse(source)
-}
-
-type recursionError struct {
- message string
-}
-
-// Error implements error.
-func (re *recursionError) Error() string {
- return re.message
-}
-
-var _ error = &recursionError{}
-
-type recursionListener struct {
- maxDepth int
- ruleTypeDepth map[int]*int
-}
-
-func (rl *recursionListener) VisitTerminal(node antlr.TerminalNode) {}
-
-func (rl *recursionListener) VisitErrorNode(node antlr.ErrorNode) {}
-
-func (rl *recursionListener) EnterEveryRule(ctx antlr.ParserRuleContext) {
- if ctx == nil {
- return
- }
- ruleIndex := ctx.GetRuleIndex()
- depth, found := rl.ruleTypeDepth[ruleIndex]
- if !found {
- var counter = 1
- rl.ruleTypeDepth[ruleIndex] = &counter
- depth = &counter
- } else {
- *depth++
- }
- if *depth > rl.maxDepth {
- panic(&recursionError{
- message: fmt.Sprintf("expression recursion limit exceeded: %d", rl.maxDepth),
- })
- }
-}
-
-func (rl *recursionListener) ExitEveryRule(ctx antlr.ParserRuleContext) {
- if ctx == nil {
- return
- }
- ruleIndex := ctx.GetRuleIndex()
- if depth, found := rl.ruleTypeDepth[ruleIndex]; found && *depth > 0 {
- *depth--
- }
-}
-
-var _ antlr.ParseTreeListener = &recursionListener{}
-
-type tooManyErrors struct {
- errorReportingLimit int
-}
-
-func (t *tooManyErrors) Error() string {
- return fmt.Sprintf("More than %d syntax errors", t.errorReportingLimit)
-}
-
-var _ error = &tooManyErrors{}
-
-type recoveryLimitError struct {
- message string
-}
-
-// Error implements error.
-func (rl *recoveryLimitError) Error() string {
- return rl.message
-}
-
-type lookaheadLimitError struct {
- message string
-}
-
-func (ll *lookaheadLimitError) Error() string {
- return ll.message
-}
-
-var _ error = &recoveryLimitError{}
-
-type recoveryLimitErrorStrategy struct {
- *antlr.DefaultErrorStrategy
- errorRecoveryLimit int
- errorRecoveryTokenLookaheadLimit int
- recoveryAttempts int
-}
-
-type lookaheadConsumer struct {
- antlr.Parser
- errorRecoveryTokenLookaheadLimit int
- lookaheadAttempts int
-}
-
-func (lc *lookaheadConsumer) Consume() antlr.Token {
- if lc.lookaheadAttempts >= lc.errorRecoveryTokenLookaheadLimit {
- panic(&lookaheadLimitError{
- message: fmt.Sprintf("error recovery token lookahead limit exceeded: %d", lc.errorRecoveryTokenLookaheadLimit),
- })
- }
- lc.lookaheadAttempts++
- return lc.Parser.Consume()
-}
-
-func (rl *recoveryLimitErrorStrategy) Recover(recognizer antlr.Parser, e antlr.RecognitionException) {
- rl.checkAttempts(recognizer)
- lc := &lookaheadConsumer{Parser: recognizer, errorRecoveryTokenLookaheadLimit: rl.errorRecoveryTokenLookaheadLimit}
- rl.DefaultErrorStrategy.Recover(lc, e)
-}
-
-func (rl *recoveryLimitErrorStrategy) RecoverInline(recognizer antlr.Parser) antlr.Token {
- rl.checkAttempts(recognizer)
- lc := &lookaheadConsumer{Parser: recognizer, errorRecoveryTokenLookaheadLimit: rl.errorRecoveryTokenLookaheadLimit}
- return rl.DefaultErrorStrategy.RecoverInline(lc)
-}
-
-func (rl *recoveryLimitErrorStrategy) checkAttempts(recognizer antlr.Parser) {
- if rl.recoveryAttempts == rl.errorRecoveryLimit {
- rl.recoveryAttempts++
- msg := fmt.Sprintf("error recovery attempt limit exceeded: %d", rl.errorRecoveryLimit)
- recognizer.NotifyErrorListeners(msg, nil, nil)
- panic(&recoveryLimitError{
- message: msg,
- })
- }
- rl.recoveryAttempts++
-}
-
-var _ antlr.ErrorStrategy = &recoveryLimitErrorStrategy{}
-
-type parser struct {
- gen.BaseCELVisitor
- errors *parseErrors
- exprFactory ast.ExprFactory
- helper *parserHelper
- macros map[string]Macro
- recursionDepth int
- errorReports int
- maxRecursionDepth int
- errorReportingLimit int
- errorRecoveryLimit int
- errorRecoveryLookaheadTokenLimit int
- populateMacroCalls bool
- enableOptionalSyntax bool
- enableVariadicOperatorASTs bool
-}
-
-var _ gen.CELVisitor = (*parser)(nil)
-
-func (p *parser) parse(expr runes.Buffer, desc string) ast.Expr {
- lexer := gen.NewCELLexer(newCharStream(expr, desc))
- lexer.RemoveErrorListeners()
- lexer.AddErrorListener(p)
-
- prsr := gen.NewCELParser(antlr.NewCommonTokenStream(lexer, 0))
- prsr.RemoveErrorListeners()
-
- prsrListener := &recursionListener{
- maxDepth: p.maxRecursionDepth,
- ruleTypeDepth: map[int]*int{},
- }
-
- prsr.AddErrorListener(p)
- prsr.AddParseListener(prsrListener)
-
- prsr.SetErrorHandler(&recoveryLimitErrorStrategy{
- DefaultErrorStrategy: antlr.NewDefaultErrorStrategy(),
- errorRecoveryLimit: p.errorRecoveryLimit,
- errorRecoveryTokenLookaheadLimit: p.errorRecoveryLookaheadTokenLimit,
- })
-
- defer func() {
- if val := recover(); val != nil {
- switch err := val.(type) {
- case *lookaheadLimitError:
- p.errors.internalError(err.Error())
- case *recursionError:
- p.errors.internalError(err.Error())
- case *tooManyErrors:
- // do nothing
- case *recoveryLimitError:
- // do nothing, listeners already notified and error reported.
- default:
- panic(val)
- }
- }
- }()
-
- return p.Visit(prsr.Start_()).(ast.Expr)
-}
-
-// Visitor implementations.
-func (p *parser) Visit(tree antlr.ParseTree) any {
- t := unnest(tree)
- switch tree := t.(type) {
- case *gen.StartContext:
- return p.VisitStart(tree)
- case *gen.ExprContext:
- p.checkAndIncrementRecursionDepth()
- out := p.VisitExpr(tree)
- p.decrementRecursionDepth()
- return out
- case *gen.ConditionalAndContext:
- return p.VisitConditionalAnd(tree)
- case *gen.ConditionalOrContext:
- return p.VisitConditionalOr(tree)
- case *gen.RelationContext:
- p.checkAndIncrementRecursionDepth()
- out := p.VisitRelation(tree)
- p.decrementRecursionDepth()
- return out
- case *gen.CalcContext:
- p.checkAndIncrementRecursionDepth()
- out := p.VisitCalc(tree)
- p.decrementRecursionDepth()
- return out
- case *gen.LogicalNotContext:
- return p.VisitLogicalNot(tree)
- case *gen.IdentOrGlobalCallContext:
- return p.VisitIdentOrGlobalCall(tree)
- case *gen.SelectContext:
- p.checkAndIncrementRecursionDepth()
- out := p.VisitSelect(tree)
- p.decrementRecursionDepth()
- return out
- case *gen.MemberCallContext:
- p.checkAndIncrementRecursionDepth()
- out := p.VisitMemberCall(tree)
- p.decrementRecursionDepth()
- return out
- case *gen.MapInitializerListContext:
- return p.VisitMapInitializerList(tree)
- case *gen.NegateContext:
- return p.VisitNegate(tree)
- case *gen.IndexContext:
- p.checkAndIncrementRecursionDepth()
- out := p.VisitIndex(tree)
- p.decrementRecursionDepth()
- return out
- case *gen.UnaryContext:
- return p.VisitUnary(tree)
- case *gen.CreateListContext:
- return p.VisitCreateList(tree)
- case *gen.CreateMessageContext:
- return p.VisitCreateMessage(tree)
- case *gen.CreateStructContext:
- return p.VisitCreateStruct(tree)
- case *gen.IntContext:
- return p.VisitInt(tree)
- case *gen.UintContext:
- return p.VisitUint(tree)
- case *gen.DoubleContext:
- return p.VisitDouble(tree)
- case *gen.StringContext:
- return p.VisitString(tree)
- case *gen.BytesContext:
- return p.VisitBytes(tree)
- case *gen.BoolFalseContext:
- return p.VisitBoolFalse(tree)
- case *gen.BoolTrueContext:
- return p.VisitBoolTrue(tree)
- case *gen.NullContext:
- return p.VisitNull(tree)
- }
-
- // Report at least one error if the parser reaches an unknown parse element.
- // Typically, this happens if the parser has already encountered a syntax error elsewhere.
- if p.errors.errorCount() == 0 {
- txt := "<>"
- if t != nil {
- txt = fmt.Sprintf("<<%T>>", t)
- }
- return p.reportError(common.NoLocation, "unknown parse element encountered: %s", txt)
- }
- return p.helper.newExpr(common.NoLocation)
-
-}
-
-// Visit a parse tree produced by CELParser#start.
-func (p *parser) VisitStart(ctx *gen.StartContext) any {
- return p.Visit(ctx.Expr())
-}
-
-// Visit a parse tree produced by CELParser#expr.
-func (p *parser) VisitExpr(ctx *gen.ExprContext) any {
- result := p.Visit(ctx.GetE()).(ast.Expr)
- if ctx.GetOp() == nil {
- return result
- }
- opID := p.helper.id(ctx.GetOp())
- ifTrue := p.Visit(ctx.GetE1()).(ast.Expr)
- ifFalse := p.Visit(ctx.GetE2()).(ast.Expr)
- return p.globalCallOrMacro(opID, operators.Conditional, result, ifTrue, ifFalse)
-}
-
-// Visit a parse tree produced by CELParser#conditionalOr.
-func (p *parser) VisitConditionalOr(ctx *gen.ConditionalOrContext) any {
- result := p.Visit(ctx.GetE()).(ast.Expr)
- l := p.newLogicManager(operators.LogicalOr, result)
- rest := ctx.GetE1()
- for i, op := range ctx.GetOps() {
- if i >= len(rest) {
- return p.reportError(ctx, "unexpected character, wanted '||'")
- }
- next := p.Visit(rest[i]).(ast.Expr)
- opID := p.helper.id(op)
- l.addTerm(opID, next)
- }
- return l.toExpr()
-}
-
-// Visit a parse tree produced by CELParser#conditionalAnd.
-func (p *parser) VisitConditionalAnd(ctx *gen.ConditionalAndContext) any {
- result := p.Visit(ctx.GetE()).(ast.Expr)
- l := p.newLogicManager(operators.LogicalAnd, result)
- rest := ctx.GetE1()
- for i, op := range ctx.GetOps() {
- if i >= len(rest) {
- return p.reportError(ctx, "unexpected character, wanted '&&'")
- }
- next := p.Visit(rest[i]).(ast.Expr)
- opID := p.helper.id(op)
- l.addTerm(opID, next)
- }
- return l.toExpr()
-}
-
-// Visit a parse tree produced by CELParser#relation.
-func (p *parser) VisitRelation(ctx *gen.RelationContext) any {
- opText := ""
- if ctx.GetOp() != nil {
- opText = ctx.GetOp().GetText()
- }
- if op, found := operators.Find(opText); found {
- lhs := p.Visit(ctx.Relation(0)).(ast.Expr)
- opID := p.helper.id(ctx.GetOp())
- rhs := p.Visit(ctx.Relation(1)).(ast.Expr)
- return p.globalCallOrMacro(opID, op, lhs, rhs)
- }
- return p.reportError(ctx, "operator not found")
-}
-
-// Visit a parse tree produced by CELParser#calc.
-func (p *parser) VisitCalc(ctx *gen.CalcContext) any {
- opText := ""
- if ctx.GetOp() != nil {
- opText = ctx.GetOp().GetText()
- }
- if op, found := operators.Find(opText); found {
- lhs := p.Visit(ctx.Calc(0)).(ast.Expr)
- opID := p.helper.id(ctx.GetOp())
- rhs := p.Visit(ctx.Calc(1)).(ast.Expr)
- return p.globalCallOrMacro(opID, op, lhs, rhs)
- }
- return p.reportError(ctx, "operator not found")
-}
-
-func (p *parser) VisitUnary(ctx *gen.UnaryContext) any {
- return p.helper.newLiteralString(ctx, "<>")
-}
-
-// Visit a parse tree produced by CELParser#LogicalNot.
-func (p *parser) VisitLogicalNot(ctx *gen.LogicalNotContext) any {
- if len(ctx.GetOps())%2 == 0 {
- return p.Visit(ctx.Member())
- }
- opID := p.helper.id(ctx.GetOps()[0])
- target := p.Visit(ctx.Member()).(ast.Expr)
- return p.globalCallOrMacro(opID, operators.LogicalNot, target)
-}
-
-func (p *parser) VisitNegate(ctx *gen.NegateContext) any {
- if len(ctx.GetOps())%2 == 0 {
- return p.Visit(ctx.Member())
- }
- opID := p.helper.id(ctx.GetOps()[0])
- target := p.Visit(ctx.Member()).(ast.Expr)
- return p.globalCallOrMacro(opID, operators.Negate, target)
-}
-
-// VisitSelect visits a parse tree produced by CELParser#Select.
-func (p *parser) VisitSelect(ctx *gen.SelectContext) any {
- operand := p.Visit(ctx.Member()).(ast.Expr)
- // Handle the error case where no valid identifier is specified.
- if ctx.GetId() == nil || ctx.GetOp() == nil {
- return p.helper.newExpr(ctx)
- }
- id := ctx.GetId().GetText()
- if ctx.GetOpt() != nil {
- if !p.enableOptionalSyntax {
- return p.reportError(ctx.GetOp(), "unsupported syntax '.?'")
- }
- return p.helper.newGlobalCall(
- ctx.GetOp(),
- operators.OptSelect,
- operand,
- p.helper.newLiteralString(ctx.GetId(), id))
- }
- return p.helper.newSelect(ctx.GetOp(), operand, id)
-}
-
-// VisitMemberCall visits a parse tree produced by CELParser#MemberCall.
-func (p *parser) VisitMemberCall(ctx *gen.MemberCallContext) any {
- operand := p.Visit(ctx.Member()).(ast.Expr)
- // Handle the error case where no valid identifier is specified.
- if ctx.GetId() == nil {
- return p.helper.newExpr(ctx)
- }
- id := ctx.GetId().GetText()
- opID := p.helper.id(ctx.GetOpen())
- return p.receiverCallOrMacro(opID, id, operand, p.visitExprList(ctx.GetArgs())...)
-}
-
-// Visit a parse tree produced by CELParser#Index.
-func (p *parser) VisitIndex(ctx *gen.IndexContext) any {
- target := p.Visit(ctx.Member()).(ast.Expr)
- // Handle the error case where no valid identifier is specified.
- if ctx.GetOp() == nil {
- return p.helper.newExpr(ctx)
- }
- opID := p.helper.id(ctx.GetOp())
- index := p.Visit(ctx.GetIndex()).(ast.Expr)
- operator := operators.Index
- if ctx.GetOpt() != nil {
- if !p.enableOptionalSyntax {
- return p.reportError(ctx.GetOp(), "unsupported syntax '[?'")
- }
- operator = operators.OptIndex
- }
- return p.globalCallOrMacro(opID, operator, target, index)
-}
-
-// Visit a parse tree produced by CELParser#CreateMessage.
-func (p *parser) VisitCreateMessage(ctx *gen.CreateMessageContext) any {
- messageName := ""
- for _, id := range ctx.GetIds() {
- if len(messageName) != 0 {
- messageName += "."
- }
- messageName += id.GetText()
- }
- if ctx.GetLeadingDot() != nil {
- messageName = "." + messageName
- }
- objID := p.helper.id(ctx.GetOp())
- entries := p.VisitIFieldInitializerList(ctx.GetEntries()).([]ast.EntryExpr)
- return p.helper.newObject(objID, messageName, entries...)
-}
-
-// Visit a parse tree of field initializers.
-func (p *parser) VisitIFieldInitializerList(ctx gen.IFieldInitializerListContext) any {
- if ctx == nil || ctx.GetFields() == nil {
-		// This is the result of a syntax error handled elsewhere, return empty.
- return []ast.EntryExpr{}
- }
-
- result := make([]ast.EntryExpr, len(ctx.GetFields()))
- cols := ctx.GetCols()
- vals := ctx.GetValues()
- for i, f := range ctx.GetFields() {
- if i >= len(cols) || i >= len(vals) {
- // This is the result of a syntax error detected elsewhere.
- return []ast.EntryExpr{}
- }
- initID := p.helper.id(cols[i])
- optField := f.(*gen.OptFieldContext)
- optional := optField.GetOpt() != nil
- if !p.enableOptionalSyntax && optional {
- p.reportError(optField, "unsupported syntax '?'")
- continue
- }
- // The field may be empty due to a prior error.
- id := optField.IDENTIFIER()
- if id == nil {
- return []ast.EntryExpr{}
- }
- fieldName := id.GetText()
- value := p.Visit(vals[i]).(ast.Expr)
- field := p.helper.newObjectField(initID, fieldName, value, optional)
- result[i] = field
- }
- return result
-}
-
-// Visit a parse tree produced by CELParser#IdentOrGlobalCall.
-func (p *parser) VisitIdentOrGlobalCall(ctx *gen.IdentOrGlobalCallContext) any {
- identName := ""
- if ctx.GetLeadingDot() != nil {
- identName = "."
- }
- // Handle the error case where no valid identifier is specified.
- if ctx.GetId() == nil {
- return p.helper.newExpr(ctx)
- }
- // Handle reserved identifiers.
- id := ctx.GetId().GetText()
- if _, ok := reservedIds[id]; ok {
- return p.reportError(ctx, "reserved identifier: %s", id)
- }
- identName += id
- if ctx.GetOp() != nil {
- opID := p.helper.id(ctx.GetOp())
- return p.globalCallOrMacro(opID, identName, p.visitExprList(ctx.GetArgs())...)
- }
- return p.helper.newIdent(ctx.GetId(), identName)
-}
-
-// Visit a parse tree produced by CELParser#CreateList.
-func (p *parser) VisitCreateList(ctx *gen.CreateListContext) any {
- listID := p.helper.id(ctx.GetOp())
- elems, optionals := p.visitListInit(ctx.GetElems())
- return p.helper.newList(listID, elems, optionals...)
-}
-
-// Visit a parse tree produced by CELParser#CreateStruct.
-func (p *parser) VisitCreateStruct(ctx *gen.CreateStructContext) any {
- structID := p.helper.id(ctx.GetOp())
- entries := []ast.EntryExpr{}
- if ctx.GetEntries() != nil {
- entries = p.Visit(ctx.GetEntries()).([]ast.EntryExpr)
- }
- return p.helper.newMap(structID, entries...)
-}
-
-// Visit a parse tree produced by CELParser#mapInitializerList.
-func (p *parser) VisitMapInitializerList(ctx *gen.MapInitializerListContext) any {
- if ctx == nil || ctx.GetKeys() == nil {
-		// This is the result of a syntax error handled elsewhere, return empty.
- return []ast.EntryExpr{}
- }
-
- result := make([]ast.EntryExpr, len(ctx.GetCols()))
- keys := ctx.GetKeys()
- vals := ctx.GetValues()
- for i, col := range ctx.GetCols() {
- colID := p.helper.id(col)
- if i >= len(keys) || i >= len(vals) {
- // This is the result of a syntax error detected elsewhere.
- return []ast.EntryExpr{}
- }
- optKey := keys[i]
- optional := optKey.GetOpt() != nil
- if !p.enableOptionalSyntax && optional {
- p.reportError(optKey, "unsupported syntax '?'")
- continue
- }
- key := p.Visit(optKey.GetE()).(ast.Expr)
- value := p.Visit(vals[i]).(ast.Expr)
- entry := p.helper.newMapEntry(colID, key, value, optional)
- result[i] = entry
- }
- return result
-}
-
-// Visit a parse tree produced by CELParser#Int.
-func (p *parser) VisitInt(ctx *gen.IntContext) any {
- text := ctx.GetTok().GetText()
- base := 10
- if strings.HasPrefix(text, "0x") {
- base = 16
- text = text[2:]
- }
- if ctx.GetSign() != nil {
- text = ctx.GetSign().GetText() + text
- }
- i, err := strconv.ParseInt(text, base, 64)
- if err != nil {
- return p.reportError(ctx, "invalid int literal")
- }
- return p.helper.newLiteralInt(ctx, i)
-}
-
-// Visit a parse tree produced by CELParser#Uint.
-func (p *parser) VisitUint(ctx *gen.UintContext) any {
- text := ctx.GetTok().GetText()
- // trim the 'u' designator included in the uint literal.
- text = text[:len(text)-1]
- base := 10
- if strings.HasPrefix(text, "0x") {
- base = 16
- text = text[2:]
- }
- i, err := strconv.ParseUint(text, base, 64)
- if err != nil {
- return p.reportError(ctx, "invalid uint literal")
- }
- return p.helper.newLiteralUint(ctx, i)
-}
-
-// Visit a parse tree produced by CELParser#Double.
-func (p *parser) VisitDouble(ctx *gen.DoubleContext) any {
- txt := ctx.GetTok().GetText()
- if ctx.GetSign() != nil {
- txt = ctx.GetSign().GetText() + txt
- }
- f, err := strconv.ParseFloat(txt, 64)
- if err != nil {
- return p.reportError(ctx, "invalid double literal")
- }
- return p.helper.newLiteralDouble(ctx, f)
-
-}
-
-// Visit a parse tree produced by CELParser#String.
-func (p *parser) VisitString(ctx *gen.StringContext) any {
- s := p.unquote(ctx, ctx.GetText(), false)
- return p.helper.newLiteralString(ctx, s)
-}
-
-// Visit a parse tree produced by CELParser#Bytes.
-func (p *parser) VisitBytes(ctx *gen.BytesContext) any {
- b := []byte(p.unquote(ctx, ctx.GetTok().GetText()[1:], true))
- return p.helper.newLiteralBytes(ctx, b)
-}
-
-// Visit a parse tree produced by CELParser#BoolTrue.
-func (p *parser) VisitBoolTrue(ctx *gen.BoolTrueContext) any {
- return p.helper.newLiteralBool(ctx, true)
-}
-
-// Visit a parse tree produced by CELParser#BoolFalse.
-func (p *parser) VisitBoolFalse(ctx *gen.BoolFalseContext) any {
- return p.helper.newLiteralBool(ctx, false)
-}
-
-// Visit a parse tree produced by CELParser#Null.
-func (p *parser) VisitNull(ctx *gen.NullContext) any {
- return p.helper.exprFactory.NewLiteral(p.helper.newID(ctx), types.NullValue)
-}
-
-func (p *parser) visitExprList(ctx gen.IExprListContext) []ast.Expr {
- if ctx == nil {
- return []ast.Expr{}
- }
- return p.visitSlice(ctx.GetE())
-}
-
-func (p *parser) visitListInit(ctx gen.IListInitContext) ([]ast.Expr, []int32) {
- if ctx == nil {
- return []ast.Expr{}, []int32{}
- }
- elements := ctx.GetElems()
- result := make([]ast.Expr, len(elements))
- optionals := []int32{}
- for i, e := range elements {
- ex := p.Visit(e.GetE()).(ast.Expr)
- if ex == nil {
- return []ast.Expr{}, []int32{}
- }
- result[i] = ex
- if e.GetOpt() != nil {
- if !p.enableOptionalSyntax {
- p.reportError(e.GetOpt(), "unsupported syntax '?'")
- continue
- }
- optionals = append(optionals, int32(i))
- }
- }
- return result, optionals
-}
-
-func (p *parser) visitSlice(expressions []gen.IExprContext) []ast.Expr {
- if expressions == nil {
- return []ast.Expr{}
- }
- result := make([]ast.Expr, len(expressions))
- for i, e := range expressions {
- ex := p.Visit(e).(ast.Expr)
- result[i] = ex
- }
- return result
-}
-
-func (p *parser) unquote(ctx any, value string, isBytes bool) string {
- text, err := unescape(value, isBytes)
- if err != nil {
- p.reportError(ctx, "%s", err.Error())
- return value
- }
- return text
-}
-
-func (p *parser) newLogicManager(function string, term ast.Expr) *logicManager {
- if p.enableVariadicOperatorASTs {
- return newVariadicLogicManager(p.exprFactory, function, term)
- }
- return newBalancingLogicManager(p.exprFactory, function, term)
-}
-
-func (p *parser) reportError(ctx any, format string, args ...any) ast.Expr {
- var location common.Location
- err := p.helper.newExpr(ctx)
- switch c := ctx.(type) {
- case common.Location:
- location = c
- case antlr.Token, antlr.ParserRuleContext:
- location = p.helper.getLocation(err.ID())
- }
- // Provide arguments to the report error.
- p.errors.reportErrorAtID(err.ID(), location, format, args...)
- return err
-}
-
-// ANTLR Parse listener implementations
-func (p *parser) SyntaxError(recognizer antlr.Recognizer, offendingSymbol any, line, column int, msg string, e antlr.RecognitionException) {
- l := p.helper.source.NewLocation(line, column)
- // Hack to keep existing error messages consistent with previous versions of CEL when a reserved word
- // is used as an identifier. This behavior needs to be overhauled to provide consistent, normalized error
- // messages out of ANTLR to prevent future breaking changes related to error message content.
- if strings.Contains(msg, "no viable alternative") {
- msg = reservedIdentifier.ReplaceAllString(msg, mismatchedReservedIdentifier)
- }
- // Ensure that no more than 100 syntax errors are reported as this will halt attempts to recover from a
- // seriously broken expression.
- if p.errorReports < p.errorReportingLimit {
- p.errorReports++
- p.errors.syntaxError(l, msg)
- } else {
- tme := &tooManyErrors{errorReportingLimit: p.errorReportingLimit}
- p.errors.syntaxError(l, tme.Error())
- panic(tme)
- }
-}
-
-func (p *parser) ReportAmbiguity(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex int, exact bool, ambigAlts *antlr.BitSet, configs *antlr.ATNConfigSet) {
- // Intentional
-}
-
-func (p *parser) ReportAttemptingFullContext(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex int, conflictingAlts *antlr.BitSet, configs *antlr.ATNConfigSet) {
- // Intentional
-}
-
-func (p *parser) ReportContextSensitivity(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex, prediction int, configs *antlr.ATNConfigSet) {
- // Intentional
-}
-
-func (p *parser) globalCallOrMacro(exprID int64, function string, args ...ast.Expr) ast.Expr {
- if expr, found := p.expandMacro(exprID, function, nil, args...); found {
- return expr
- }
- return p.helper.newGlobalCall(exprID, function, args...)
-}
-
-func (p *parser) receiverCallOrMacro(exprID int64, function string, target ast.Expr, args ...ast.Expr) ast.Expr {
- if expr, found := p.expandMacro(exprID, function, target, args...); found {
- return expr
- }
- return p.helper.newReceiverCall(exprID, function, target, args...)
-}
-
-func (p *parser) expandMacro(exprID int64, function string, target ast.Expr, args ...ast.Expr) (ast.Expr, bool) {
- macro, found := p.macros[makeMacroKey(function, len(args), target != nil)]
- if !found {
- macro, found = p.macros[makeVarArgMacroKey(function, target != nil)]
- if !found {
- return nil, false
- }
- }
- eh := exprHelperPool.Get().(*exprHelper)
- defer exprHelperPool.Put(eh)
- eh.parserHelper = p.helper
- eh.id = exprID
- expr, err := macro.Expander()(eh, target, args)
- // An error indicates that the macro was matched, but the arguments were not well-formed.
- if err != nil {
- if err.Location != nil {
- return p.reportError(err.Location, err.Message), true
- }
- return p.reportError(p.helper.getLocation(exprID), err.Message), true
- }
- // A nil value from the macro indicates that the macro implementation decided that
- // an expansion should not be performed.
- if expr == nil {
- return nil, false
- }
- if p.populateMacroCalls {
- p.helper.addMacroCall(expr.ID(), function, target, args...)
- }
- return expr, true
-}
-
-func (p *parser) checkAndIncrementRecursionDepth() {
- p.recursionDepth++
- if p.recursionDepth > p.maxRecursionDepth {
- panic(&recursionError{message: "max recursion depth exceeded"})
- }
-}
-
-func (p *parser) decrementRecursionDepth() {
- p.recursionDepth--
-}
-
-// unnest traverses down the left-hand side of the parse graph until it encounters the first compound
-// parse node or the first leaf in the parse graph.
-func unnest(tree antlr.ParseTree) antlr.ParseTree {
- for tree != nil {
- switch t := tree.(type) {
- case *gen.ExprContext:
- // conditionalOr op='?' conditionalOr : expr
- if t.GetOp() != nil {
- return t
- }
- // conditionalOr
- tree = t.GetE()
- case *gen.ConditionalOrContext:
- // conditionalAnd (ops=|| conditionalAnd)*
- if t.GetOps() != nil && len(t.GetOps()) > 0 {
- return t
- }
- // conditionalAnd
- tree = t.GetE()
- case *gen.ConditionalAndContext:
- // relation (ops=&& relation)*
- if t.GetOps() != nil && len(t.GetOps()) > 0 {
- return t
- }
- // relation
- tree = t.GetE()
- case *gen.RelationContext:
- // relation op relation
- if t.GetOp() != nil {
- return t
- }
- // calc
- tree = t.Calc()
- case *gen.CalcContext:
- // calc op calc
- if t.GetOp() != nil {
- return t
- }
- // unary
- tree = t.Unary()
- case *gen.MemberExprContext:
- // member expands to one of: primary, select, index, or create message
- tree = t.Member()
- case *gen.PrimaryExprContext:
- // primary expands to one of identifier, nested, create list, create struct, literal
- tree = t.Primary()
- case *gen.NestedContext:
- // contains a nested 'expr'
- tree = t.GetE()
- case *gen.ConstantLiteralContext:
- // expands to a primitive literal
- tree = t.Literal()
- default:
- return t
- }
- }
- return tree
-}
-
-var (
- reservedIdentifier = regexp.MustCompile("no viable alternative at input '.(true|false|null)'")
- mismatchedReservedIdentifier = "mismatched input '$1' expecting IDENTIFIER"
-)
diff --git a/vendor/github.com/google/cel-go/parser/unescape.go b/vendor/github.com/google/cel-go/parser/unescape.go
deleted file mode 100644
index 27c57a9f3..000000000
--- a/vendor/github.com/google/cel-go/parser/unescape.go
+++ /dev/null
@@ -1,237 +0,0 @@
-// Copyright 2018 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package parser
-
-import (
- "fmt"
- "strings"
- "unicode/utf8"
-)
-
-// Unescape takes a quoted string, unquotes, and unescapes it.
-//
-// This function performs escaping compatible with GoogleSQL.
-func unescape(value string, isBytes bool) (string, error) {
- // All strings normalize newlines to the \n representation.
- value = newlineNormalizer.Replace(value)
- n := len(value)
-
- // Nothing to unescape / decode.
- if n < 2 {
- return value, fmt.Errorf("unable to unescape string")
- }
-
- // Raw string preceded by the 'r|R' prefix.
- isRawLiteral := false
- if value[0] == 'r' || value[0] == 'R' {
- value = value[1:]
- n = len(value)
- isRawLiteral = true
- }
-
- // Quoted string of some form, must have same first and last char.
- if value[0] != value[n-1] || (value[0] != '"' && value[0] != '\'') {
- return value, fmt.Errorf("unable to unescape string")
- }
-
- // Normalize the multi-line CEL string representation to a standard
- // Go quoted string.
- if n >= 6 {
- if strings.HasPrefix(value, "'''") {
- if !strings.HasSuffix(value, "'''") {
- return value, fmt.Errorf("unable to unescape string")
- }
- value = "\"" + value[3:n-3] + "\""
- } else if strings.HasPrefix(value, `"""`) {
- if !strings.HasSuffix(value, `"""`) {
- return value, fmt.Errorf("unable to unescape string")
- }
- value = "\"" + value[3:n-3] + "\""
- }
- n = len(value)
- }
- value = value[1 : n-1]
- // If there is nothing to escape, then return.
- if isRawLiteral || !strings.ContainsRune(value, '\\') {
- return value, nil
- }
-
- // Otherwise the string contains escape characters.
- // The following logic is adapted from `strconv/quote.go`
- var runeTmp [utf8.UTFMax]byte
- buf := make([]byte, 0, 3*n/2)
- for len(value) > 0 {
- c, encode, rest, err := unescapeChar(value, isBytes)
- if err != nil {
- return "", err
- }
- value = rest
- if c < utf8.RuneSelf || !encode {
- buf = append(buf, byte(c))
- } else {
- n := utf8.EncodeRune(runeTmp[:], c)
- buf = append(buf, runeTmp[:n]...)
- }
- }
- return string(buf), nil
-}
-
-// unescapeChar takes a string input and returns the following info:
-//
-// value - the escaped unicode rune at the front of the string.
-// encode - the value should be unicode-encoded
-// tail - the remainder of the input string.
-// err - error value, if the character could not be unescaped.
-//
-// When encode is true the return value may still fit within a single byte,
-// but unicode encoding is attempted which is more expensive than when the
-// value is known to self-represent as a single byte.
-//
-// If isBytes is set, unescape as a bytes literal so octal and hex escapes
-// represent byte values, not unicode code points.
-func unescapeChar(s string, isBytes bool) (value rune, encode bool, tail string, err error) {
- // 1. Character is not an escape sequence.
- switch c := s[0]; {
- case c >= utf8.RuneSelf:
- r, size := utf8.DecodeRuneInString(s)
- return r, true, s[size:], nil
- case c != '\\':
- return rune(s[0]), false, s[1:], nil
- }
-
- // 2. Last character is the start of an escape sequence.
- if len(s) <= 1 {
- err = fmt.Errorf("unable to unescape string, found '\\' as last character")
- return
- }
-
- c := s[1]
- s = s[2:]
- // 3. Common escape sequences shared with Google SQL
- switch c {
- case 'a':
- value = '\a'
- case 'b':
- value = '\b'
- case 'f':
- value = '\f'
- case 'n':
- value = '\n'
- case 'r':
- value = '\r'
- case 't':
- value = '\t'
- case 'v':
- value = '\v'
- case '\\':
- value = '\\'
- case '\'':
- value = '\''
- case '"':
- value = '"'
- case '`':
- value = '`'
- case '?':
- value = '?'
-
- // 4. Unicode escape sequences, reproduced from `strconv/quote.go`
- case 'x', 'X', 'u', 'U':
- n := 0
- encode = true
- switch c {
- case 'x', 'X':
- n = 2
- encode = !isBytes
- case 'u':
- n = 4
- if isBytes {
- err = fmt.Errorf("unable to unescape string")
- return
- }
- case 'U':
- n = 8
- if isBytes {
- err = fmt.Errorf("unable to unescape string")
- return
- }
- }
- var v rune
- if len(s) < n {
- err = fmt.Errorf("unable to unescape string")
- return
- }
- for j := 0; j < n; j++ {
- x, ok := unhex(s[j])
- if !ok {
- err = fmt.Errorf("unable to unescape string")
- return
- }
- v = v<<4 | x
- }
- s = s[n:]
- if !isBytes && v > utf8.MaxRune {
- err = fmt.Errorf("unable to unescape string")
- return
- }
- value = v
-
- // 5. Octal escape sequences, must be three digits \[0-3][0-7][0-7]
- case '0', '1', '2', '3':
- if len(s) < 2 {
- err = fmt.Errorf("unable to unescape octal sequence in string")
- return
- }
- v := rune(c - '0')
- for j := 0; j < 2; j++ {
- x := s[j]
- if x < '0' || x > '7' {
- err = fmt.Errorf("unable to unescape octal sequence in string")
- return
- }
- v = v*8 + rune(x-'0')
- }
- if !isBytes && v > utf8.MaxRune {
- err = fmt.Errorf("unable to unescape string")
- return
- }
- value = v
- s = s[2:]
- encode = !isBytes
-
- // Unknown escape sequence.
- default:
- err = fmt.Errorf("unable to unescape string")
- }
-
- tail = s
- return
-}
-
-func unhex(b byte) (rune, bool) {
- c := rune(b)
- switch {
- case '0' <= c && c <= '9':
- return c - '0', true
- case 'a' <= c && c <= 'f':
- return c - 'a' + 10, true
- case 'A' <= c && c <= 'F':
- return c - 'A' + 10, true
- }
- return 0, false
-}
-
-var (
- newlineNormalizer = strings.NewReplacer("\r\n", "\n", "\r", "\n")
-)
diff --git a/vendor/github.com/google/cel-go/parser/unparser.go b/vendor/github.com/google/cel-go/parser/unparser.go
deleted file mode 100644
index 91cf72944..000000000
--- a/vendor/github.com/google/cel-go/parser/unparser.go
+++ /dev/null
@@ -1,629 +0,0 @@
-// Copyright 2019 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package parser
-
-import (
- "errors"
- "fmt"
- "strconv"
- "strings"
-
- "github.com/google/cel-go/common/ast"
- "github.com/google/cel-go/common/operators"
- "github.com/google/cel-go/common/types"
-)
-
-// Unparse takes an input expression and source position information and generates a human-readable
-// expression.
-//
-// Note, unparsing an AST will often generate the same expression as was originally parsed, but some
-// formatting may be lost in translation, notably:
-//
-// - All quoted literals are double quoted.
-// - Byte literals are represented as octal escapes (same as Google SQL).
-// - Floating point values are converted to the small number of digits needed to represent the value.
-// - Spacing around punctuation marks may be lost.
-// - Parentheses will only be applied when they affect operator precedence.
-//
-// This function optionally takes in one or more UnparserOption to alter the unparsing behavior, such as
-// performing word wrapping on expressions.
-func Unparse(expr ast.Expr, info *ast.SourceInfo, opts ...UnparserOption) (string, error) {
- unparserOpts := &unparserOption{
- wrapOnColumn: defaultWrapOnColumn,
- wrapAfterColumnLimit: defaultWrapAfterColumnLimit,
- operatorsToWrapOn: defaultOperatorsToWrapOn,
- }
-
- var err error
- for _, opt := range opts {
- unparserOpts, err = opt(unparserOpts)
- if err != nil {
- return "", err
- }
- }
-
- un := &unparser{
- info: info,
- options: unparserOpts,
- }
- err = un.visit(expr)
- if err != nil {
- return "", err
- }
- return un.str.String(), nil
-}
-
-// unparser visits an expression to reconstruct a human-readable string from an AST.
-type unparser struct {
- str strings.Builder
- info *ast.SourceInfo
- options *unparserOption
- lastWrappedIndex int
-}
-
-func (un *unparser) visit(expr ast.Expr) error {
- if expr == nil {
- return errors.New("unsupported expression")
- }
- visited, err := un.visitMaybeMacroCall(expr)
- if visited || err != nil {
- return err
- }
- switch expr.Kind() {
- case ast.CallKind:
- return un.visitCall(expr)
- case ast.LiteralKind:
- return un.visitConst(expr)
- case ast.IdentKind:
- return un.visitIdent(expr)
- case ast.ListKind:
- return un.visitList(expr)
- case ast.MapKind:
- return un.visitStructMap(expr)
- case ast.SelectKind:
- return un.visitSelect(expr)
- case ast.StructKind:
- return un.visitStructMsg(expr)
- default:
- return fmt.Errorf("unsupported expression: %v", expr)
- }
-}
-
-func (un *unparser) visitCall(expr ast.Expr) error {
- c := expr.AsCall()
- fun := c.FunctionName()
- switch fun {
- // ternary operator
- case operators.Conditional:
- return un.visitCallConditional(expr)
- // optional select operator
- case operators.OptSelect:
- return un.visitOptSelect(expr)
- // index operator
- case operators.Index:
- return un.visitCallIndex(expr)
- // optional index operator
- case operators.OptIndex:
- return un.visitCallOptIndex(expr)
- // unary operators
- case operators.LogicalNot, operators.Negate:
- return un.visitCallUnary(expr)
- // binary operators
- case operators.Add,
- operators.Divide,
- operators.Equals,
- operators.Greater,
- operators.GreaterEquals,
- operators.In,
- operators.Less,
- operators.LessEquals,
- operators.LogicalAnd,
- operators.LogicalOr,
- operators.Modulo,
- operators.Multiply,
- operators.NotEquals,
- operators.OldIn,
- operators.Subtract:
- return un.visitCallBinary(expr)
- // standard function calls.
- default:
- return un.visitCallFunc(expr)
- }
-}
-
-func (un *unparser) visitCallBinary(expr ast.Expr) error {
- c := expr.AsCall()
- fun := c.FunctionName()
- args := c.Args()
- lhs := args[0]
- // add parens if the current operator is lower precedence than the lhs expr operator.
- lhsParen := isComplexOperatorWithRespectTo(fun, lhs)
- rhs := args[1]
- // add parens if the current operator is lower precedence than the rhs expr operator,
- // or the same precedence and the operator is left recursive.
- rhsParen := isComplexOperatorWithRespectTo(fun, rhs)
- if !rhsParen && isLeftRecursive(fun) {
- rhsParen = isSamePrecedence(fun, rhs)
- }
- err := un.visitMaybeNested(lhs, lhsParen)
- if err != nil {
- return err
- }
- unmangled, found := operators.FindReverseBinaryOperator(fun)
- if !found {
- return fmt.Errorf("cannot unmangle operator: %s", fun)
- }
-
- un.writeOperatorWithWrapping(fun, unmangled)
- return un.visitMaybeNested(rhs, rhsParen)
-}
-
-func (un *unparser) visitCallConditional(expr ast.Expr) error {
- c := expr.AsCall()
- args := c.Args()
- // add parens if operand is a conditional itself.
- nested := isSamePrecedence(operators.Conditional, args[0]) ||
- isComplexOperator(args[0])
- err := un.visitMaybeNested(args[0], nested)
- if err != nil {
- return err
- }
- un.writeOperatorWithWrapping(operators.Conditional, "?")
-
- // add parens if operand is a conditional itself.
- nested = isSamePrecedence(operators.Conditional, args[1]) ||
- isComplexOperator(args[1])
- err = un.visitMaybeNested(args[1], nested)
- if err != nil {
- return err
- }
-
- un.str.WriteString(" : ")
- // add parens if operand is a conditional itself.
- nested = isSamePrecedence(operators.Conditional, args[2]) ||
- isComplexOperator(args[2])
-
- return un.visitMaybeNested(args[2], nested)
-}
-
-func (un *unparser) visitCallFunc(expr ast.Expr) error {
- c := expr.AsCall()
- fun := c.FunctionName()
- args := c.Args()
- if c.IsMemberFunction() {
- nested := isBinaryOrTernaryOperator(c.Target())
- err := un.visitMaybeNested(c.Target(), nested)
- if err != nil {
- return err
- }
- un.str.WriteString(".")
- }
- un.str.WriteString(fun)
- un.str.WriteString("(")
- for i, arg := range args {
- err := un.visit(arg)
- if err != nil {
- return err
- }
- if i < len(args)-1 {
- un.str.WriteString(", ")
- }
- }
- un.str.WriteString(")")
- return nil
-}
-
-func (un *unparser) visitCallIndex(expr ast.Expr) error {
- return un.visitCallIndexInternal(expr, "[")
-}
-
-func (un *unparser) visitCallOptIndex(expr ast.Expr) error {
- return un.visitCallIndexInternal(expr, "[?")
-}
-
-func (un *unparser) visitCallIndexInternal(expr ast.Expr, op string) error {
- c := expr.AsCall()
- args := c.Args()
- nested := isBinaryOrTernaryOperator(args[0])
- err := un.visitMaybeNested(args[0], nested)
- if err != nil {
- return err
- }
- un.str.WriteString(op)
- err = un.visit(args[1])
- if err != nil {
- return err
- }
- un.str.WriteString("]")
- return nil
-}
-
-func (un *unparser) visitCallUnary(expr ast.Expr) error {
- c := expr.AsCall()
- fun := c.FunctionName()
- args := c.Args()
- unmangled, found := operators.FindReverse(fun)
- if !found {
- return fmt.Errorf("cannot unmangle operator: %s", fun)
- }
- un.str.WriteString(unmangled)
- nested := isComplexOperator(args[0])
- return un.visitMaybeNested(args[0], nested)
-}
-
-func (un *unparser) visitConst(expr ast.Expr) error {
- val := expr.AsLiteral()
- switch val := val.(type) {
- case types.Bool:
- un.str.WriteString(strconv.FormatBool(bool(val)))
- case types.Bytes:
- // bytes constants are surrounded with b""
- un.str.WriteString(`b"`)
- un.str.WriteString(bytesToOctets([]byte(val)))
- un.str.WriteString(`"`)
- case types.Double:
- // represent the float using the minimum required digits
- d := strconv.FormatFloat(float64(val), 'g', -1, 64)
- un.str.WriteString(d)
- if !strings.Contains(d, ".") {
- un.str.WriteString(".0")
- }
- case types.Int:
- i := strconv.FormatInt(int64(val), 10)
- un.str.WriteString(i)
- case types.Null:
- un.str.WriteString("null")
- case types.String:
- // strings will be double quoted with quotes escaped.
- un.str.WriteString(strconv.Quote(string(val)))
- case types.Uint:
- // uint literals have a 'u' suffix.
- ui := strconv.FormatUint(uint64(val), 10)
- un.str.WriteString(ui)
- un.str.WriteString("u")
- default:
- return fmt.Errorf("unsupported constant: %v", expr)
- }
- return nil
-}
-
-func (un *unparser) visitIdent(expr ast.Expr) error {
- un.str.WriteString(expr.AsIdent())
- return nil
-}
-
-func (un *unparser) visitList(expr ast.Expr) error {
- l := expr.AsList()
- elems := l.Elements()
- optIndices := make(map[int]bool, len(elems))
- for _, idx := range l.OptionalIndices() {
- optIndices[int(idx)] = true
- }
- un.str.WriteString("[")
- for i, elem := range elems {
- if optIndices[i] {
- un.str.WriteString("?")
- }
- err := un.visit(elem)
- if err != nil {
- return err
- }
- if i < len(elems)-1 {
- un.str.WriteString(", ")
- }
- }
- un.str.WriteString("]")
- return nil
-}
-
-func (un *unparser) visitOptSelect(expr ast.Expr) error {
- c := expr.AsCall()
- args := c.Args()
- operand := args[0]
- field := args[1].AsLiteral().(types.String)
- return un.visitSelectInternal(operand, false, ".?", string(field))
-}
-
-func (un *unparser) visitSelect(expr ast.Expr) error {
- sel := expr.AsSelect()
- return un.visitSelectInternal(sel.Operand(), sel.IsTestOnly(), ".", sel.FieldName())
-}
-
-func (un *unparser) visitSelectInternal(operand ast.Expr, testOnly bool, op string, field string) error {
- // handle the case when the select expression was generated by the has() macro.
- if testOnly {
- un.str.WriteString("has(")
- }
- nested := !testOnly && isBinaryOrTernaryOperator(operand)
- err := un.visitMaybeNested(operand, nested)
- if err != nil {
- return err
- }
- un.str.WriteString(op)
- un.str.WriteString(field)
- if testOnly {
- un.str.WriteString(")")
- }
- return nil
-}
-
-func (un *unparser) visitStructMsg(expr ast.Expr) error {
- m := expr.AsStruct()
- fields := m.Fields()
- un.str.WriteString(m.TypeName())
- un.str.WriteString("{")
- for i, f := range fields {
- field := f.AsStructField()
- f := field.Name()
- if field.IsOptional() {
- un.str.WriteString("?")
- }
- un.str.WriteString(f)
- un.str.WriteString(": ")
- v := field.Value()
- err := un.visit(v)
- if err != nil {
- return err
- }
- if i < len(fields)-1 {
- un.str.WriteString(", ")
- }
- }
- un.str.WriteString("}")
- return nil
-}
-
-func (un *unparser) visitStructMap(expr ast.Expr) error {
- m := expr.AsMap()
- entries := m.Entries()
- un.str.WriteString("{")
- for i, e := range entries {
- entry := e.AsMapEntry()
- k := entry.Key()
- if entry.IsOptional() {
- un.str.WriteString("?")
- }
- err := un.visit(k)
- if err != nil {
- return err
- }
- un.str.WriteString(": ")
- v := entry.Value()
- err = un.visit(v)
- if err != nil {
- return err
- }
- if i < len(entries)-1 {
- un.str.WriteString(", ")
- }
- }
- un.str.WriteString("}")
- return nil
-}
-
-func (un *unparser) visitMaybeMacroCall(expr ast.Expr) (bool, error) {
- call, found := un.info.GetMacroCall(expr.ID())
- if !found {
- return false, nil
- }
- return true, un.visit(call)
-}
-
-func (un *unparser) visitMaybeNested(expr ast.Expr, nested bool) error {
- if nested {
- un.str.WriteString("(")
- }
- err := un.visit(expr)
- if err != nil {
- return err
- }
- if nested {
- un.str.WriteString(")")
- }
- return nil
-}
-
-// isLeftRecursive indicates whether the parser resolves the call in a left-recursive manner as
-// this can have an effect on how parentheses affect the order of operations in the AST.
-func isLeftRecursive(op string) bool {
- return op != operators.LogicalAnd && op != operators.LogicalOr
-}
-
-// isSamePrecedence indicates whether the precedence of the input operator is the same as the
-// precedence of the (possible) operation represented in the input Expr.
-//
-// If the expr is not a Call, the result is false.
-func isSamePrecedence(op string, expr ast.Expr) bool {
- if expr.Kind() != ast.CallKind {
- return false
- }
- c := expr.AsCall()
- other := c.FunctionName()
- return operators.Precedence(op) == operators.Precedence(other)
-}
-
-// isLowerPrecedence indicates whether the precedence of the input operator is lower precedence
-// than the (possible) operation represented in the input Expr.
-//
-// If the expr is not a Call, the result is false.
-func isLowerPrecedence(op string, expr ast.Expr) bool {
- c := expr.AsCall()
- other := c.FunctionName()
- return operators.Precedence(op) < operators.Precedence(other)
-}
-
-// Indicates whether the expr is a complex operator, i.e., a call expression
-// with 2 or more arguments.
-func isComplexOperator(expr ast.Expr) bool {
- if expr.Kind() == ast.CallKind && len(expr.AsCall().Args()) >= 2 {
- return true
- }
- return false
-}
-
-// Indicates whether it is a complex operation compared to another.
-// expr is *not* considered complex if it is not a call expression or has
-// less than two arguments, or if it has a higher precedence than op.
-func isComplexOperatorWithRespectTo(op string, expr ast.Expr) bool {
- if expr.Kind() != ast.CallKind || len(expr.AsCall().Args()) < 2 {
- return false
- }
- return isLowerPrecedence(op, expr)
-}
-
-// Indicate whether this is a binary or ternary operator.
-func isBinaryOrTernaryOperator(expr ast.Expr) bool {
- if expr.Kind() != ast.CallKind || len(expr.AsCall().Args()) < 2 {
- return false
- }
- _, isBinaryOp := operators.FindReverseBinaryOperator(expr.AsCall().FunctionName())
- return isBinaryOp || isSamePrecedence(operators.Conditional, expr)
-}
-
-// bytesToOctets converts byte sequences to a string using a three digit octal encoded value
-// per byte.
-func bytesToOctets(byteVal []byte) string {
- var b strings.Builder
- for _, c := range byteVal {
- fmt.Fprintf(&b, "\\%03o", c)
- }
- return b.String()
-}
-
-// writeOperatorWithWrapping outputs the operator and inserts a newline for operators configured
-// in the unparser options.
-func (un *unparser) writeOperatorWithWrapping(fun string, unmangled string) bool {
- _, wrapOperatorExists := un.options.operatorsToWrapOn[fun]
- lineLength := un.str.Len() - un.lastWrappedIndex + len(fun)
-
- if wrapOperatorExists && lineLength >= un.options.wrapOnColumn {
- un.lastWrappedIndex = un.str.Len()
- // wrapAfterColumnLimit flag dictates whether the newline is placed
- // before or after the operator
- if un.options.wrapAfterColumnLimit {
- // Input: a && b
- // Output: a &&\nb
- un.str.WriteString(" ")
- un.str.WriteString(unmangled)
- un.str.WriteString("\n")
- } else {
- // Input: a && b
- // Output: a\n&& b
- un.str.WriteString("\n")
- un.str.WriteString(unmangled)
- un.str.WriteString(" ")
- }
- return true
- }
- un.str.WriteString(" ")
- un.str.WriteString(unmangled)
- un.str.WriteString(" ")
- return false
-}
-
-// Defined defaults for the unparser options
-var (
- defaultWrapOnColumn = 80
- defaultWrapAfterColumnLimit = true
- defaultOperatorsToWrapOn = map[string]bool{
- operators.LogicalAnd: true,
- operators.LogicalOr: true,
- }
-)
-
-// UnparserOption is a functional option for configuring the output formatting
-// of the Unparse function.
-type UnparserOption func(*unparserOption) (*unparserOption, error)
-
-// Internal representation of the UnparserOption type
-type unparserOption struct {
- wrapOnColumn int
- operatorsToWrapOn map[string]bool
- wrapAfterColumnLimit bool
-}
-
-// WrapOnColumn wraps the output expression when its string length exceeds a specified limit
-// for operators set by WrapOnOperators function or by default, "&&" and "||" will be wrapped.
-//
-// Example usage:
-//
-// Unparse(expr, sourceInfo, WrapOnColumn(40), WrapOnOperators(Operators.LogicalAnd))
-//
-// This will insert a newline immediately after the logical AND operator for the below example input:
-//
-// Input:
-// 'my-principal-group' in request.auth.claims && request.auth.claims.iat > now - duration('5m')
-//
-// Output:
-// 'my-principal-group' in request.auth.claims &&
-// request.auth.claims.iat > now - duration('5m')
-func WrapOnColumn(col int) UnparserOption {
- return func(opt *unparserOption) (*unparserOption, error) {
- if col < 1 {
- return nil, fmt.Errorf("Invalid unparser option. Wrap column value must be greater than or equal to 1. Got %v instead", col)
- }
- opt.wrapOnColumn = col
- return opt, nil
- }
-}
-
-// WrapOnOperators specifies which operators to perform word wrapping on an output expression when its string length
-// exceeds the column limit set by WrapOnColumn function.
-//
-// Word wrapping is supported on non-unary symbolic operators. Refer to operators.go for the full list
-//
-// This will replace any previously supplied operators instead of merging them.
-func WrapOnOperators(symbols ...string) UnparserOption {
- return func(opt *unparserOption) (*unparserOption, error) {
- opt.operatorsToWrapOn = make(map[string]bool)
- for _, symbol := range symbols {
- _, found := operators.FindReverse(symbol)
- if !found {
- return nil, fmt.Errorf("Invalid unparser option. Unsupported operator: %s", symbol)
- }
- arity := operators.Arity(symbol)
- if arity < 2 {
- return nil, fmt.Errorf("Invalid unparser option. Unary operators are unsupported: %s", symbol)
- }
-
- opt.operatorsToWrapOn[symbol] = true
- }
-
- return opt, nil
- }
-}
-
-// WrapAfterColumnLimit dictates whether to insert a newline before or after the specified operator
-// when word wrapping is performed.
-//
-// Example usage:
-//
-// Unparse(expr, sourceInfo, WrapOnColumn(40), WrapOnOperators(Operators.LogicalAnd), WrapAfterColumnLimit(false))
-//
-// This will insert a newline immediately before the logical AND operator for the below example input, ensuring
-// that the length of a line never exceeds the specified column limit:
-//
-// Input:
-// 'my-principal-group' in request.auth.claims && request.auth.claims.iat > now - duration('5m')
-//
-// Output:
-// 'my-principal-group' in request.auth.claims
-// && request.auth.claims.iat > now - duration('5m')
-func WrapAfterColumnLimit(wrapAfter bool) UnparserOption {
- return func(opt *unparserOption) (*unparserOption, error) {
- opt.wrapAfterColumnLimit = wrapAfter
- return opt, nil
- }
-}
diff --git a/vendor/github.com/h2non/filetype/.editorconfig b/vendor/github.com/h2non/filetype/.editorconfig
deleted file mode 100644
index 000dc0a7a..000000000
--- a/vendor/github.com/h2non/filetype/.editorconfig
+++ /dev/null
@@ -1,12 +0,0 @@
-root = true
-
-[*]
-indent_style = tabs
-indent_size = 2
-end_of_line = lf
-charset = utf-8
-trim_trailing_whitespace = true
-insert_final_newline = true
-
-[*.md]
-trim_trailing_whitespace = false
diff --git a/vendor/github.com/h2non/filetype/.gitignore b/vendor/github.com/h2non/filetype/.gitignore
deleted file mode 100644
index 6fefe6cce..000000000
--- a/vendor/github.com/h2non/filetype/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-bin
-.DS_Store
diff --git a/vendor/github.com/h2non/filetype/.travis.yml b/vendor/github.com/h2non/filetype/.travis.yml
deleted file mode 100644
index c9cdbc8da..000000000
--- a/vendor/github.com/h2non/filetype/.travis.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-language: go
-arch:
- - AMD64
- - ppc64le
-go:
- - "1.13"
- - "1.14"
-
-before_install:
- - go get -u -v golang.org/x/lint/golint
-
-script:
- - diff -u <(echo -n) <(gofmt -s -d ./)
- - diff -u <(echo -n) <(go vet ./...)
- - diff -u <(echo -n) <(golint)
- - go test -v -race ./...
diff --git a/vendor/github.com/h2non/filetype/History.md b/vendor/github.com/h2non/filetype/History.md
deleted file mode 100644
index f53f3d241..000000000
--- a/vendor/github.com/h2non/filetype/History.md
+++ /dev/null
@@ -1,163 +0,0 @@
-
-v1.1.3 / 2021-11-21
-===================
-
- * fix(#108): add application file matchers
- * Merge pull request #106 from hannesbraun/aiff-support
- * Add AIFF support
- * fix(archive): format issue indentation
- * feat(version): bump patch
- * Merge pull request #100 from da2018/master
- * Enhance Zstd support
- * Merge pull request #98 from cfergeau/zstd
- * Add zstd support
- * Merge pull request #99 from cfergeau/byteprefixmatcher
- * Introduce bytePrefixMatcher helper
-
-v1.1.0 / 2020-06-06
-===================
-
- * feat: version bump v1.10
- * feat(ci): add go 1.14
- * Merge pull request #82 from andrewstucki/sqlite-update
- * Merge pull request #84 from evanoberholster/master
- * Better differentiation: between image/x-canon-cr2 and image/tiff
- * Merge pull request #1 from h2non/master
- * Update ico filetype per https://www.iana.org/assignments/media-types/image/vnd.microsoft.icon
- * Update rar filetype per https://www.iana.org/assignments/media-types/application/vnd.rar
- * Update exe filetype per https://www.iana.org/assignments/media-types/application/vnd.microsoft.portable-executable
- * Update deb filetype per https://www.iana.org/assignments/media-types/application/vnd.debian.binary-package
- * Update sqlite filetype per https://www.iana.org/assignments/media-types/application/vnd.sqlite3
- * Merge pull request #72 from turn88/master
- * Update document.go
- * Update document.go
- * Update document.go
- * add matchers for office 2003
-
-v1.0.10 / 2019-08-06
-====================
-
- * Merge pull request #76 from lex-r/fix-matroska-detection
- * fix: mkv and webm types detection
-
-v1.0.9 / 2019-07-25
-===================
-
- * Merge pull request #75 from Trane9991/master
- * add video/3gpp support
- * fix: use proper iso file mime type
- * feat: add iso image format
- * Merge pull request #65 from Fentonz/master
- * Merge pull request #70 from fanpei91/master
- * add image/vnd.dwg to README
- * add image/vnd.dwg support
- * Added support for .iso files
-
-v1.0.8 / 2019-02-10
-===================
-
- * refactor(images): heic -> heif
- * feat(docs): add heif format
- * Merge pull request #60 from rikonor/master
- * add heif/heic support
- * fix(docs): dicom -> dcm
- * feat: add dicom type
- * Merge pull request #58 from Fentonz/master
- * Merge pull request #59 from kmanley/master
- * fix example; related to h2non/filetype#43
- * Added DICOM type to archive
-
-
-v1.0.7 / 2019-02-09
-===================
-
- * Merge pull request #56 from akupila/wasm
- * add wasm to readme
- * detect wasm file type
-
-v1.0.6 / 2019-01-22
-===================
-
- * Merge pull request #55 from ivanlemeshev/master
- * Added ftypmp4v to MP4 matcher
- * Merge pull request #54 from aofei/master
- * chore: add support for Go modules
- * feat: add support for AAC (audio/aac)
- * Merge pull request #53 from lynxbyorion/check-for-docoments
- * Added checks for documents.
- * Merge pull request #51 from eriken/master
- * fixed bad mime and import paths
- * Merge pull request #50 from eriken/jpeg2000_support
- * fix import paths
- * jpeg2000 support
- * Merge pull request #47 from Ma124/master
- * Merge pull request #49 from amoore614/master
- * more robust check for .mov files
- * bugfix: reverse order of matcher key list so user registered matchers appear first
- * bugfix: store ptr to MatcherKeys in case user registered matchers are used.
- * update comment
- * Bump buffer size to 8K to allow for more custom file matching
- * refactor(readme): update package import path
- * Merge pull request #48 from kumakichi/support_msooxml
- * do not use v1
- * ok, master already changed travis
- * add fixtures, but MatchReader may not work for some msooxml files, 4096 bytes maybe not enough
- * support ms ooxml, #40
- * Fixed misspells
- * fix(travis): use string notation for matrix items
- * Merge pull request #42 from bruth/patch-2
- * refactor(travis): remove Go 1.6, add Go 1.10
- * Change maximum bytes required for detection
- * Merge pull request #36 from yiiTT/patch-1
- * Add MP4 dash and additional ISO formats
- * Merge pull request #34 from RangelReale/fix-mp4-case
- * Merge pull request #32 from yiiTT/fix-m4v
- * Fixed mp4 detection case-sensitivity according to http://www.ftyps.com/
- * Fix M4v matcher
-
-v1.0.5 / 2017-12-12
-===================
-
- * Merge pull request #30 from RangelReale/fix_mp4
- * Fix duplicated item in mp4 fix
- * Fix MP4 matcher, with information from http://www.file-recovery.com/mp4-signature-format.htm
- * Merge pull request #28 from ikovic/master
- * Updated file header example.
-
-v1.0.4 / 2017-11-29
-===================
-
- * fix: tests and document types matchers
- * refactor(docs): remove codesponsor
- * Merge pull request #26 from bienkma/master
- * Add support check file type: .doc, .docx, .pptx, .ppt, .xls, .xlsx
- * feat(docs): add code sponsor banner
- * feat(travis): add go 1.9
- * Merge pull request #24 from strazzere/patch-1
- * Fix typo in unknown
-
-v1.0.3 / 2017-08-03
-===================
-
- * Merge pull request #21 from elemeta/master
- * Add Elf file as supported matcher archive type
-
-v1.0.2 / 2017-07-26
-===================
-
- * Merge pull request #20 from marshyski/master
- * Added RedHat RPM as supported matcher archive type
- * Merge pull request #19 from nlamirault/patch-1
- * Fix typo in documentation
-
-v1.0.1 / 2017-02-24
-===================
-
- * Merge pull request #18 from Impyy/enable-webm
- * Enable the webm matcher
- * feat(docs): add Go version badge
-
-1.0.0 / 2016-12-11
-==================
-
-- Initial stable version (v1.0.0).
diff --git a/vendor/github.com/h2non/filetype/LICENSE b/vendor/github.com/h2non/filetype/LICENSE
deleted file mode 100644
index 30ede59b6..000000000
--- a/vendor/github.com/h2non/filetype/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-The MIT License
-
-Copyright (c) Tomas Aparicio
-
-Permission is hereby granted, free of charge, to any person
-obtaining a copy of this software and associated documentation
-files (the "Software"), to deal in the Software without
-restriction, including without limitation the rights to use,
-copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/h2non/filetype/README.md b/vendor/github.com/h2non/filetype/README.md
deleted file mode 100644
index 64033e903..000000000
--- a/vendor/github.com/h2non/filetype/README.md
+++ /dev/null
@@ -1,294 +0,0 @@
-# filetype [![Build Status](https://travis-ci.org/h2non/filetype.svg)](https://travis-ci.org/h2non/filetype) [![GoDoc](https://godoc.org/github.com/h2non/filetype?status.svg)](https://godoc.org/github.com/h2non/filetype) [![Go Report Card](http://goreportcard.com/badge/h2non/filetype)](http://goreportcard.com/report/h2non/filetype) [![Go Version](https://img.shields.io/badge/go-v1.0+-green.svg?style=flat)](https://github.com/h2non/gentleman)
-
-Small and dependency-free [Go](https://golang.org) package to infer the file and MIME type by checking the magic number signature.
-
-For SVG file type checking, see [go-is-svg](https://github.com/h2non/go-is-svg) package. Python port: [filetype.py](https://github.com/h2non/filetype.py).
-
-## Features
-
-- Supports a [wide range](#supported-types) of file types
-- Provides file extension and proper MIME type
-- File discovery by extension or MIME type
-- File discovery by class (image, video, audio...)
-- Provides a bunch of helpers and file matching shortcuts
-- [Pluggable](#add-additional-file-type-matchers): add custom new types and matchers
-- Simple and semantic API
-- [Blazing fast](#benchmarks), even processing large files
-- Only the first 262 bytes, representing the maximum file header, are required, so you can just [pass a slice](#file-header)
-- Dependency free (just Go code, no C compilation needed)
-- Cross-platform file recognition
-
-## Installation
-
-```bash
-go get github.com/h2non/filetype
-```
-
-## API
-
-See [Godoc](https://godoc.org/github.com/h2non/filetype) reference.
-
-### Subpackages
-
-- [`github.com/h2non/filetype/types`](https://godoc.org/github.com/h2non/filetype/types)
-- [`github.com/h2non/filetype/matchers`](https://godoc.org/github.com/h2non/filetype/matchers)
-
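For quick orientation, the matcher functions in the `matchers` subpackage can also be called directly on a raw buffer; a minimal sketch (using the `matchers.Zip` and `matchers.Png` functions defined later in this patch):

```go
package main

import (
	"fmt"

	"github.com/h2non/filetype/matchers"
)

func main() {
	// First four bytes of a ZIP local file header: "PK\x03\x04".
	buf := []byte{0x50, 0x4B, 0x03, 0x04}

	// Each matcher takes a byte slice and reports whether it matches.
	if matchers.Zip(buf) {
		fmt.Println("Looks like a ZIP archive")
	}
	if !matchers.Png(buf) {
		fmt.Println("Not a PNG image")
	}
}
```
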
-## Examples
-
-#### Simple file type checking
-
-```go
-package main
-
-import (
- "fmt"
- "io/ioutil"
-
- "github.com/h2non/filetype"
-)
-
-func main() {
- buf, _ := ioutil.ReadFile("sample.jpg")
-
- kind, _ := filetype.Match(buf)
- if kind == filetype.Unknown {
- fmt.Println("Unknown file type")
- return
- }
-
- fmt.Printf("File type: %s. MIME: %s\n", kind.Extension, kind.MIME.Value)
-}
-```
-
-#### Check type class
-
-```go
-package main
-
-import (
- "fmt"
- "io/ioutil"
-
- "github.com/h2non/filetype"
-)
-
-func main() {
- buf, _ := ioutil.ReadFile("sample.jpg")
-
- if filetype.IsImage(buf) {
- fmt.Println("File is an image")
- } else {
- fmt.Println("Not an image")
- }
-}
-```
-
-#### Supported type
-
-```go
-package main
-
-import (
- "fmt"
-
- "github.com/h2non/filetype"
-)
-
-func main() {
- // Check if file is supported by extension
- if filetype.IsSupported("jpg") {
- fmt.Println("Extension supported")
- } else {
- fmt.Println("Extension not supported")
- }
-
- // Check if file is supported by MIME type
- if filetype.IsMIMESupported("image/jpeg") {
- fmt.Println("MIME type supported")
- } else {
- fmt.Println("MIME type not supported")
- }
-}
-```
-
-#### File header
-
-```go
-package main
-
-import (
- "fmt"
- "io/ioutil"
-
- "github.com/h2non/filetype"
-)
-
-func main() {
- // Open a file descriptor
- file, _ := os.Open("movie.mp4")
-
- // We only need to pass the file header, i.e. the first 261 bytes
- head := make([]byte, 261)
- file.Read(head)
-
- if filetype.IsImage(head) {
- fmt.Println("File is an image")
- } else {
- fmt.Println("Not an image")
- }
-}
-```
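If you prefer not to manage the header buffer yourself, the `MatchFile` and `MatchReader` helpers (defined in `match.go`, removed later in this patch) read the header for you; a minimal sketch:

```go
package main

import (
	"fmt"

	"github.com/h2non/filetype"
)

func main() {
	// MatchFile opens the file and reads the header internally.
	kind, err := filetype.MatchFile("movie.mp4")
	if err != nil {
		fmt.Println("Error:", err)
		return
	}
	fmt.Printf("File type: %s. MIME: %s\n", kind.Extension, kind.MIME.Value)
}
```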
-
-#### Add additional file type matchers
-
-```go
-package main
-
-import (
- "fmt"
-
- "github.com/h2non/filetype"
-)
-
-var fooType = filetype.NewType("foo", "foo/foo")
-
-func fooMatcher(buf []byte) bool {
- return len(buf) > 1 && buf[0] == 0x01 && buf[1] == 0x02
-}
-
-func main() {
- // Register the new matcher and its type
- filetype.AddMatcher(fooType, fooMatcher)
-
- // Check if the new type is supported by extension
- if filetype.IsSupported("foo") {
- fmt.Println("New supported type: foo")
- }
-
- // Check if the new type is supported by MIME
- if filetype.IsMIMESupported("foo/foo") {
- fmt.Println("New supported MIME type: foo/foo")
- }
-
- // Try to match the file
- fooFile := []byte{0x01, 0x02}
- kind, _ := filetype.Match(fooFile)
- if kind == filetype.Unknown {
- fmt.Println("Unknown file type")
- } else {
- fmt.Printf("File type matched: %s\n", kind.Extension)
- }
-}
-```
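Note that matchers registered through `AddMatcher` are evaluated before the built-in ones: `NewMatcher` prepends the new type to the matcher key list (see `matchers.go` later in this patch), so custom types take precedence over the default matchers.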
-
-## Supported types
-
-#### Image
-
-- **jpg** - `image/jpeg`
-- **png** - `image/png`
-- **gif** - `image/gif`
-- **webp** - `image/webp`
-- **cr2** - `image/x-canon-cr2`
-- **tif** - `image/tiff`
-- **bmp** - `image/bmp`
-- **heif** - `image/heif`
-- **jxr** - `image/vnd.ms-photo`
-- **psd** - `image/vnd.adobe.photoshop`
-- **ico** - `image/vnd.microsoft.icon`
-- **dwg** - `image/vnd.dwg`
-
-#### Video
-
-- **mp4** - `video/mp4`
-- **m4v** - `video/x-m4v`
-- **mkv** - `video/x-matroska`
-- **webm** - `video/webm`
-- **mov** - `video/quicktime`
-- **avi** - `video/x-msvideo`
-- **wmv** - `video/x-ms-wmv`
-- **mpg** - `video/mpeg`
-- **flv** - `video/x-flv`
-- **3gp** - `video/3gpp`
-
-#### Audio
-
-- **mid** - `audio/midi`
-- **mp3** - `audio/mpeg`
-- **m4a** - `audio/m4a`
-- **ogg** - `audio/ogg`
-- **flac** - `audio/x-flac`
-- **wav** - `audio/x-wav`
-- **amr** - `audio/amr`
-- **aac** - `audio/aac`
-- **aiff** - `audio/x-aiff`
-
-#### Archive
-
-- **epub** - `application/epub+zip`
-- **zip** - `application/zip`
-- **tar** - `application/x-tar`
-- **rar** - `application/vnd.rar`
-- **gz** - `application/gzip`
-- **bz2** - `application/x-bzip2`
-- **7z** - `application/x-7z-compressed`
-- **xz** - `application/x-xz`
-- **zstd** - `application/zstd`
-- **pdf** - `application/pdf`
-- **exe** - `application/vnd.microsoft.portable-executable`
-- **swf** - `application/x-shockwave-flash`
-- **rtf** - `application/rtf`
-- **iso** - `application/x-iso9660-image`
-- **eot** - `application/octet-stream`
-- **ps** - `application/postscript`
-- **sqlite** - `application/vnd.sqlite3`
-- **nes** - `application/x-nintendo-nes-rom`
-- **crx** - `application/x-google-chrome-extension`
-- **cab** - `application/vnd.ms-cab-compressed`
-- **deb** - `application/vnd.debian.binary-package`
-- **ar** - `application/x-unix-archive`
-- **Z** - `application/x-compress`
-- **lz** - `application/x-lzip`
-- **rpm** - `application/x-rpm`
-- **elf** - `application/x-executable`
-- **dcm** - `application/dicom`
-
-#### Documents
-
-- **doc** - `application/msword`
-- **docx** - `application/vnd.openxmlformats-officedocument.wordprocessingml.document`
-- **xls** - `application/vnd.ms-excel`
-- **xlsx** - `application/vnd.openxmlformats-officedocument.spreadsheetml.sheet`
-- **ppt** - `application/vnd.ms-powerpoint`
-- **pptx** - `application/vnd.openxmlformats-officedocument.presentationml.presentation`
-
-#### Font
-
-- **woff** - `application/font-woff`
-- **woff2** - `application/font-woff`
-- **ttf** - `application/font-sfnt`
-- **otf** - `application/font-sfnt`
-
-#### Application
-
-- **wasm** - `application/wasm`
-- **dex** - `application/vnd.android.dex`
-- **dey** - `application/vnd.android.dey`
-
-## Benchmarks
-
-Measured using [real files](https://github.com/h2non/filetype/tree/master/fixtures).
-
-Environment: OSX x64 i7 2.7 GHz
-
-```bash
-BenchmarkMatchTar-8 1000000 1083 ns/op
-BenchmarkMatchZip-8 1000000 1162 ns/op
-BenchmarkMatchJpeg-8 1000000 1280 ns/op
-BenchmarkMatchGif-8 1000000 1315 ns/op
-BenchmarkMatchPng-8 1000000 1121 ns/op
-```
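To reproduce these numbers locally, the standard Go benchmark runner should work from a checkout of the repository (assuming the bundled fixtures are present):

```bash
go test -bench=. -benchmem
```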
-
-## License
-
-MIT - Tomas Aparicio
diff --git a/vendor/github.com/h2non/filetype/filetype.go b/vendor/github.com/h2non/filetype/filetype.go
deleted file mode 100644
index c99691e5f..000000000
--- a/vendor/github.com/h2non/filetype/filetype.go
+++ /dev/null
@@ -1,102 +0,0 @@
-package filetype
-
-import (
- "errors"
-
- "github.com/h2non/filetype/matchers"
- "github.com/h2non/filetype/types"
-)
-
-// Types stores a map of supported types
-var Types = types.Types
-
-// NewType creates and registers a new type
-var NewType = types.NewType
-
-// Unknown represents an unknown file type
-var Unknown = types.Unknown
-
-// ErrEmptyBuffer represents an empty buffer error
-var ErrEmptyBuffer = errors.New("Empty buffer")
-
-// ErrUnknownBuffer represents an unknown buffer error
-var ErrUnknownBuffer = errors.New("Unknown buffer type")
-
-// AddType registers a new file type
-func AddType(ext, mime string) types.Type {
- return types.NewType(ext, mime)
-}
-
-// Is checks if a given buffer matches with the given file type extension
-func Is(buf []byte, ext string) bool {
- kind := types.Get(ext)
- if kind != types.Unknown {
- return IsType(buf, kind)
- }
- return false
-}
-
-// IsExtension is a semantic alias to Is()
-func IsExtension(buf []byte, ext string) bool {
- return Is(buf, ext)
-}
-
-// IsType checks if a given buffer matches with the given file type
-func IsType(buf []byte, kind types.Type) bool {
- matcher := matchers.Matchers[kind]
- if matcher == nil {
- return false
- }
- return matcher(buf) != types.Unknown
-}
-
-// IsMIME checks if a given buffer matches with the given MIME type
-func IsMIME(buf []byte, mime string) bool {
- result := false
- types.Types.Range(func(k, v interface{}) bool {
- kind := v.(types.Type)
- if kind.MIME.Value == mime {
- matcher := matchers.Matchers[kind]
- result = matcher(buf) != types.Unknown
- return false
- }
- return true
- })
-
- return result
-}
-
-// IsSupported checks if a given file extension is supported
-func IsSupported(ext string) bool {
- result := false
- types.Types.Range(func(k, v interface{}) bool {
- key := k.(string)
- if key == ext {
- result = true
- return false
- }
- return true
- })
-
- return result
-}
-
-// IsMIMESupported checks if a given MIME type is supported
-func IsMIMESupported(mime string) bool {
- result := false
- types.Types.Range(func(k, v interface{}) bool {
- kind := v.(types.Type)
- if kind.MIME.Value == mime {
- result = true
- return false
- }
- return true
- })
-
- return result
-}
-
-// GetType retrieves a Type by file extension
-func GetType(ext string) types.Type {
- return types.Get(ext)
-}
diff --git a/vendor/github.com/h2non/filetype/kind.go b/vendor/github.com/h2non/filetype/kind.go
deleted file mode 100644
index 9608b0a7a..000000000
--- a/vendor/github.com/h2non/filetype/kind.go
+++ /dev/null
@@ -1,91 +0,0 @@
-package filetype
-
-import (
- "github.com/h2non/filetype/matchers"
- "github.com/h2non/filetype/types"
-)
-
-// Image tries to match a file as image type
-func Image(buf []byte) (types.Type, error) {
- return doMatchMap(buf, matchers.Image)
-}
-
-// IsImage checks if the given buffer is an image type
-func IsImage(buf []byte) bool {
- kind, _ := Image(buf)
- return kind != types.Unknown
-}
-
-// Audio tries to match a file as audio type
-func Audio(buf []byte) (types.Type, error) {
- return doMatchMap(buf, matchers.Audio)
-}
-
-// IsAudio checks if the given buffer is an audio type
-func IsAudio(buf []byte) bool {
- kind, _ := Audio(buf)
- return kind != types.Unknown
-}
-
-// Video tries to match a file as video type
-func Video(buf []byte) (types.Type, error) {
- return doMatchMap(buf, matchers.Video)
-}
-
-// IsVideo checks if the given buffer is a video type
-func IsVideo(buf []byte) bool {
- kind, _ := Video(buf)
- return kind != types.Unknown
-}
-
-// Font tries to match a file as text font type
-func Font(buf []byte) (types.Type, error) {
- return doMatchMap(buf, matchers.Font)
-}
-
-// IsFont checks if the given buffer is a font type
-func IsFont(buf []byte) bool {
- kind, _ := Font(buf)
- return kind != types.Unknown
-}
-
-// Archive tries to match a file as generic archive type
-func Archive(buf []byte) (types.Type, error) {
- return doMatchMap(buf, matchers.Archive)
-}
-
-// IsArchive checks if the given buffer is an archive type
-func IsArchive(buf []byte) bool {
- kind, _ := Archive(buf)
- return kind != types.Unknown
-}
-
-// Document tries to match a file as document type
-func Document(buf []byte) (types.Type, error) {
- return doMatchMap(buf, matchers.Document)
-}
-
-// IsDocument checks if the given buffer is a document type
-func IsDocument(buf []byte) bool {
- kind, _ := Document(buf)
- return kind != types.Unknown
-}
-
-// Application tries to match a file as an application type
-func Application(buf []byte) (types.Type, error) {
- return doMatchMap(buf, matchers.Application)
-}
-
-// IsApplication checks if the given buffer is an application type
-func IsApplication(buf []byte) bool {
- kind, _ := Application(buf)
- return kind != types.Unknown
-}
-
-func doMatchMap(buf []byte, matcherMap matchers.Map) (types.Type, error) {
- kind := MatchMap(buf, matcherMap)
- if kind != types.Unknown {
- return kind, nil
- }
- return kind, ErrUnknownBuffer
-}
diff --git a/vendor/github.com/h2non/filetype/match.go b/vendor/github.com/h2non/filetype/match.go
deleted file mode 100644
index 82cf80468..000000000
--- a/vendor/github.com/h2non/filetype/match.go
+++ /dev/null
@@ -1,90 +0,0 @@
-package filetype
-
-import (
- "io"
- "os"
-
- "github.com/h2non/filetype/matchers"
- "github.com/h2non/filetype/types"
-)
-
-// Matchers is an alias to matchers.Matchers
-var Matchers = matchers.Matchers
-
-// MatcherKeys is an alias to matchers.MatcherKeys
-var MatcherKeys = &matchers.MatcherKeys
-
-// NewMatcher is an alias to matchers.NewMatcher
-var NewMatcher = matchers.NewMatcher
-
-// Match infers the file type of a given buffer inspecting its magic numbers signature
-func Match(buf []byte) (types.Type, error) {
- length := len(buf)
- if length == 0 {
- return types.Unknown, ErrEmptyBuffer
- }
-
- for _, kind := range *MatcherKeys {
- checker := Matchers[kind]
- match := checker(buf)
- if match != types.Unknown && match.Extension != "" {
- return match, nil
- }
- }
-
- return types.Unknown, nil
-}
-
-// Get is an alias to Match()
-func Get(buf []byte) (types.Type, error) {
- return Match(buf)
-}
-
-// MatchFile infers a file type for a file
-func MatchFile(filepath string) (types.Type, error) {
- file, err := os.Open(filepath)
- if err != nil {
- return types.Unknown, err
- }
- defer file.Close()
-
- return MatchReader(file)
-}
-
-// MatchReader is a convenient wrapper to Match() any Reader
-func MatchReader(reader io.Reader) (types.Type, error) {
- buffer := make([]byte, 8192) // 8K makes msooxml tests happy and allows for expanded custom file checks
-
- _, err := reader.Read(buffer)
- if err != nil && err != io.EOF {
- return types.Unknown, err
- }
-
- return Match(buffer)
-}
-
-// AddMatcher registers a new matcher type
-func AddMatcher(fileType types.Type, matcher matchers.Matcher) matchers.TypeMatcher {
- return matchers.NewMatcher(fileType, matcher)
-}
-
-// Matches checks if the given buffer matches with some supported file type
-func Matches(buf []byte) bool {
- kind, _ := Match(buf)
- return kind != types.Unknown
-}
-
-// MatchMap performs a file matching against a map of match functions
-func MatchMap(buf []byte, matchers matchers.Map) types.Type {
- for kind, matcher := range matchers {
- if matcher(buf) {
- return kind
- }
- }
- return types.Unknown
-}
-
-// MatchesMap is an alias to Matches() but matches against a map of match functions
-func MatchesMap(buf []byte, matchers matchers.Map) bool {
- return MatchMap(buf, matchers) != types.Unknown
-}
diff --git a/vendor/github.com/h2non/filetype/matchers/application.go b/vendor/github.com/h2non/filetype/matchers/application.go
deleted file mode 100644
index 67fdab3d8..000000000
--- a/vendor/github.com/h2non/filetype/matchers/application.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package matchers
-
-var (
- TypeWasm = newType("wasm", "application/wasm")
- TypeDex = newType("dex", "application/vnd.android.dex")
- TypeDey = newType("dey", "application/vnd.android.dey")
-)
-
-var Application = Map{
- TypeWasm: Wasm,
- TypeDex: Dex,
- TypeDey: Dey,
-}
-
-// Wasm detects a WebAssembly 1.0 filetype.
-func Wasm(buf []byte) bool {
- // WASM starts with `\0asm`, followed by the version.
- // http://webassembly.github.io/spec/core/binary/modules.html#binary-magic
- return len(buf) >= 8 &&
- buf[0] == 0x00 && buf[1] == 0x61 &&
- buf[2] == 0x73 && buf[3] == 0x6D &&
- buf[4] == 0x01 && buf[5] == 0x00 &&
- buf[6] == 0x00 && buf[7] == 0x00
-}
-
-// Dex detects a Dalvik Executable (DEX)
-func Dex(buf []byte) bool {
- // https://source.android.com/devices/tech/dalvik/dex-format#dex-file-magic
- return len(buf) > 36 &&
- // magic
- buf[0] == 0x64 && buf[1] == 0x65 && buf[2] == 0x78 && buf[3] == 0x0A &&
- // file size
- buf[36] == 0x70
-}
-
-// Dey detects an Optimized Dalvik Executable (ODEX)
-func Dey(buf []byte) bool {
- return len(buf) > 100 &&
- // dey magic
- buf[0] == 0x64 && buf[1] == 0x65 && buf[2] == 0x79 && buf[3] == 0x0A &&
- // dex
- Dex(buf[40:100])
-}
diff --git a/vendor/github.com/h2non/filetype/matchers/archive.go b/vendor/github.com/h2non/filetype/matchers/archive.go
deleted file mode 100644
index ee618a925..000000000
--- a/vendor/github.com/h2non/filetype/matchers/archive.go
+++ /dev/null
@@ -1,211 +0,0 @@
-package matchers
-
-import "encoding/binary"
-
-const (
- ZstdMagicSkippableStart = 0x184D2A50
- ZstdMagicSkippableMask = 0xFFFFFFF0
-)
-
-var (
- TypeEpub = newType("epub", "application/epub+zip")
- TypeZip = newType("zip", "application/zip")
- TypeTar = newType("tar", "application/x-tar")
- TypeRar = newType("rar", "application/vnd.rar")
- TypeGz = newType("gz", "application/gzip")
- TypeBz2 = newType("bz2", "application/x-bzip2")
- Type7z = newType("7z", "application/x-7z-compressed")
- TypeXz = newType("xz", "application/x-xz")
- TypeZstd = newType("zst", "application/zstd")
- TypePdf = newType("pdf", "application/pdf")
- TypeExe = newType("exe", "application/vnd.microsoft.portable-executable")
- TypeSwf = newType("swf", "application/x-shockwave-flash")
- TypeRtf = newType("rtf", "application/rtf")
- TypeEot = newType("eot", "application/octet-stream")
- TypePs = newType("ps", "application/postscript")
- TypeSqlite = newType("sqlite", "application/vnd.sqlite3")
- TypeNes = newType("nes", "application/x-nintendo-nes-rom")
- TypeCrx = newType("crx", "application/x-google-chrome-extension")
- TypeCab = newType("cab", "application/vnd.ms-cab-compressed")
- TypeDeb = newType("deb", "application/vnd.debian.binary-package")
- TypeAr = newType("ar", "application/x-unix-archive")
- TypeZ = newType("Z", "application/x-compress")
- TypeLz = newType("lz", "application/x-lzip")
- TypeRpm = newType("rpm", "application/x-rpm")
- TypeElf = newType("elf", "application/x-executable")
- TypeDcm = newType("dcm", "application/dicom")
- TypeIso = newType("iso", "application/x-iso9660-image")
- TypeMachO = newType("macho", "application/x-mach-binary") // Mach-O binaries have no common extension.
-)
-
-var Archive = Map{
- TypeEpub: bytePrefixMatcher(epubMagic),
- TypeZip: Zip,
- TypeTar: Tar,
- TypeRar: Rar,
- TypeGz: bytePrefixMatcher(gzMagic),
- TypeBz2: bytePrefixMatcher(bz2Magic),
- Type7z: bytePrefixMatcher(sevenzMagic),
- TypeXz: bytePrefixMatcher(xzMagic),
- TypeZstd: Zst,
- TypePdf: bytePrefixMatcher(pdfMagic),
- TypeExe: bytePrefixMatcher(exeMagic),
- TypeSwf: Swf,
- TypeRtf: bytePrefixMatcher(rtfMagic),
- TypeEot: Eot,
- TypePs: bytePrefixMatcher(psMagic),
- TypeSqlite: bytePrefixMatcher(sqliteMagic),
- TypeNes: bytePrefixMatcher(nesMagic),
- TypeCrx: bytePrefixMatcher(crxMagic),
- TypeCab: Cab,
- TypeDeb: bytePrefixMatcher(debMagic),
- TypeAr: bytePrefixMatcher(arMagic),
- TypeZ: Z,
- TypeLz: bytePrefixMatcher(lzMagic),
- TypeRpm: Rpm,
- TypeElf: Elf,
- TypeDcm: Dcm,
- TypeIso: Iso,
- TypeMachO: MachO,
-}
-
-var (
- epubMagic = []byte{
- 0x50, 0x4B, 0x03, 0x04, 0x6D, 0x69, 0x6D, 0x65,
- 0x74, 0x79, 0x70, 0x65, 0x61, 0x70, 0x70, 0x6C,
- 0x69, 0x63, 0x61, 0x74, 0x69, 0x6F, 0x6E, 0x2F,
- 0x65, 0x70, 0x75, 0x62, 0x2B, 0x7A, 0x69, 0x70,
- }
- gzMagic = []byte{0x1F, 0x8B, 0x08}
- bz2Magic = []byte{0x42, 0x5A, 0x68}
- sevenzMagic = []byte{0x37, 0x7A, 0xBC, 0xAF, 0x27, 0x1C}
- pdfMagic = []byte{0x25, 0x50, 0x44, 0x46}
- exeMagic = []byte{0x4D, 0x5A}
- rtfMagic = []byte{0x7B, 0x5C, 0x72, 0x74, 0x66}
- nesMagic = []byte{0x4E, 0x45, 0x53, 0x1A}
- crxMagic = []byte{0x43, 0x72, 0x32, 0x34}
- psMagic = []byte{0x25, 0x21}
- xzMagic = []byte{0xFD, 0x37, 0x7A, 0x58, 0x5A, 0x00}
- sqliteMagic = []byte{0x53, 0x51, 0x4C, 0x69}
- debMagic = []byte{
- 0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E, 0x0A,
- 0x64, 0x65, 0x62, 0x69, 0x61, 0x6E, 0x2D, 0x62,
- 0x69, 0x6E, 0x61, 0x72, 0x79,
- }
- arMagic = []byte{0x21, 0x3C, 0x61, 0x72, 0x63, 0x68, 0x3E}
- zstdMagic = []byte{0x28, 0xB5, 0x2F, 0xFD}
- lzMagic = []byte{0x4C, 0x5A, 0x49, 0x50}
-)
-
-func bytePrefixMatcher(magicPattern []byte) Matcher {
- return func(data []byte) bool {
- return compareBytes(data, magicPattern, 0)
- }
-}
-
-func Zip(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x50 && buf[1] == 0x4B &&
- (buf[2] == 0x3 || buf[2] == 0x5 || buf[2] == 0x7) &&
- (buf[3] == 0x4 || buf[3] == 0x6 || buf[3] == 0x8)
-}
-
-func Tar(buf []byte) bool {
- return len(buf) > 261 &&
- buf[257] == 0x75 && buf[258] == 0x73 &&
- buf[259] == 0x74 && buf[260] == 0x61 &&
- buf[261] == 0x72
-}
-
-func Rar(buf []byte) bool {
- return len(buf) > 6 &&
- buf[0] == 0x52 && buf[1] == 0x61 && buf[2] == 0x72 &&
- buf[3] == 0x21 && buf[4] == 0x1A && buf[5] == 0x7 &&
- (buf[6] == 0x0 || buf[6] == 0x1)
-}
-
-func Swf(buf []byte) bool {
- return len(buf) > 2 &&
- (buf[0] == 0x43 || buf[0] == 0x46) &&
- buf[1] == 0x57 && buf[2] == 0x53
-}
-
-func Cab(buf []byte) bool {
- return len(buf) > 3 &&
- ((buf[0] == 0x4D && buf[1] == 0x53 && buf[2] == 0x43 && buf[3] == 0x46) ||
- (buf[0] == 0x49 && buf[1] == 0x53 && buf[2] == 0x63 && buf[3] == 0x28))
-}
-
-func Eot(buf []byte) bool {
- return len(buf) > 35 &&
- buf[34] == 0x4C && buf[35] == 0x50 &&
- ((buf[8] == 0x02 && buf[9] == 0x00 &&
- buf[10] == 0x01) || (buf[8] == 0x01 &&
- buf[9] == 0x00 && buf[10] == 0x00) ||
- (buf[8] == 0x02 && buf[9] == 0x00 &&
- buf[10] == 0x02))
-}
-
-func Z(buf []byte) bool {
- return len(buf) > 1 &&
- ((buf[0] == 0x1F && buf[1] == 0xA0) ||
- (buf[0] == 0x1F && buf[1] == 0x9D))
-}
-
-func Rpm(buf []byte) bool {
- return len(buf) > 96 &&
- buf[0] == 0xED && buf[1] == 0xAB &&
- buf[2] == 0xEE && buf[3] == 0xDB
-}
-
-func Elf(buf []byte) bool {
- return len(buf) > 52 &&
- buf[0] == 0x7F && buf[1] == 0x45 &&
- buf[2] == 0x4C && buf[3] == 0x46
-}
-
-func Dcm(buf []byte) bool {
- return len(buf) > 131 &&
- buf[128] == 0x44 && buf[129] == 0x49 &&
- buf[130] == 0x43 && buf[131] == 0x4D
-}
-
-func Iso(buf []byte) bool {
- return len(buf) > 32773 &&
- buf[32769] == 0x43 && buf[32770] == 0x44 &&
- buf[32771] == 0x30 && buf[32772] == 0x30 &&
- buf[32773] == 0x31
-}
-
-func MachO(buf []byte) bool {
- return len(buf) > 3 && ((buf[0] == 0xFE && buf[1] == 0xED && buf[2] == 0xFA && buf[3] == 0xCF) ||
- (buf[0] == 0xFE && buf[1] == 0xED && buf[2] == 0xFA && buf[3] == 0xCE) ||
- (buf[0] == 0xBE && buf[1] == 0xBA && buf[2] == 0xFE && buf[3] == 0xCA) ||
- // Big endian versions below here...
- (buf[0] == 0xCF && buf[1] == 0xFA && buf[2] == 0xED && buf[3] == 0xFE) ||
- (buf[0] == 0xCE && buf[1] == 0xFA && buf[2] == 0xED && buf[3] == 0xFE) ||
- (buf[0] == 0xCA && buf[1] == 0xFE && buf[2] == 0xBA && buf[3] == 0xBE))
-}
-
-// Zstandard compressed data is made of one or more frames.
-// There are two frame formats defined by Zstandard: Zstandard frames and Skippable frames.
-// See more details from https://tools.ietf.org/id/draft-kucherawy-dispatch-zstd-00.html#rfc.section.2
-func Zst(buf []byte) bool {
- if compareBytes(buf, zstdMagic, 0) {
- return true
- } else {
- // skippable frames
- if len(buf) < 8 {
- return false
- }
- if binary.LittleEndian.Uint32(buf[:4]) & ZstdMagicSkippableMask == ZstdMagicSkippableStart {
- userDataLength := binary.LittleEndian.Uint32(buf[4:8])
- if len(buf) < 8 + int(userDataLength) {
- return false
- }
- nextFrame := buf[8+userDataLength:]
- return Zst(nextFrame)
- }
- return false
- }
-}
diff --git a/vendor/github.com/h2non/filetype/matchers/audio.go b/vendor/github.com/h2non/filetype/matchers/audio.go
deleted file mode 100644
index b34605aec..000000000
--- a/vendor/github.com/h2non/filetype/matchers/audio.go
+++ /dev/null
@@ -1,85 +0,0 @@
-package matchers
-
-var (
- TypeMidi = newType("mid", "audio/midi")
- TypeMp3 = newType("mp3", "audio/mpeg")
- TypeM4a = newType("m4a", "audio/m4a")
- TypeOgg = newType("ogg", "audio/ogg")
- TypeFlac = newType("flac", "audio/x-flac")
- TypeWav = newType("wav", "audio/x-wav")
- TypeAmr = newType("amr", "audio/amr")
- TypeAac = newType("aac", "audio/aac")
- TypeAiff = newType("aiff", "audio/x-aiff")
-)
-
-var Audio = Map{
- TypeMidi: Midi,
- TypeMp3: Mp3,
- TypeM4a: M4a,
- TypeOgg: Ogg,
- TypeFlac: Flac,
- TypeWav: Wav,
- TypeAmr: Amr,
- TypeAac: Aac,
- TypeAiff: Aiff,
-}
-
-func Midi(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x4D && buf[1] == 0x54 &&
- buf[2] == 0x68 && buf[3] == 0x64
-}
-
-func Mp3(buf []byte) bool {
- return len(buf) > 2 &&
- ((buf[0] == 0x49 && buf[1] == 0x44 && buf[2] == 0x33) ||
- (buf[0] == 0xFF && buf[1] == 0xfb))
-}
-
-func M4a(buf []byte) bool {
- return len(buf) > 10 &&
- ((buf[4] == 0x66 && buf[5] == 0x74 && buf[6] == 0x79 &&
- buf[7] == 0x70 && buf[8] == 0x4D && buf[9] == 0x34 && buf[10] == 0x41) ||
- (buf[0] == 0x4D && buf[1] == 0x34 && buf[2] == 0x41 && buf[3] == 0x20))
-}
-
-func Ogg(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x4F && buf[1] == 0x67 &&
- buf[2] == 0x67 && buf[3] == 0x53
-}
-
-func Flac(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x66 && buf[1] == 0x4C &&
- buf[2] == 0x61 && buf[3] == 0x43
-}
-
-func Wav(buf []byte) bool {
- return len(buf) > 11 &&
- buf[0] == 0x52 && buf[1] == 0x49 &&
- buf[2] == 0x46 && buf[3] == 0x46 &&
- buf[8] == 0x57 && buf[9] == 0x41 &&
- buf[10] == 0x56 && buf[11] == 0x45
-}
-
-func Amr(buf []byte) bool {
- return len(buf) > 11 &&
- buf[0] == 0x23 && buf[1] == 0x21 &&
- buf[2] == 0x41 && buf[3] == 0x4D &&
- buf[4] == 0x52 && buf[5] == 0x0A
-}
-
-func Aac(buf []byte) bool {
- return len(buf) > 1 &&
- ((buf[0] == 0xFF && buf[1] == 0xF1) ||
- (buf[0] == 0xFF && buf[1] == 0xF9))
-}
-
-func Aiff(buf []byte) bool {
- return len(buf) > 11 &&
- buf[0] == 0x46 && buf[1] == 0x4F &&
- buf[2] == 0x52 && buf[3] == 0x4D &&
- buf[8] == 0x41 && buf[9] == 0x49 &&
- buf[10] == 0x46 && buf[11] == 0x46
-}
diff --git a/vendor/github.com/h2non/filetype/matchers/document.go b/vendor/github.com/h2non/filetype/matchers/document.go
deleted file mode 100644
index b898c0ff7..000000000
--- a/vendor/github.com/h2non/filetype/matchers/document.go
+++ /dev/null
@@ -1,197 +0,0 @@
-package matchers
-
-import (
- "bytes"
- "encoding/binary"
-)
-
-var (
- TypeDoc = newType("doc", "application/msword")
- TypeDocx = newType("docx", "application/vnd.openxmlformats-officedocument.wordprocessingml.document")
- TypeXls = newType("xls", "application/vnd.ms-excel")
- TypeXlsx = newType("xlsx", "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
- TypePpt = newType("ppt", "application/vnd.ms-powerpoint")
- TypePptx = newType("pptx", "application/vnd.openxmlformats-officedocument.presentationml.presentation")
-)
-
-var Document = Map{
- TypeDoc: Doc,
- TypeDocx: Docx,
- TypeXls: Xls,
- TypeXlsx: Xlsx,
- TypePpt: Ppt,
- TypePptx: Pptx,
-}
-
-type docType int
-
-const (
- TYPE_DOC docType = iota
- TYPE_DOCX
- TYPE_XLS
- TYPE_XLSX
- TYPE_PPT
- TYPE_PPTX
- TYPE_OOXML
-)
-
-// Reference: https://bz.apache.org/ooo/show_bug.cgi?id=111457
-func Doc(buf []byte) bool {
- if len(buf) > 513 {
- return buf[0] == 0xD0 && buf[1] == 0xCF &&
- buf[2] == 0x11 && buf[3] == 0xE0 &&
- buf[512] == 0xEC && buf[513] == 0xA5
- } else {
- return len(buf) > 3 &&
- buf[0] == 0xD0 && buf[1] == 0xCF &&
- buf[2] == 0x11 && buf[3] == 0xE0
- }
-}
-
-func Docx(buf []byte) bool {
- typ, ok := msooxml(buf)
- return ok && typ == TYPE_DOCX
-}
-
-func Xls(buf []byte) bool {
- if len(buf) > 513 {
- return buf[0] == 0xD0 && buf[1] == 0xCF &&
- buf[2] == 0x11 && buf[3] == 0xE0 &&
- buf[512] == 0x09 && buf[513] == 0x08
- } else {
- return len(buf) > 3 &&
- buf[0] == 0xD0 && buf[1] == 0xCF &&
- buf[2] == 0x11 && buf[3] == 0xE0
- }
-}
-
-func Xlsx(buf []byte) bool {
- typ, ok := msooxml(buf)
- return ok && typ == TYPE_XLSX
-}
-
-func Ppt(buf []byte) bool {
- if len(buf) > 513 {
- return buf[0] == 0xD0 && buf[1] == 0xCF &&
- buf[2] == 0x11 && buf[3] == 0xE0 &&
- buf[512] == 0xA0 && buf[513] == 0x46
- } else {
- return len(buf) > 3 &&
- buf[0] == 0xD0 && buf[1] == 0xCF &&
- buf[2] == 0x11 && buf[3] == 0xE0
- }
-}
-
-func Pptx(buf []byte) bool {
- typ, ok := msooxml(buf)
- return ok && typ == TYPE_PPTX
-}
-
-func msooxml(buf []byte) (typ docType, found bool) {
- signature := []byte{'P', 'K', 0x03, 0x04}
-
- // start by checking for ZIP local file header signature
- if ok := compareBytes(buf, signature, 0); !ok {
- return
- }
-
- // make sure the first file is correct
- if v, ok := checkMSOoml(buf, 0x1E); ok {
- return v, ok
- }
-
- if !compareBytes(buf, []byte("[Content_Types].xml"), 0x1E) &&
- !compareBytes(buf, []byte("_rels/.rels"), 0x1E) &&
- !compareBytes(buf, []byte("docProps"), 0x1E) {
- return
- }
-
- // skip to the second local file header
- // since some documents include a 520-byte extra field following the file
- // header, we need to scan for the next header
- startOffset := int(binary.LittleEndian.Uint32(buf[18:22]) + 49)
- idx := search(buf, startOffset, 6000)
- if idx == -1 {
- return
- }
-
- // now skip to the *third* local file header; again, we need to scan due to a
- // 520-byte extra field following the file header
- startOffset += idx + 4 + 26
- idx = search(buf, startOffset, 6000)
- if idx == -1 {
- return
- }
-
- // and check the subdirectory name to determine which type of OOXML
- // file we have. Correct the mimetype with the registered ones:
- // http://technet.microsoft.com/en-us/library/cc179224.aspx
- startOffset += idx + 4 + 26
- if typ, ok := checkMSOoml(buf, startOffset); ok {
- return typ, ok
- }
-
- // OpenOffice/LibreOffice orders ZIP entries differently, so check the 4th file
- startOffset += 26
- idx = search(buf, startOffset, 6000)
- if idx == -1 {
- return TYPE_OOXML, true
- }
-
- startOffset += idx + 4 + 26
- if typ, ok := checkMSOoml(buf, startOffset); ok {
- return typ, ok
- } else {
- return TYPE_OOXML, true
- }
-}
-
-func compareBytes(slice, subSlice []byte, startOffset int) bool {
- sl := len(subSlice)
-
- if startOffset+sl > len(slice) {
- return false
- }
-
- s := slice[startOffset : startOffset+sl]
- for i := range s {
- if subSlice[i] != s[i] {
- return false
- }
- }
-
- return true
-}
-
-func checkMSOoml(buf []byte, offset int) (typ docType, ok bool) {
- ok = true
-
- switch {
- case compareBytes(buf, []byte("word/"), offset):
- typ = TYPE_DOCX
- case compareBytes(buf, []byte("ppt/"), offset):
- typ = TYPE_PPTX
- case compareBytes(buf, []byte("xl/"), offset):
- typ = TYPE_XLSX
- default:
- ok = false
- }
-
- return
-}
-
-func search(buf []byte, start, rangeNum int) int {
- length := len(buf)
- end := start + rangeNum
- signature := []byte{'P', 'K', 0x03, 0x04}
-
- if end > length {
- end = length
- }
-
- if start >= end {
- return -1
- }
-
- return bytes.Index(buf[start:end], signature)
-}
diff --git a/vendor/github.com/h2non/filetype/matchers/font.go b/vendor/github.com/h2non/filetype/matchers/font.go
deleted file mode 100644
index f39171675..000000000
--- a/vendor/github.com/h2non/filetype/matchers/font.go
+++ /dev/null
@@ -1,45 +0,0 @@
-package matchers
-
-var (
- TypeWoff = newType("woff", "application/font-woff")
- TypeWoff2 = newType("woff2", "application/font-woff")
- TypeTtf = newType("ttf", "application/font-sfnt")
- TypeOtf = newType("otf", "application/font-sfnt")
-)
-
-var Font = Map{
- TypeWoff: Woff,
- TypeWoff2: Woff2,
- TypeTtf: Ttf,
- TypeOtf: Otf,
-}
-
-func Woff(buf []byte) bool {
- return len(buf) > 7 &&
- buf[0] == 0x77 && buf[1] == 0x4F &&
- buf[2] == 0x46 && buf[3] == 0x46 &&
- buf[4] == 0x00 && buf[5] == 0x01 &&
- buf[6] == 0x00 && buf[7] == 0x00
-}
-
-func Woff2(buf []byte) bool {
- return len(buf) > 7 &&
- buf[0] == 0x77 && buf[1] == 0x4F &&
- buf[2] == 0x46 && buf[3] == 0x32 &&
- buf[4] == 0x00 && buf[5] == 0x01 &&
- buf[6] == 0x00 && buf[7] == 0x00
-}
-
-func Ttf(buf []byte) bool {
- return len(buf) > 4 &&
- buf[0] == 0x00 && buf[1] == 0x01 &&
- buf[2] == 0x00 && buf[3] == 0x00 &&
- buf[4] == 0x00
-}
-
-func Otf(buf []byte) bool {
- return len(buf) > 4 &&
- buf[0] == 0x4F && buf[1] == 0x54 &&
- buf[2] == 0x54 && buf[3] == 0x4F &&
- buf[4] == 0x00
-}
diff --git a/vendor/github.com/h2non/filetype/matchers/image.go b/vendor/github.com/h2non/filetype/matchers/image.go
deleted file mode 100644
index 0465d0d68..000000000
--- a/vendor/github.com/h2non/filetype/matchers/image.go
+++ /dev/null
@@ -1,143 +0,0 @@
-package matchers
-
-import "github.com/h2non/filetype/matchers/isobmff"
-
-var (
- TypeJpeg = newType("jpg", "image/jpeg")
- TypeJpeg2000 = newType("jp2", "image/jp2")
- TypePng = newType("png", "image/png")
- TypeGif = newType("gif", "image/gif")
- TypeWebp = newType("webp", "image/webp")
- TypeCR2 = newType("cr2", "image/x-canon-cr2")
- TypeTiff = newType("tif", "image/tiff")
- TypeBmp = newType("bmp", "image/bmp")
- TypeJxr = newType("jxr", "image/vnd.ms-photo")
- TypePsd = newType("psd", "image/vnd.adobe.photoshop")
- TypeIco = newType("ico", "image/vnd.microsoft.icon")
- TypeHeif = newType("heif", "image/heif")
- TypeDwg = newType("dwg", "image/vnd.dwg")
-)
-
-var Image = Map{
- TypeJpeg: Jpeg,
- TypeJpeg2000: Jpeg2000,
- TypePng: Png,
- TypeGif: Gif,
- TypeWebp: Webp,
- TypeCR2: CR2,
- TypeTiff: Tiff,
- TypeBmp: Bmp,
- TypeJxr: Jxr,
- TypePsd: Psd,
- TypeIco: Ico,
- TypeHeif: Heif,
- TypeDwg: Dwg,
-}
-
-func Jpeg(buf []byte) bool {
- return len(buf) > 2 &&
- buf[0] == 0xFF &&
- buf[1] == 0xD8 &&
- buf[2] == 0xFF
-}
-
-func Jpeg2000(buf []byte) bool {
- return len(buf) > 12 &&
- buf[0] == 0x0 &&
- buf[1] == 0x0 &&
- buf[2] == 0x0 &&
- buf[3] == 0xC &&
- buf[4] == 0x6A &&
- buf[5] == 0x50 &&
- buf[6] == 0x20 &&
- buf[7] == 0x20 &&
- buf[8] == 0xD &&
- buf[9] == 0xA &&
- buf[10] == 0x87 &&
- buf[11] == 0xA &&
- buf[12] == 0x0
-}
-
-func Png(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x89 && buf[1] == 0x50 &&
- buf[2] == 0x4E && buf[3] == 0x47
-}
-
-func Gif(buf []byte) bool {
- return len(buf) > 2 &&
- buf[0] == 0x47 && buf[1] == 0x49 && buf[2] == 0x46
-}
-
-func Webp(buf []byte) bool {
- return len(buf) > 11 &&
- buf[8] == 0x57 && buf[9] == 0x45 &&
- buf[10] == 0x42 && buf[11] == 0x50
-}
-
-func CR2(buf []byte) bool {
- return len(buf) > 10 &&
- ((buf[0] == 0x49 && buf[1] == 0x49 && buf[2] == 0x2A && buf[3] == 0x0) || // Little Endian
- (buf[0] == 0x4D && buf[1] == 0x4D && buf[2] == 0x0 && buf[3] == 0x2A)) && // Big Endian
- buf[8] == 0x43 && buf[9] == 0x52 && // CR2 magic word
- buf[10] == 0x02 // CR2 major version
-}
-
-func Tiff(buf []byte) bool {
- return len(buf) > 10 &&
- ((buf[0] == 0x49 && buf[1] == 0x49 && buf[2] == 0x2A && buf[3] == 0x0) || // Little Endian
- (buf[0] == 0x4D && buf[1] == 0x4D && buf[2] == 0x0 && buf[3] == 0x2A)) && // Big Endian
- !CR2(buf) // To avoid conflicts differentiate Tiff from CR2
-}
-
-func Bmp(buf []byte) bool {
- return len(buf) > 1 &&
- buf[0] == 0x42 &&
- buf[1] == 0x4D
-}
-
-func Jxr(buf []byte) bool {
- return len(buf) > 2 &&
- buf[0] == 0x49 &&
- buf[1] == 0x49 &&
- buf[2] == 0xBC
-}
-
-func Psd(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x38 && buf[1] == 0x42 &&
- buf[2] == 0x50 && buf[3] == 0x53
-}
-
-func Ico(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x00 && buf[1] == 0x00 &&
- buf[2] == 0x01 && buf[3] == 0x00
-}
-
-func Heif(buf []byte) bool {
- if !isobmff.IsISOBMFF(buf) {
- return false
- }
-
- majorBrand, _, compatibleBrands := isobmff.GetFtyp(buf)
- if majorBrand == "heic" {
- return true
- }
-
- if majorBrand == "mif1" || majorBrand == "msf1" {
- for _, compatibleBrand := range compatibleBrands {
- if compatibleBrand == "heic" {
- return true
- }
- }
- }
-
- return false
-}
-
-func Dwg(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x41 && buf[1] == 0x43 &&
- buf[2] == 0x31 && buf[3] == 0x30
-}
diff --git a/vendor/github.com/h2non/filetype/matchers/isobmff/isobmff.go b/vendor/github.com/h2non/filetype/matchers/isobmff/isobmff.go
deleted file mode 100644
index b3e39bf59..000000000
--- a/vendor/github.com/h2non/filetype/matchers/isobmff/isobmff.go
+++ /dev/null
@@ -1,37 +0,0 @@
-package isobmff
-
-import "encoding/binary"
-
-// IsISOBMFF checks whether the given buffer represents ISO Base Media File Format data
-func IsISOBMFF(buf []byte) bool {
- if len(buf) < 16 || string(buf[4:8]) != "ftyp" {
- return false
- }
-
- if ftypLength := binary.BigEndian.Uint32(buf[0:4]); len(buf) < int(ftypLength) {
- return false
- }
-
- return true
-}
-
-// GetFtyp returns the major brand, minor version and compatible brands of the ISO-BMFF data
-func GetFtyp(buf []byte) (string, string, []string) {
- if len(buf) < 17 {
- return "", "", []string{""}
- }
-
- ftypLength := binary.BigEndian.Uint32(buf[0:4])
-
- majorBrand := string(buf[8:12])
- minorVersion := string(buf[12:16])
-
- compatibleBrands := []string{}
- for i := 16; i < int(ftypLength); i += 4 {
- if len(buf) >= (i + 4) {
- compatibleBrands = append(compatibleBrands, string(buf[i:i+4]))
- }
- }
-
- return majorBrand, minorVersion, compatibleBrands
-}
diff --git a/vendor/github.com/h2non/filetype/matchers/matchers.go b/vendor/github.com/h2non/filetype/matchers/matchers.go
deleted file mode 100644
index 20d74d080..000000000
--- a/vendor/github.com/h2non/filetype/matchers/matchers.go
+++ /dev/null
@@ -1,51 +0,0 @@
-package matchers
-
-import (
- "github.com/h2non/filetype/types"
-)
-
-// Internal shortcut to NewType
-var newType = types.NewType
-
-// Matcher function interface as type alias
-type Matcher func([]byte) bool
-
-// Map stores pairs of file types and their matcher functions
-type Map map[types.Type]Matcher
-
-// Type specific matcher function interface
-type TypeMatcher func([]byte) types.Type
-
-// Store registered file type matchers
-var Matchers = make(map[types.Type]TypeMatcher)
-var MatcherKeys []types.Type
-
-// Create and register a new type matcher function
-func NewMatcher(kind types.Type, fn Matcher) TypeMatcher {
- matcher := func(buf []byte) types.Type {
- if fn(buf) {
- return kind
- }
- return types.Unknown
- }
-
- Matchers[kind] = matcher
- // prepend here so any user defined matchers get added first
- MatcherKeys = append([]types.Type{kind}, MatcherKeys...)
- return matcher
-}
-
-func register(matchers ...Map) {
- MatcherKeys = MatcherKeys[:0]
- for _, m := range matchers {
- for kind, matcher := range m {
- NewMatcher(kind, matcher)
- }
- }
-}
-
-func init() {
- // Arguments order is intentional
- // Archive files will be checked last due to prepend above in func NewMatcher
- register(Archive, Document, Font, Audio, Video, Image, Application)
-}
diff --git a/vendor/github.com/h2non/filetype/matchers/video.go b/vendor/github.com/h2non/filetype/matchers/video.go
deleted file mode 100644
index e97cf28a1..000000000
--- a/vendor/github.com/h2non/filetype/matchers/video.go
+++ /dev/null
@@ -1,145 +0,0 @@
-package matchers
-
-import "bytes"
-
-var (
- TypeMp4 = newType("mp4", "video/mp4")
- TypeM4v = newType("m4v", "video/x-m4v")
- TypeMkv = newType("mkv", "video/x-matroska")
- TypeWebm = newType("webm", "video/webm")
- TypeMov = newType("mov", "video/quicktime")
- TypeAvi = newType("avi", "video/x-msvideo")
- TypeWmv = newType("wmv", "video/x-ms-wmv")
- TypeMpeg = newType("mpg", "video/mpeg")
- TypeFlv = newType("flv", "video/x-flv")
- Type3gp = newType("3gp", "video/3gpp")
-)
-
-var Video = Map{
- TypeMp4: Mp4,
- TypeM4v: M4v,
- TypeMkv: Mkv,
- TypeWebm: Webm,
- TypeMov: Mov,
- TypeAvi: Avi,
- TypeWmv: Wmv,
- TypeMpeg: Mpeg,
- TypeFlv: Flv,
- Type3gp: Match3gp,
-}
-
-func M4v(buf []byte) bool {
- return len(buf) > 10 &&
- buf[4] == 0x66 && buf[5] == 0x74 &&
- buf[6] == 0x79 && buf[7] == 0x70 &&
- buf[8] == 0x4D && buf[9] == 0x34 &&
- buf[10] == 0x56
-}
-
-func Mkv(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x1A && buf[1] == 0x45 &&
- buf[2] == 0xDF && buf[3] == 0xA3 &&
- containsMatroskaSignature(buf, []byte{'m', 'a', 't', 'r', 'o', 's', 'k', 'a'})
-}
-
-func Webm(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x1A && buf[1] == 0x45 &&
- buf[2] == 0xDF && buf[3] == 0xA3 &&
- containsMatroskaSignature(buf, []byte{'w', 'e', 'b', 'm'})
-}
-
-func Mov(buf []byte) bool {
- return len(buf) > 15 && ((buf[0] == 0x0 && buf[1] == 0x0 &&
- buf[2] == 0x0 && buf[3] == 0x14 &&
- buf[4] == 0x66 && buf[5] == 0x74 &&
- buf[6] == 0x79 && buf[7] == 0x70) ||
- (buf[4] == 0x6d && buf[5] == 0x6f && buf[6] == 0x6f && buf[7] == 0x76) ||
- (buf[4] == 0x6d && buf[5] == 0x64 && buf[6] == 0x61 && buf[7] == 0x74) ||
- (buf[12] == 0x6d && buf[13] == 0x64 && buf[14] == 0x61 && buf[15] == 0x74))
-}
-
-func Avi(buf []byte) bool {
- return len(buf) > 10 &&
- buf[0] == 0x52 && buf[1] == 0x49 &&
- buf[2] == 0x46 && buf[3] == 0x46 &&
- buf[8] == 0x41 && buf[9] == 0x56 &&
- buf[10] == 0x49
-}
-
-func Wmv(buf []byte) bool {
- return len(buf) > 9 &&
- buf[0] == 0x30 && buf[1] == 0x26 &&
- buf[2] == 0xB2 && buf[3] == 0x75 &&
- buf[4] == 0x8E && buf[5] == 0x66 &&
- buf[6] == 0xCF && buf[7] == 0x11 &&
- buf[8] == 0xA6 && buf[9] == 0xD9
-}
-
-func Mpeg(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x0 && buf[1] == 0x0 &&
- buf[2] == 0x1 && buf[3] >= 0xb0 &&
- buf[3] <= 0xbf
-}
-
-func Flv(buf []byte) bool {
- return len(buf) > 3 &&
- buf[0] == 0x46 && buf[1] == 0x4C &&
- buf[2] == 0x56 && buf[3] == 0x01
-}
-
-func Mp4(buf []byte) bool {
- return len(buf) > 11 &&
- (buf[4] == 'f' && buf[5] == 't' && buf[6] == 'y' && buf[7] == 'p') &&
- ((buf[8] == 'a' && buf[9] == 'v' && buf[10] == 'c' && buf[11] == '1') ||
- (buf[8] == 'd' && buf[9] == 'a' && buf[10] == 's' && buf[11] == 'h') ||
- (buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '2') ||
- (buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '3') ||
- (buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '4') ||
- (buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '5') ||
- (buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == '6') ||
- (buf[8] == 'i' && buf[9] == 's' && buf[10] == 'o' && buf[11] == 'm') ||
- (buf[8] == 'm' && buf[9] == 'm' && buf[10] == 'p' && buf[11] == '4') ||
- (buf[8] == 'm' && buf[9] == 'p' && buf[10] == '4' && buf[11] == '1') ||
- (buf[8] == 'm' && buf[9] == 'p' && buf[10] == '4' && buf[11] == '2') ||
- (buf[8] == 'm' && buf[9] == 'p' && buf[10] == '4' && buf[11] == 'v') ||
- (buf[8] == 'm' && buf[9] == 'p' && buf[10] == '7' && buf[11] == '1') ||
- (buf[8] == 'M' && buf[9] == 'S' && buf[10] == 'N' && buf[11] == 'V') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'A' && buf[11] == 'S') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'C') ||
- (buf[8] == 'N' && buf[9] == 'S' && buf[10] == 'D' && buf[11] == 'C') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'H') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'M') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'P') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'S' && buf[11] == 'S') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'C') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'H') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'M') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'P') ||
- (buf[8] == 'N' && buf[9] == 'D' && buf[10] == 'X' && buf[11] == 'S') ||
- (buf[8] == 'F' && buf[9] == '4' && buf[10] == 'V' && buf[11] == ' ') ||
- (buf[8] == 'F' && buf[9] == '4' && buf[10] == 'P' && buf[11] == ' '))
-}
-
-func Match3gp(buf []byte) bool {
- return len(buf) > 10 &&
- buf[4] == 0x66 && buf[5] == 0x74 && buf[6] == 0x79 &&
- buf[7] == 0x70 && buf[8] == 0x33 && buf[9] == 0x67 &&
- buf[10] == 0x70
-}
-
-func containsMatroskaSignature(buf, subType []byte) bool {
- limit := 4096
- if len(buf) < limit {
- limit = len(buf)
- }
-
- index := bytes.Index(buf[:limit], subType)
- if index < 3 {
- return false
- }
-
- return buf[index-3] == 0x42 && buf[index-2] == 0x82
-}
diff --git a/vendor/github.com/h2non/filetype/types/defaults.go b/vendor/github.com/h2non/filetype/types/defaults.go
deleted file mode 100644
index 0d985a05d..000000000
--- a/vendor/github.com/h2non/filetype/types/defaults.go
+++ /dev/null
@@ -1,4 +0,0 @@
-package types
-
-// Unknown default type
-var Unknown = NewType("unknown", "")
diff --git a/vendor/github.com/h2non/filetype/types/mime.go b/vendor/github.com/h2non/filetype/types/mime.go
deleted file mode 100644
index fe8ea822e..000000000
--- a/vendor/github.com/h2non/filetype/types/mime.go
+++ /dev/null
@@ -1,14 +0,0 @@
-package types
-
-// MIME stores the file MIME type values
-type MIME struct {
- Type string
- Subtype string
- Value string
-}
-
-// NewMIME creates a new MIME type
-func NewMIME(mime string) MIME {
- kind, subtype := splitMime(mime)
- return MIME{Type: kind, Subtype: subtype, Value: mime}
-}
diff --git a/vendor/github.com/h2non/filetype/types/split.go b/vendor/github.com/h2non/filetype/types/split.go
deleted file mode 100644
index 68a5a8b3b..000000000
--- a/vendor/github.com/h2non/filetype/types/split.go
+++ /dev/null
@@ -1,11 +0,0 @@
-package types
-
-import "strings"
-
-func splitMime(s string) (string, string) {
- x := strings.Split(s, "/")
- if len(x) > 1 {
- return x[0], x[1]
- }
- return x[0], ""
-}
diff --git a/vendor/github.com/h2non/filetype/types/type.go b/vendor/github.com/h2non/filetype/types/type.go
deleted file mode 100644
index 5cf7dfc4b..000000000
--- a/vendor/github.com/h2non/filetype/types/type.go
+++ /dev/null
@@ -1,16 +0,0 @@
-package types
-
-// Type represents a file MIME type and its extension
-type Type struct {
- MIME MIME
- Extension string
-}
-
-// NewType creates a new Type
-func NewType(ext, mime string) Type {
- t := Type{
- MIME: NewMIME(mime),
- Extension: ext,
- }
- return Add(t)
-}
diff --git a/vendor/github.com/h2non/filetype/types/types.go b/vendor/github.com/h2non/filetype/types/types.go
deleted file mode 100644
index f59e256f0..000000000
--- a/vendor/github.com/h2non/filetype/types/types.go
+++ /dev/null
@@ -1,23 +0,0 @@
-package types
-
-import "sync"
-
-// Types supports concurrent map writes
-var Types sync.Map
-
-// Add registers a new type in the package
-func Add(t Type) Type {
- Types.Store(t.Extension, t)
- return t
-}
-
-// Get retrieves a Type by extension
-func Get(ext string) Type {
- if tmp, ok := Types.Load(ext); ok {
- kind := tmp.(Type)
- if kind.Extension != "" {
- return kind
- }
- }
- return Unknown
-}
diff --git a/vendor/github.com/h2non/filetype/version.go b/vendor/github.com/h2non/filetype/version.go
deleted file mode 100644
index d3730313f..000000000
--- a/vendor/github.com/h2non/filetype/version.go
+++ /dev/null
@@ -1,4 +0,0 @@
-package filetype
-
-// Version exposes the current package version.
-const Version = "1.1.3"
diff --git a/vendor/github.com/h2non/go-is-svg/.editorconfig b/vendor/github.com/h2non/go-is-svg/.editorconfig
deleted file mode 100644
index 000dc0a7a..000000000
--- a/vendor/github.com/h2non/go-is-svg/.editorconfig
+++ /dev/null
@@ -1,12 +0,0 @@
-root = true
-
-[*]
-indent_style = tabs
-indent_size = 2
-end_of_line = lf
-charset = utf-8
-trim_trailing_whitespace = true
-insert_final_newline = true
-
-[*.md]
-trim_trailing_whitespace = false
diff --git a/vendor/github.com/h2non/go-is-svg/.gitignore b/vendor/github.com/h2non/go-is-svg/.gitignore
deleted file mode 100644
index 3cf256521..000000000
--- a/vendor/github.com/h2non/go-is-svg/.gitignore
+++ /dev/null
@@ -1,7 +0,0 @@
-/bimg
-/bundle
-bin
-/*.jpg
-/*.png
-/*.webp
-/fixtures/*_out.*
diff --git a/vendor/github.com/h2non/go-is-svg/.travis.yml b/vendor/github.com/h2non/go-is-svg/.travis.yml
deleted file mode 100644
index d5a81534a..000000000
--- a/vendor/github.com/h2non/go-is-svg/.travis.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-language: go
-
-go:
- - 1.5
- - 1.6
- - 1.7
- - tip
-
-before_install:
- - go get github.com/nbio/st
- - go get -u -v github.com/axw/gocov/gocov
- - go get -u -v github.com/mattn/goveralls
- - go get -u -v github.com/golang/lint/golint
-
-script:
- - diff -u <(echo -n) <(gofmt -s -d ./)
- - diff -u <(echo -n) <(go vet ./...)
- - diff -u <(echo -n) <(golint ./...)
- - go test -v -race ./...
- - go test -v -race -covermode=atomic -coverprofile=coverage.out
-
-after_success:
- - goveralls -coverprofile=coverage.out -service=travis-ci
diff --git a/vendor/github.com/h2non/go-is-svg/LICENSE b/vendor/github.com/h2non/go-is-svg/LICENSE
deleted file mode 100644
index f67807d00..000000000
--- a/vendor/github.com/h2non/go-is-svg/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-The MIT License
-
-Copyright (c) 2016 Tomas Aparicio
-
-Permission is hereby granted, free of charge, to any person
-obtaining a copy of this software and associated documentation
-files (the "Software"), to deal in the Software without
-restriction, including without limitation the rights to use,
-copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following
-conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
-HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
-WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
-OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/h2non/go-is-svg/README.md b/vendor/github.com/h2non/go-is-svg/README.md
deleted file mode 100644
index d4a0ee6c0..000000000
--- a/vendor/github.com/h2non/go-is-svg/README.md
+++ /dev/null
@@ -1,47 +0,0 @@
-# go-is-svg [![Build Status](https://travis-ci.org/h2non/go-is-svg.png)](https://travis-ci.org/h2non/go-is-svg) [![GoDoc](https://godoc.org/github.com/h2non/go-is-svg?status.svg)](https://godoc.org/github.com/h2non/go-is-svg) [![Coverage Status](https://coveralls.io/repos/github/h2non/go-is-svg/badge.svg?branch=master)](https://coveralls.io/github/h2non/go-is-svg?branch=master) [![Go Report Card](https://goreportcard.com/badge/github.com/h2non/go-is-svg)](https://goreportcard.com/report/github.com/h2non/go-is-svg)
-
-Tiny package to verify if a given file buffer is an SVG image in Go (golang).
-
-See also [filetype](https://github.com/h2non/filetype) package for binary files type inference.
-
-## Installation
-
-```bash
-go get -u github.com/h2non/go-is-svg
-```
-
-## Example
-
-```go
-package main
-
-import (
- "fmt"
- "io/ioutil"
-
- svg "github.com/h2non/go-is-svg"
-)
-
-func main() {
- buf, err := ioutil.ReadFile("_example/example.svg")
- if err != nil {
- fmt.Printf("Error: %s\n", err)
- return
- }
-
- if svg.Is(buf) {
- fmt.Println("File is an SVG")
- } else {
- fmt.Println("File is NOT an SVG")
- }
-}
-```
-
-Run example:
-```bash
-go run _example/example.go
-```
-
-## License
-
-MIT - Tomas Aparicio
diff --git a/vendor/github.com/h2non/go-is-svg/svg.go b/vendor/github.com/h2non/go-is-svg/svg.go
deleted file mode 100644
index 062f6e1f6..000000000
--- a/vendor/github.com/h2non/go-is-svg/svg.go
+++ /dev/null
@@ -1,36 +0,0 @@
-package issvg
-
-import (
- "regexp"
- "unicode/utf8"
-)
-
-var (
- htmlCommentRegex = regexp.MustCompile("(?i)<!--([\\s\\S]*?)-->")
- svgRegex = regexp.MustCompile(`(?i)^\s*(?:<\?xml[^>]*>\s*)?(?:]*>\s*)?